use std::assert_matches::assert_matches;
use std::borrow::{Borrow, Cow};
use std::cell::Cell;
use std::collections::VecDeque;
use std::{fmt, ptr};

use rustc_abi::{Align, HasDataLayout, Size};
use rustc_ast::Mutability;
use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
use rustc_middle::mir::display_allocation;
use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
use rustc_middle::{bug, throw_ub_format};
use tracing::{debug, instrument, trace};

use super::{
    AllocBytes, AllocId, AllocInit, AllocMap, AllocRange, Allocation, CheckAlignMsg,
    CheckInAllocMsg, CtfeProvenance, GlobalAlloc, InterpCx, InterpResult, Machine, MayLeak,
    Misalignment, Pointer, PointerArithmetic, Provenance, Scalar, alloc_range, err_ub,
    err_ub_custom, interp_ok, throw_ub, throw_ub_custom, throw_unsup, throw_unsup_format,
};
use crate::const_eval::ConstEvalErrKind;
use crate::fluent_generated as fluent;

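/// The kind of memory a dynamically created allocation lives in. `Machine(T)` lets each
/// machine define additional kinds; the kind is checked again when the allocation is
/// deallocated (see `deallocate_ptr`).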
#[derive(Debug, PartialEq, Copy, Clone)]
pub enum MemoryKind<T> {
    Stack,
    CallerLocation,
    Machine(T),
}

impl<T: MayLeak> MayLeak for MemoryKind<T> {
    #[inline]
    fn may_leak(self) -> bool {
        match self {
            MemoryKind::Stack => false,
            MemoryKind::CallerLocation => true,
            MemoryKind::Machine(k) => k.may_leak(),
        }
    }
}

impl<T: fmt::Display> fmt::Display for MemoryKind<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            MemoryKind::Stack => write!(f, "stack variable"),
            MemoryKind::CallerLocation => write!(f, "caller location"),
            MemoryKind::Machine(m) => write!(f, "{m}"),
        }
    }
}

#[derive(Copy, Clone, PartialEq, Debug)]
pub enum AllocKind {
    LiveData,
    Function,
    VTable,
    TypeId,
    Dead,
}

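/// Metadata about an allocation (size, alignment, kind, and mutability) that can be
/// obtained without reading the allocation's contents; see `get_alloc_info`.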
#[derive(Copy, Clone, PartialEq, Debug)]
pub struct AllocInfo {
    pub size: Size,
    pub align: Align,
    pub kind: AllocKind,
    pub mutbl: Mutability,
}

impl AllocInfo {
    fn new(size: Size, align: Align, kind: AllocKind, mutbl: Mutability) -> Self {
        Self { size, align, kind, mutbl }
    }
}

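/// A function value: either a regular monomorphized `Instance`, or an `Other` value that
/// only the machine knows how to handle. `as_instance` rejects the latter.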
#[derive(Debug, Copy, Clone)]
pub enum FnVal<'tcx, Other> {
    Instance(Instance<'tcx>),
    Other(Other),
}

impl<'tcx, Other> FnVal<'tcx, Other> {
    pub fn as_instance(self) -> InterpResult<'tcx, Instance<'tcx>> {
        match self {
            FnVal::Instance(instance) => interp_ok(instance),
            FnVal::Other(_) => {
                throw_unsup_format!("'foreign' function pointers are not supported in this context")
            }
        }
    }
}

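/// The interpreter's view of memory.
///
/// `alloc_map` holds the allocations created during interpretation, keyed by `AllocId`;
/// global (tcx-owned) allocations are not stored here but fetched lazily and only copied
/// into `alloc_map` when they need to be mutated or adjusted by the machine.
/// `extra_fn_ptr_map` records machine-defined function values behind reserved IDs,
/// `dead_alloc_map` remembers the size and alignment of deallocated allocations, and
/// `validation_in_progress` suppresses the per-access machine hooks during validation.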
pub struct Memory<'tcx, M: Machine<'tcx>> {
    pub(super) alloc_map: M::MemoryMap,

    extra_fn_ptr_map: FxIndexMap<AllocId, M::ExtraFnVal>,

    pub(super) dead_alloc_map: FxIndexMap<AllocId, (Size, Align)>,

    validation_in_progress: Cell<bool>,
}

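/// A reference to some allocation whose `range` has already been bounds-checked; all
/// reads through it are relative to that range. `AllocRefMut` below is the mutable
/// counterpart used for writes.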
#[derive(Copy, Clone)]
pub struct AllocRef<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes = Box<[u8]>> {
    alloc: &'a Allocation<Prov, Extra, Bytes>,
    range: AllocRange,
    tcx: TyCtxt<'tcx>,
    alloc_id: AllocId,
}
pub struct AllocRefMut<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes = Box<[u8]>> {
    alloc: &'a mut Allocation<Prov, Extra, Bytes>,
    range: AllocRange,
    tcx: TyCtxt<'tcx>,
    alloc_id: AllocId,
}

impl<'tcx, M: Machine<'tcx>> Memory<'tcx, M> {
    pub fn new() -> Self {
        Memory {
            alloc_map: M::MemoryMap::default(),
            extra_fn_ptr_map: FxIndexMap::default(),
            dead_alloc_map: FxIndexMap::default(),
            validation_in_progress: Cell::new(false),
        }
    }

    pub fn alloc_map(&self) -> &M::MemoryMap {
        &self.alloc_map
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
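    /// Turns a pointer into global (tcx-interned) memory into a pointer usable by this
    /// interpreter instance, letting the machine adjust its provenance. It is a bug to
    /// pass a pointer to a thread-local static here; extern statics are deferred to
    /// `M::extern_static_pointer`.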
    #[inline]
    pub fn global_root_pointer(
        &self,
        ptr: Pointer<CtfeProvenance>,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        let alloc_id = ptr.provenance.alloc_id();
        match self.tcx.try_get_global_alloc(alloc_id) {
            Some(GlobalAlloc::Static(def_id)) if self.tcx.is_thread_local_static(def_id) => {
                bug!("global memory cannot point to thread-local static")
            }
            Some(GlobalAlloc::Static(def_id)) if self.tcx.is_foreign_item(def_id) => {
                return M::extern_static_pointer(self, def_id);
            }
            None => {
                assert!(
                    self.memory.extra_fn_ptr_map.contains_key(&alloc_id),
                    "{alloc_id:?} is neither global nor a function pointer"
                );
            }
            _ => {}
        }
        M::adjust_alloc_root_pointer(self, ptr, M::GLOBAL_KIND.map(MemoryKind::Machine))
    }

    pub fn fn_ptr(&mut self, fn_val: FnVal<'tcx, M::ExtraFnVal>) -> Pointer<M::Provenance> {
        let id = match fn_val {
            FnVal::Instance(instance) => {
                let salt = M::get_global_alloc_salt(self, Some(instance));
                self.tcx.reserve_and_set_fn_alloc(instance, salt)
            }
            FnVal::Other(extra) => {
                let id = self.tcx.reserve_alloc_id();
                let old = self.memory.extra_fn_ptr_map.insert(id, extra);
                assert!(old.is_none());
                id
            }
        };
        self.global_root_pointer(Pointer::from(id)).unwrap()
    }

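    /// Allocates fresh memory of the given size, alignment, and kind. Depending on
    /// `M::PANIC_ON_ALLOC_FAIL`, allocation failure either panics or is reported as an
    /// interpreter error.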
    pub fn allocate_ptr(
        &mut self,
        size: Size,
        align: Align,
        kind: MemoryKind<M::MemoryKind>,
        init: AllocInit,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        let params = self.machine.get_default_alloc_params();
        let alloc = if M::PANIC_ON_ALLOC_FAIL {
            Allocation::new(size, align, init, params)
        } else {
            Allocation::try_new(size, align, init, params)?
        };
        self.insert_allocation(alloc, kind)
    }

    pub fn allocate_bytes_ptr(
        &mut self,
        bytes: &[u8],
        align: Align,
        kind: MemoryKind<M::MemoryKind>,
        mutability: Mutability,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        let params = self.machine.get_default_alloc_params();
        let alloc = Allocation::from_bytes(bytes, align, mutability, params);
        self.insert_allocation(alloc, kind)
    }

    pub fn insert_allocation(
        &mut self,
        alloc: Allocation<M::Provenance, (), M::Bytes>,
        kind: MemoryKind<M::MemoryKind>,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        assert!(alloc.size() <= self.max_size_of_val());
        let id = self.tcx.reserve_alloc_id();
        debug_assert_ne!(
            Some(kind),
            M::GLOBAL_KIND.map(MemoryKind::Machine),
            "dynamically allocating global memory"
        );
        let extra = M::init_local_allocation(self, id, kind, alloc.size(), alloc.align)?;
        let alloc = alloc.with_extra(extra);
        self.memory.alloc_map.insert(id, (kind, alloc));
        M::adjust_alloc_root_pointer(self, Pointer::from(id), Some(kind))
    }

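    /// Reallocation: allocates a new block with the requested size and alignment, copies
    /// over `min(old_size, new_size)` bytes, and deallocates the old block. The pointer
    /// must point to the start of its allocation.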
    pub fn reallocate_ptr(
        &mut self,
        ptr: Pointer<Option<M::Provenance>>,
        old_size_and_align: Option<(Size, Align)>,
        new_size: Size,
        new_align: Align,
        kind: MemoryKind<M::MemoryKind>,
        init_growth: AllocInit,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        let (alloc_id, offset, _prov) = self.ptr_get_alloc_id(ptr, 0)?;
        if offset.bytes() != 0 {
            throw_ub_custom!(
                fluent::const_eval_realloc_or_alloc_with_offset,
                ptr = format!("{ptr:?}"),
                kind = "realloc"
            );
        }

        let new_ptr = self.allocate_ptr(new_size, new_align, kind, init_growth)?;
        let old_size = match old_size_and_align {
            Some((size, _align)) => size,
            None => self.get_alloc_raw(alloc_id)?.size(),
        };
        self.mem_copy(ptr, new_ptr.into(), old_size.min(new_size), true)?;
        self.deallocate_ptr(ptr, old_size_and_align, kind)?;

        interp_ok(new_ptr)
    }

    pub fn make_const_heap_ptr_global(
        &mut self,
        ptr: Pointer<Option<CtfeProvenance>>,
    ) -> InterpResult<'tcx>
    where
        M: Machine<'tcx, MemoryKind = crate::const_eval::MemoryKind, Provenance = CtfeProvenance>,
    {
        let (alloc_id, offset, _) = self.ptr_get_alloc_id(ptr, 0)?;
        if offset.bytes() != 0 {
            return Err(ConstEvalErrKind::ConstMakeGlobalWithOffset(ptr)).into();
        }

        if matches!(self.tcx.try_get_global_alloc(alloc_id), Some(_)) {
            return Err(ConstEvalErrKind::ConstMakeGlobalPtrIsNonHeap(ptr)).into();
        }

        let (kind, alloc) = self
            .memory
            .alloc_map
            .get_mut_or(alloc_id, || Err(ConstEvalErrKind::ConstMakeGlobalWithDanglingPtr(ptr)))?;

        match kind {
            MemoryKind::Stack | MemoryKind::CallerLocation => {
                return Err(ConstEvalErrKind::ConstMakeGlobalPtrIsNonHeap(ptr)).into();
            }
            MemoryKind::Machine(crate::const_eval::MemoryKind::Heap { was_made_global }) => {
                if *was_made_global {
                    return Err(ConstEvalErrKind::ConstMakeGlobalPtrAlreadyMadeGlobal(alloc_id))
                        .into();
                }
                *was_made_global = true;
            }
        }

        alloc.mutability = Mutability::Not;

        interp_ok(())
    }

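    /// Deallocates a pointer, checking that it points to the start of a live, mutable
    /// allocation of the expected kind (and, if provided, the expected size and alignment).
    /// The allocation's size and alignment are recorded in `dead_alloc_map`.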
    #[instrument(skip(self), level = "debug")]
    pub fn deallocate_ptr(
        &mut self,
        ptr: Pointer<Option<M::Provenance>>,
        old_size_and_align: Option<(Size, Align)>,
        kind: MemoryKind<M::MemoryKind>,
    ) -> InterpResult<'tcx> {
        let (alloc_id, offset, prov) = self.ptr_get_alloc_id(ptr, 0)?;
        trace!("deallocating: {alloc_id:?}");

        if offset.bytes() != 0 {
            throw_ub_custom!(
                fluent::const_eval_realloc_or_alloc_with_offset,
                ptr = format!("{ptr:?}"),
                kind = "dealloc",
            );
        }

        let Some((alloc_kind, mut alloc)) = self.memory.alloc_map.remove(&alloc_id) else {
            return Err(match self.tcx.try_get_global_alloc(alloc_id) {
                Some(GlobalAlloc::Function { .. }) => {
                    err_ub_custom!(
                        fluent::const_eval_invalid_dealloc,
                        alloc_id = alloc_id,
                        kind = "fn",
                    )
                }
                Some(GlobalAlloc::VTable(..)) => {
                    err_ub_custom!(
                        fluent::const_eval_invalid_dealloc,
                        alloc_id = alloc_id,
                        kind = "vtable",
                    )
                }
                Some(GlobalAlloc::TypeId { .. }) => {
                    err_ub_custom!(
                        fluent::const_eval_invalid_dealloc,
                        alloc_id = alloc_id,
                        kind = "typeid",
                    )
                }
                Some(GlobalAlloc::Static(..) | GlobalAlloc::Memory(..)) => {
                    err_ub_custom!(
                        fluent::const_eval_invalid_dealloc,
                        alloc_id = alloc_id,
                        kind = "static_mem"
                    )
                }
                None => err_ub!(PointerUseAfterFree(alloc_id, CheckInAllocMsg::MemoryAccess)),
            })
            .into();
        };

        if alloc.mutability.is_not() {
            throw_ub_custom!(fluent::const_eval_dealloc_immutable, alloc = alloc_id,);
        }
        if alloc_kind != kind {
            throw_ub_custom!(
                fluent::const_eval_dealloc_kind_mismatch,
                alloc = alloc_id,
                alloc_kind = format!("{alloc_kind}"),
                kind = format!("{kind}"),
            );
        }
        if let Some((size, align)) = old_size_and_align {
            if size != alloc.size() || align != alloc.align {
                throw_ub_custom!(
                    fluent::const_eval_dealloc_incorrect_layout,
                    alloc = alloc_id,
                    size = alloc.size().bytes(),
                    align = alloc.align.bytes(),
                    size_found = size.bytes(),
                    align_found = align.bytes(),
                )
            }
        }

        let size = alloc.size();
        M::before_memory_deallocation(
            self.tcx,
            &mut self.machine,
            &mut alloc.extra,
            ptr,
            (alloc_id, prov),
            size,
            alloc.align,
            kind,
        )?;

        let old = self.memory.dead_alloc_map.insert(alloc_id, (size, alloc.align));
        if old.is_some() {
            bug!("Nothing can be deallocated twice");
        }

        interp_ok(())
    }

    #[inline(always)]
    fn get_ptr_access(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
    ) -> InterpResult<'tcx, Option<(AllocId, Size, M::ProvenanceExtra)>> {
        let size = i64::try_from(size.bytes()).unwrap();
        Self::check_and_deref_ptr(
            self,
            ptr,
            size,
            CheckInAllocMsg::MemoryAccess,
            |this, alloc_id, offset, prov| {
                let (size, align) =
                    this.get_live_alloc_size_and_align(alloc_id, CheckInAllocMsg::MemoryAccess)?;
                interp_ok((size, align, (alloc_id, offset, prov)))
            },
        )
    }

    #[inline(always)]
    pub fn check_ptr_access(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
        msg: CheckInAllocMsg,
    ) -> InterpResult<'tcx> {
        let size = i64::try_from(size.bytes()).unwrap();
        Self::check_and_deref_ptr(self, ptr, size, msg, |this, alloc_id, _, _| {
            let (size, align) = this.get_live_alloc_size_and_align(alloc_id, msg)?;
            interp_ok((size, align, ()))
        })?;
        interp_ok(())
    }

    pub fn check_ptr_access_signed(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: i64,
        msg: CheckInAllocMsg,
    ) -> InterpResult<'tcx> {
        Self::check_and_deref_ptr(self, ptr, size, msg, |this, alloc_id, _, _| {
            let (size, align) = this.get_live_alloc_size_and_align(alloc_id, msg)?;
            interp_ok((size, align, ()))
        })?;
        interp_ok(())
    }

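    /// The core pointer check shared by the accessors above: for a non-zero `size`
    /// (negative meaning the access goes backwards in memory), resolves the pointer to an
    /// allocation via the `alloc_size` callback, checks that the whole range is in bounds,
    /// and returns whatever extra data the callback produced. Zero-sized accesses return
    /// `None` without any further checks.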
    fn check_and_deref_ptr<T, R: Borrow<Self>>(
        this: R,
        ptr: Pointer<Option<M::Provenance>>,
        size: i64,
        msg: CheckInAllocMsg,
        alloc_size: impl FnOnce(
            R,
            AllocId,
            Size,
            M::ProvenanceExtra,
        ) -> InterpResult<'tcx, (Size, Align, T)>,
    ) -> InterpResult<'tcx, Option<T>> {
        if size == 0 {
            return interp_ok(None);
        }

        interp_ok(match this.borrow().ptr_try_get_alloc_id(ptr, size) {
            Err(addr) => {
                throw_ub!(DanglingIntPointer { addr, inbounds_size: size, msg });
            }
            Ok((alloc_id, offset, prov)) => {
                let tcx = this.borrow().tcx;
                let (alloc_size, _alloc_align, ret_val) = alloc_size(this, alloc_id, offset, prov)?;
                let offset = offset.bytes();
                let (begin, end) = if size >= 0 {
                    (Some(offset), offset.checked_add(size as u64))
                } else {
                    (offset.checked_sub(size.unsigned_abs()), Some(offset))
                };
                let in_bounds = begin.is_some() && end.is_some_and(|e| e <= alloc_size.bytes());
                if !in_bounds {
                    throw_ub!(PointerOutOfBounds {
                        alloc_id,
                        alloc_size,
                        ptr_offset: tcx.sign_extend_to_target_isize(offset),
                        inbounds_size: size,
                        msg,
                    })
                }

                Some(ret_val)
            }
        })
    }

    pub(super) fn check_misalign(
        &self,
        misaligned: Option<Misalignment>,
        msg: CheckAlignMsg,
    ) -> InterpResult<'tcx> {
        if let Some(misaligned) = misaligned {
            throw_ub!(AlignmentCheckFailed(misaligned, msg))
        }
        interp_ok(())
    }

    pub(super) fn is_ptr_misaligned(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        align: Align,
    ) -> Option<Misalignment> {
        if !M::enforce_alignment(self) || align.bytes() == 1 {
            return None;
        }

        #[inline]
        fn is_offset_misaligned(offset: u64, align: Align) -> Option<Misalignment> {
            if offset.is_multiple_of(align.bytes()) {
                None
            } else {
                let offset_pow2 = 1 << offset.trailing_zeros();
                Some(Misalignment { has: Align::from_bytes(offset_pow2).unwrap(), required: align })
            }
        }

        match self.ptr_try_get_alloc_id(ptr, 0) {
            Err(addr) => is_offset_misaligned(addr, align),
            Ok((alloc_id, offset, _prov)) => {
                let alloc_info = self.get_alloc_info(alloc_id);
                if let Some(misalign) = M::alignment_check(
                    self,
                    alloc_id,
                    alloc_info.align,
                    alloc_info.kind,
                    offset,
                    align,
                ) {
                    Some(misalign)
                } else if M::Provenance::OFFSET_IS_ADDR {
                    is_offset_misaligned(ptr.addr().bytes(), align)
                } else {
                    if alloc_info.align.bytes() < align.bytes() {
                        Some(Misalignment { has: alloc_info.align, required: align })
                    } else {
                        is_offset_misaligned(offset.bytes(), align)
                    }
                }
            }
        }
    }

    pub fn check_ptr_align(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        align: Align,
    ) -> InterpResult<'tcx> {
        self.check_misalign(self.is_ptr_misaligned(ptr, align), CheckAlignMsg::AccessedPtr)
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
    pub fn remove_unreachable_allocs(&mut self, reachable_allocs: &FxHashSet<AllocId>) {
        #[allow(rustc::potential_query_instability)]
        self.memory.dead_alloc_map.retain(|id, _| reachable_allocs.contains(id));
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
    fn get_global_alloc(
        &self,
        id: AllocId,
        is_write: bool,
    ) -> InterpResult<'tcx, Cow<'tcx, Allocation<M::Provenance, M::AllocExtra, M::Bytes>>> {
        let (alloc, def_id) = match self.tcx.try_get_global_alloc(id) {
            Some(GlobalAlloc::Memory(mem)) => {
                (mem, None)
            }
            Some(GlobalAlloc::Function { .. }) => throw_ub!(DerefFunctionPointer(id)),
            Some(GlobalAlloc::VTable(..)) => throw_ub!(DerefVTablePointer(id)),
            Some(GlobalAlloc::TypeId { .. }) => throw_ub!(DerefTypeIdPointer(id)),
            None => throw_ub!(PointerUseAfterFree(id, CheckInAllocMsg::MemoryAccess)),
            Some(GlobalAlloc::Static(def_id)) => {
                assert!(self.tcx.is_static(def_id));
                assert!(!self.tcx.is_thread_local_static(def_id));
                if self.tcx.is_foreign_item(def_id) {
                    throw_unsup!(ExternStatic(def_id));
                }

                let val = self.ctfe_query(|tcx| tcx.eval_static_initializer(def_id))?;
                (val, Some(def_id))
            }
        };
        M::before_access_global(self.tcx, &self.machine, id, alloc, def_id, is_write)?;
        M::adjust_global_allocation(
            self,
            id,
            alloc.inner(),
        )
    }

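    /// Gives raw, read-only access to the `Allocation` behind `id`; global allocations
    /// are pulled in from the `tcx` on demand. This bypasses the per-access machine
    /// hooks, so most callers should prefer `get_ptr_alloc`.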
    pub fn get_alloc_raw(
        &self,
        id: AllocId,
    ) -> InterpResult<'tcx, &Allocation<M::Provenance, M::AllocExtra, M::Bytes>> {
        let a = self.memory.alloc_map.get_or(id, || {
            let alloc = self.get_global_alloc(id, false).report_err().map_err(Err)?;
            match alloc {
                Cow::Borrowed(alloc) => {
                    Err(Ok(alloc))
                }
                Cow::Owned(alloc) => {
                    let kind = M::GLOBAL_KIND.expect(
                        "I got a global allocation that I have to copy but the machine does \
                         not expect that to happen",
                    );
                    Ok((MemoryKind::Machine(kind), alloc))
                }
            }
        });
        match a {
            Ok(a) => interp_ok(&a.1),
            Err(a) => a.into(),
        }
    }

    pub fn get_alloc_bytes_unchecked_raw(&self, id: AllocId) -> InterpResult<'tcx, *const u8> {
        let alloc = self.get_alloc_raw(id)?;
        interp_ok(alloc.get_bytes_unchecked_raw())
    }

    pub fn get_ptr_alloc<'a>(
        &'a self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
    ) -> InterpResult<'tcx, Option<AllocRef<'a, 'tcx, M::Provenance, M::AllocExtra, M::Bytes>>>
    {
        let size_i64 = i64::try_from(size.bytes()).unwrap();
        let ptr_and_alloc = Self::check_and_deref_ptr(
            self,
            ptr,
            size_i64,
            CheckInAllocMsg::MemoryAccess,
            |this, alloc_id, offset, prov| {
                let alloc = this.get_alloc_raw(alloc_id)?;
                interp_ok((alloc.size(), alloc.align, (alloc_id, offset, prov, alloc)))
            },
        )?;
        if !self.memory.validation_in_progress.get() {
            if let Ok((alloc_id, ..)) = self.ptr_try_get_alloc_id(ptr, size_i64) {
                M::before_alloc_access(self.tcx, &self.machine, alloc_id)?;
            }
        }

        if let Some((alloc_id, offset, prov, alloc)) = ptr_and_alloc {
            let range = alloc_range(offset, size);
            if !self.memory.validation_in_progress.get() {
                M::before_memory_read(
                    self.tcx,
                    &self.machine,
                    &alloc.extra,
                    ptr,
                    (alloc_id, prov),
                    range,
                )?;
            }
            interp_ok(Some(AllocRef { alloc, range, tcx: *self.tcx, alloc_id }))
        } else {
            interp_ok(None)
        }
    }

    pub fn get_alloc_extra<'a>(&'a self, id: AllocId) -> InterpResult<'tcx, &'a M::AllocExtra> {
        interp_ok(&self.get_alloc_raw(id)?.extra)
    }

    pub fn get_alloc_mutability<'a>(&'a self, id: AllocId) -> InterpResult<'tcx, Mutability> {
        interp_ok(self.get_alloc_raw(id)?.mutability)
    }

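    /// Gives raw mutable access to the `Allocation` behind `id`, copying a global
    /// allocation into local memory first if necessary, and refusing writes to read-only
    /// allocations. Also returns the machine so callers can keep using it while the
    /// allocation is mutably borrowed.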
    pub fn get_alloc_raw_mut(
        &mut self,
        id: AllocId,
    ) -> InterpResult<'tcx, (&mut Allocation<M::Provenance, M::AllocExtra, M::Bytes>, &mut M)> {
        if self.memory.alloc_map.get_mut(id).is_none() {
            let alloc = self.get_global_alloc(id, true)?;
            let kind = M::GLOBAL_KIND.expect(
                "I got a global allocation that I have to copy but the machine does \
                 not expect that to happen",
            );
            self.memory.alloc_map.insert(id, (MemoryKind::Machine(kind), alloc.into_owned()));
        }

        let (_kind, alloc) = self.memory.alloc_map.get_mut(id).unwrap();
        if alloc.mutability.is_not() {
            throw_ub!(WriteToReadOnly(id))
        }
        interp_ok((alloc, &mut self.machine))
    }

    pub fn get_alloc_bytes_unchecked_raw_mut(
        &mut self,
        id: AllocId,
    ) -> InterpResult<'tcx, *mut u8> {
        let alloc = self.get_alloc_raw_mut(id)?.0;
        interp_ok(alloc.get_bytes_unchecked_raw_mut())
    }

    pub fn get_ptr_alloc_mut<'a>(
        &'a mut self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
    ) -> InterpResult<'tcx, Option<AllocRefMut<'a, 'tcx, M::Provenance, M::AllocExtra, M::Bytes>>>
    {
        let tcx = self.tcx;
        let validation_in_progress = self.memory.validation_in_progress.get();

        let size_i64 = i64::try_from(size.bytes()).unwrap();
        let ptr_and_alloc = Self::check_and_deref_ptr(
            self,
            ptr,
            size_i64,
            CheckInAllocMsg::MemoryAccess,
            |this, alloc_id, offset, prov| {
                let (alloc, machine) = this.get_alloc_raw_mut(alloc_id)?;
                interp_ok((alloc.size(), alloc.align, (alloc_id, offset, prov, alloc, machine)))
            },
        )?;

        if let Some((alloc_id, offset, prov, alloc, machine)) = ptr_and_alloc {
            let range = alloc_range(offset, size);
            if !validation_in_progress {
                M::before_alloc_access(tcx, machine, alloc_id)?;
                M::before_memory_write(
                    tcx,
                    machine,
                    &mut alloc.extra,
                    ptr,
                    (alloc_id, prov),
                    range,
                )?;
            }
            interp_ok(Some(AllocRefMut { alloc, range, tcx: *tcx, alloc_id }))
        } else {
            interp_ok(None)
        }
    }

    pub fn get_alloc_extra_mut<'a>(
        &'a mut self,
        id: AllocId,
    ) -> InterpResult<'tcx, (&'a mut M::AllocExtra, &'a mut M)> {
        let (alloc, machine) = self.get_alloc_raw_mut(id)?;
        interp_ok((&mut alloc.extra, machine))
    }

    pub fn is_alloc_live(&self, id: AllocId) -> bool {
        self.memory.alloc_map.contains_key_ref(&id)
            || self.memory.extra_fn_ptr_map.contains_key(&id)
            || self.tcx.try_get_global_alloc(id).is_some()
    }

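    /// Obtains size, alignment, kind, and mutability for any allocation ID, whether it is
    /// live local memory, a function or vtable, a global, or already deallocated.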
    pub fn get_alloc_info(&self, id: AllocId) -> AllocInfo {
        if let Some((_, alloc)) = self.memory.alloc_map.get(id) {
            return AllocInfo::new(
                alloc.size(),
                alloc.align,
                AllocKind::LiveData,
                alloc.mutability,
            );
        }

        if let Some(fn_val) = self.get_fn_alloc(id) {
            let align = match fn_val {
                FnVal::Instance(instance) => {
                    self.tcx.codegen_instance_attrs(instance.def).alignment.unwrap_or(Align::ONE)
                }
                FnVal::Other(_) => Align::ONE,
            };

            return AllocInfo::new(Size::ZERO, align, AllocKind::Function, Mutability::Not);
        }

        if let Some(global_alloc) = self.tcx.try_get_global_alloc(id) {
            let (size, align) = global_alloc.size_and_align(*self.tcx, self.typing_env);
            let mutbl = global_alloc.mutability(*self.tcx, self.typing_env);
            let kind = match global_alloc {
                GlobalAlloc::Static { .. } | GlobalAlloc::Memory { .. } => AllocKind::LiveData,
                GlobalAlloc::Function { .. } => bug!("We already checked function pointers above"),
                GlobalAlloc::VTable { .. } => AllocKind::VTable,
                GlobalAlloc::TypeId { .. } => AllocKind::TypeId,
            };
            return AllocInfo::new(size, align, kind, mutbl);
        }

        let (size, align) = *self
            .memory
            .dead_alloc_map
            .get(&id)
            .expect("deallocated pointers should all be recorded in `dead_alloc_map`");
        AllocInfo::new(size, align, AllocKind::Dead, Mutability::Not)
    }

    fn get_live_alloc_size_and_align(
        &self,
        id: AllocId,
        msg: CheckInAllocMsg,
    ) -> InterpResult<'tcx, (Size, Align)> {
        let info = self.get_alloc_info(id);
        if matches!(info.kind, AllocKind::Dead) {
            throw_ub!(PointerUseAfterFree(id, msg))
        }
        interp_ok((info.size, info.align))
    }

    fn get_fn_alloc(&self, id: AllocId) -> Option<FnVal<'tcx, M::ExtraFnVal>> {
        if let Some(extra) = self.memory.extra_fn_ptr_map.get(&id) {
            Some(FnVal::Other(*extra))
        } else {
            match self.tcx.try_get_global_alloc(id) {
                Some(GlobalAlloc::Function { instance, .. }) => Some(FnVal::Instance(instance)),
                _ => None,
            }
        }
    }

    pub fn get_ptr_type_id(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
    ) -> InterpResult<'tcx, (Ty<'tcx>, u64)> {
        let (alloc_id, offset, _meta) = self.ptr_get_alloc_id(ptr, 0)?;
        let Some(GlobalAlloc::TypeId { ty }) = self.tcx.try_get_global_alloc(alloc_id) else {
            throw_ub_format!("invalid `TypeId` value: not all bytes carry type id metadata")
        };
        interp_ok((ty, offset.bytes()))
    }

    pub fn get_ptr_fn(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
    ) -> InterpResult<'tcx, FnVal<'tcx, M::ExtraFnVal>> {
        trace!("get_ptr_fn({:?})", ptr);
        let (alloc_id, offset, _prov) = self.ptr_get_alloc_id(ptr, 0)?;
        if offset.bytes() != 0 {
            throw_ub!(InvalidFunctionPointer(Pointer::new(alloc_id, offset)))
        }
        self.get_fn_alloc(alloc_id)
            .ok_or_else(|| err_ub!(InvalidFunctionPointer(Pointer::new(alloc_id, offset))))
            .into()
    }

    pub fn get_ptr_vtable_ty(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        expected_trait: Option<&'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>>,
    ) -> InterpResult<'tcx, Ty<'tcx>> {
        trace!("get_ptr_vtable({:?})", ptr);
        let (alloc_id, offset, _tag) = self.ptr_get_alloc_id(ptr, 0)?;
        if offset.bytes() != 0 {
            throw_ub!(InvalidVTablePointer(Pointer::new(alloc_id, offset)))
        }
        let Some(GlobalAlloc::VTable(ty, vtable_dyn_type)) =
            self.tcx.try_get_global_alloc(alloc_id)
        else {
            throw_ub!(InvalidVTablePointer(Pointer::new(alloc_id, offset)))
        };
        if let Some(expected_dyn_type) = expected_trait {
            self.check_vtable_for_type(vtable_dyn_type, expected_dyn_type)?;
        }
        interp_ok(ty)
    }

    pub fn alloc_mark_immutable(&mut self, id: AllocId) -> InterpResult<'tcx> {
        self.get_alloc_raw_mut(id)?.0.mutability = Mutability::Not;
        interp_ok(())
    }

    pub fn visit_reachable_allocs(
        &mut self,
        start: Vec<AllocId>,
        mut visit: impl FnMut(&mut Self, AllocId, &AllocInfo) -> InterpResult<'tcx>,
    ) -> InterpResult<'tcx> {
        let mut done = FxHashSet::default();
        let mut todo = start;
        while let Some(id) = todo.pop() {
            if !done.insert(id) {
                continue;
            }
            let info = self.get_alloc_info(id);

            if matches!(info.kind, AllocKind::LiveData) {
                let alloc = self.get_alloc_raw(id)?;
                for prov in alloc.provenance().provenances() {
                    if let Some(id) = prov.get_alloc_id() {
                        todo.push(id);
                    }
                }
            }

            visit(self, id, &info)?;
        }
        interp_ok(())
    }

    #[must_use]
    pub fn dump_alloc<'a>(&'a self, id: AllocId) -> DumpAllocs<'a, 'tcx, M> {
        self.dump_allocs(vec![id])
    }

    #[must_use]
    pub fn dump_allocs<'a>(&'a self, mut allocs: Vec<AllocId>) -> DumpAllocs<'a, 'tcx, M> {
        allocs.sort();
        allocs.dedup();
        DumpAllocs { ecx: self, allocs }
    }

    pub fn print_alloc_bytes_for_diagnostics(&self, id: AllocId) -> String {
        let alloc = self.get_alloc_raw(id).unwrap();
        let mut bytes = String::new();
        if alloc.size() != Size::ZERO {
            bytes = "\n".into();
            rustc_middle::mir::pretty::write_allocation_bytes(*self.tcx, alloc, &mut bytes, " ")
                .unwrap();
        }
        bytes
    }

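    /// Finds allocations that are neither reachable (starting from global-kind
    /// allocations and `static_roots`, following provenance) nor allowed to leak per
    /// `MayLeak`, removes them from memory, and returns them so the caller can report
    /// them.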
    pub fn take_leaked_allocations(
        &mut self,
        static_roots: impl FnOnce(&Self) -> &[AllocId],
    ) -> Vec<(AllocId, MemoryKind<M::MemoryKind>, Allocation<M::Provenance, M::AllocExtra, M::Bytes>)>
    {
        let reachable = {
            let mut reachable = FxHashSet::default();
            let global_kind = M::GLOBAL_KIND.map(MemoryKind::Machine);
            let mut todo: Vec<_> =
                self.memory.alloc_map.filter_map_collect(move |&id, &(kind, _)| {
                    if Some(kind) == global_kind { Some(id) } else { None }
                });
            todo.extend(static_roots(self));
            while let Some(id) = todo.pop() {
                if reachable.insert(id) {
                    if let Some((_, alloc)) = self.memory.alloc_map.get(id) {
                        todo.extend(
                            alloc.provenance().provenances().filter_map(|prov| prov.get_alloc_id()),
                        );
                    }
                }
            }
            reachable
        };

        let leaked: Vec<_> = self.memory.alloc_map.filter_map_collect(|&id, &(kind, _)| {
            if kind.may_leak() || reachable.contains(&id) { None } else { Some(id) }
        });
        let mut result = Vec::new();
        for &id in leaked.iter() {
            let (kind, alloc) = self.memory.alloc_map.remove(&id).unwrap();
            result.push((id, kind, alloc));
        }
        result
    }

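    /// Runs the given closure with the `validation_in_progress` flag set, so that memory
    /// accesses performed for validation skip the usual machine access hooks. The flag
    /// must not already be set.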
    pub fn run_for_validation_mut<R>(&mut self, f: impl FnOnce(&mut Self) -> R) -> R {
        assert!(
            self.memory.validation_in_progress.replace(true) == false,
            "`validation_in_progress` was already set"
        );
        let res = f(self);
        assert!(
            self.memory.validation_in_progress.replace(false) == true,
            "`validation_in_progress` was unset by someone else"
        );
        res
    }

    pub fn run_for_validation_ref<R>(&self, f: impl FnOnce(&Self) -> R) -> R {
        assert!(
            self.memory.validation_in_progress.replace(true) == false,
            "`validation_in_progress` was already set"
        );
        let res = f(self);
        assert!(
            self.memory.validation_in_progress.replace(false) == true,
            "`validation_in_progress` was unset by someone else"
        );
        res
    }

    pub(super) fn validation_in_progress(&self) -> bool {
        self.memory.validation_in_progress.get()
    }
}

#[doc(hidden)]
pub struct DumpAllocs<'a, 'tcx, M: Machine<'tcx>> {
    ecx: &'a InterpCx<'tcx, M>,
    allocs: Vec<AllocId>,
}

impl<'a, 'tcx, M: Machine<'tcx>> std::fmt::Debug for DumpAllocs<'a, 'tcx, M> {
    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        fn write_allocation_track_relocs<'tcx, Prov: Provenance, Extra, Bytes: AllocBytes>(
            fmt: &mut std::fmt::Formatter<'_>,
            tcx: TyCtxt<'tcx>,
            allocs_to_print: &mut VecDeque<AllocId>,
            alloc: &Allocation<Prov, Extra, Bytes>,
        ) -> std::fmt::Result {
            for alloc_id in alloc.provenance().provenances().filter_map(|prov| prov.get_alloc_id())
            {
                allocs_to_print.push_back(alloc_id);
            }
            write!(fmt, "{}", display_allocation(tcx, alloc))
        }

        let mut allocs_to_print: VecDeque<_> = self.allocs.iter().copied().collect();
        let mut allocs_printed = FxHashSet::default();

        while let Some(id) = allocs_to_print.pop_front() {
            if !allocs_printed.insert(id) {
                continue;
            }

            write!(fmt, "{id:?}")?;
            match self.ecx.memory.alloc_map.get(id) {
                Some((kind, alloc)) => {
                    write!(fmt, " ({kind}, ")?;
                    write_allocation_track_relocs(
                        &mut *fmt,
                        *self.ecx.tcx,
                        &mut allocs_to_print,
                        alloc,
                    )?;
                }
                None => {
                    match self.ecx.tcx.try_get_global_alloc(id) {
                        Some(GlobalAlloc::Memory(alloc)) => {
                            write!(fmt, " (unchanged global, ")?;
                            write_allocation_track_relocs(
                                &mut *fmt,
                                *self.ecx.tcx,
                                &mut allocs_to_print,
                                alloc.inner(),
                            )?;
                        }
                        Some(GlobalAlloc::Function { instance, .. }) => {
                            write!(fmt, " (fn: {instance})")?;
                        }
                        Some(GlobalAlloc::VTable(ty, dyn_ty)) => {
                            write!(fmt, " (vtable: impl {dyn_ty} for {ty})")?;
                        }
                        Some(GlobalAlloc::TypeId { ty }) => {
                            write!(fmt, " (typeid for {ty})")?;
                        }
                        Some(GlobalAlloc::Static(did)) => {
                            write!(fmt, " (static: {})", self.ecx.tcx.def_path_str(did))?;
                        }
                        None => {
                            write!(fmt, " (deallocated)")?;
                        }
                    }
                }
            }
            writeln!(fmt)?;
        }
        Ok(())
    }
}

impl<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes>
    AllocRefMut<'a, 'tcx, Prov, Extra, Bytes>
{
    pub fn as_ref<'b>(&'b self) -> AllocRef<'b, 'tcx, Prov, Extra, Bytes> {
        AllocRef { alloc: self.alloc, range: self.range, tcx: self.tcx, alloc_id: self.alloc_id }
    }

    pub fn write_scalar(&mut self, range: AllocRange, val: Scalar<Prov>) -> InterpResult<'tcx> {
        let range = self.range.subrange(range);
        debug!("write_scalar at {:?}{range:?}: {val:?}", self.alloc_id);

        self.alloc
            .write_scalar(&self.tcx, range, val)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    pub fn write_ptr_sized(&mut self, offset: Size, val: Scalar<Prov>) -> InterpResult<'tcx> {
        self.write_scalar(alloc_range(offset, self.tcx.data_layout().pointer_size()), val)
    }

    pub fn write_uninit(&mut self, range: AllocRange) -> InterpResult<'tcx> {
        let range = self.range.subrange(range);

        self.alloc
            .write_uninit(&self.tcx, range)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    pub fn write_uninit_full(&mut self) -> InterpResult<'tcx> {
        self.alloc
            .write_uninit(&self.tcx, self.range)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    pub fn clear_provenance(&mut self) -> InterpResult<'tcx> {
        self.alloc
            .clear_provenance(&self.tcx, self.range)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }
}

impl<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes> AllocRef<'a, 'tcx, Prov, Extra, Bytes> {
    pub fn read_scalar(
        &self,
        range: AllocRange,
        read_provenance: bool,
    ) -> InterpResult<'tcx, Scalar<Prov>> {
        let range = self.range.subrange(range);
        self.alloc
            .read_scalar(&self.tcx, range, read_provenance)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    pub fn read_integer(&self, range: AllocRange) -> InterpResult<'tcx, Scalar<Prov>> {
        self.read_scalar(range, false)
    }

    pub fn read_pointer(&self, offset: Size) -> InterpResult<'tcx, Scalar<Prov>> {
        self.read_scalar(
            alloc_range(offset, self.tcx.data_layout().pointer_size()),
            true,
        )
    }

    pub fn get_bytes_strip_provenance<'b>(&'b self) -> InterpResult<'tcx, &'a [u8]> {
        self.alloc
            .get_bytes_strip_provenance(&self.tcx, self.range)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    pub fn has_provenance(&self) -> bool {
        !self.alloc.provenance().range_empty(self.range, &self.tcx)
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
    pub fn read_bytes_ptr_strip_provenance(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
    ) -> InterpResult<'tcx, &[u8]> {
        let Some(alloc_ref) = self.get_ptr_alloc(ptr, size)? else {
            return interp_ok(&[]);
        };
        interp_ok(
            alloc_ref
                .alloc
                .get_bytes_strip_provenance(&alloc_ref.tcx, alloc_ref.range)
                .map_err(|e| e.to_interp_error(alloc_ref.alloc_id))?,
        )
    }

    pub fn write_bytes_ptr(
        &mut self,
        ptr: Pointer<Option<M::Provenance>>,
        src: impl IntoIterator<Item = u8>,
    ) -> InterpResult<'tcx> {
        let mut src = src.into_iter();
        let (lower, upper) = src.size_hint();
        let len = upper.expect("can only write bounded iterators");
        assert_eq!(lower, len, "can only write iterators with a precise length");

        let size = Size::from_bytes(len);
        let Some(alloc_ref) = self.get_ptr_alloc_mut(ptr, size)? else {
            assert_matches!(src.next(), None, "iterator said it was empty but returned an element");
            return interp_ok(());
        };

        let alloc_id = alloc_ref.alloc_id;
        let bytes = alloc_ref
            .alloc
            .get_bytes_unchecked_for_overwrite(&alloc_ref.tcx, alloc_ref.range)
            .map_err(move |e| e.to_interp_error(alloc_id))?;
        for dest in bytes {
            *dest = src.next().expect("iterator was shorter than it said it would be");
        }
        assert_matches!(src.next(), None, "iterator was longer than it said it would be");
        interp_ok(())
    }

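    /// Copies `size` bytes from `src` to `dest`, including provenance and initialization
    /// state; a thin wrapper around `mem_copy_repeatedly` with a single copy.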
    pub fn mem_copy(
        &mut self,
        src: Pointer<Option<M::Provenance>>,
        dest: Pointer<Option<M::Provenance>>,
        size: Size,
        nonoverlapping: bool,
    ) -> InterpResult<'tcx> {
        self.mem_copy_repeatedly(src, dest, size, 1, nonoverlapping)
    }

    pub fn mem_copy_repeatedly(
        &mut self,
        src: Pointer<Option<M::Provenance>>,
        dest: Pointer<Option<M::Provenance>>,
        size: Size,
        num_copies: u64,
        nonoverlapping: bool,
    ) -> InterpResult<'tcx> {
        let tcx = self.tcx;
        let src_parts = self.get_ptr_access(src, size)?;
        let dest_parts = self.get_ptr_access(dest, size * num_copies)?;
        if let Ok((alloc_id, ..)) = self.ptr_try_get_alloc_id(src, size.bytes().try_into().unwrap())
        {
            M::before_alloc_access(tcx, &self.machine, alloc_id)?;
        }

        let Some((src_alloc_id, src_offset, src_prov)) = src_parts else {
            return interp_ok(());
        };
        let src_alloc = self.get_alloc_raw(src_alloc_id)?;
        let src_range = alloc_range(src_offset, size);
        assert!(!self.memory.validation_in_progress.get(), "we can't be copying during validation");

        M::before_memory_read(
            tcx,
            &self.machine,
            &src_alloc.extra,
            src,
            (src_alloc_id, src_prov),
            src_range,
        )?;
        let Some((dest_alloc_id, dest_offset, dest_prov)) = dest_parts else {
            return interp_ok(());
        };

        let src_bytes = src_alloc.get_bytes_unchecked(src_range).as_ptr();
        let provenance = src_alloc
            .provenance()
            .prepare_copy(src_range, dest_offset, num_copies, self)
            .map_err(|e| e.to_interp_error(src_alloc_id))?;
        let init = src_alloc.init_mask().prepare_copy(src_range);

        let (dest_alloc, machine) = self.get_alloc_raw_mut(dest_alloc_id)?;
        let dest_range = alloc_range(dest_offset, size * num_copies);
        M::before_alloc_access(tcx, machine, dest_alloc_id)?;
        M::before_memory_write(
            tcx,
            machine,
            &mut dest_alloc.extra,
            dest,
            (dest_alloc_id, dest_prov),
            dest_range,
        )?;
        let dest_bytes = dest_alloc
            .get_bytes_unchecked_for_overwrite_ptr(&tcx, dest_range)
            .map_err(|e| e.to_interp_error(dest_alloc_id))?
            .as_mut_ptr();

        if init.no_bytes_init() {
            dest_alloc
                .write_uninit(&tcx, dest_range)
                .map_err(|e| e.to_interp_error(dest_alloc_id))?;
            return interp_ok(());
        }

        unsafe {
            if src_alloc_id == dest_alloc_id {
                if nonoverlapping {
                    if (src_offset <= dest_offset && src_offset + size > dest_offset)
                        || (dest_offset <= src_offset && dest_offset + size > src_offset)
                    {
                        throw_ub_custom!(fluent::const_eval_copy_nonoverlapping_overlapping);
                    }
                }
            }
            if num_copies > 1 {
                assert!(nonoverlapping, "multi-copy only supported in non-overlapping mode");
            }

            let size_in_bytes = size.bytes_usize();
            if size_in_bytes == 1 {
                debug_assert!(num_copies >= 1);
                let value = *src_bytes;
                dest_bytes.write_bytes(value, (size * num_copies).bytes_usize());
            } else if src_alloc_id == dest_alloc_id {
                let mut dest_ptr = dest_bytes;
                for _ in 0..num_copies {
                    ptr::copy(src_bytes, dest_ptr, size_in_bytes);
                    dest_ptr = dest_ptr.add(size_in_bytes);
                }
            } else {
                let mut dest_ptr = dest_bytes;
                for _ in 0..num_copies {
                    ptr::copy_nonoverlapping(src_bytes, dest_ptr, size_in_bytes);
                    dest_ptr = dest_ptr.add(size_in_bytes);
                }
            }
        }

        dest_alloc.init_mask_apply_copy(
            init,
            alloc_range(dest_offset, size),
            num_copies,
        );
        dest_alloc.provenance_apply_copy(provenance);

        interp_ok(())
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
    pub fn scalar_may_be_null(&self, scalar: Scalar<M::Provenance>) -> InterpResult<'tcx, bool> {
        match scalar.try_to_scalar_int() {
            Ok(int) => interp_ok(int.is_null()),
            Err(_) => {
                let ptr = scalar.to_pointer(self)?;
                match self.ptr_try_get_alloc_id(ptr, 0) {
                    Ok((alloc_id, offset, _)) => {
                        let info = self.get_alloc_info(alloc_id);
                        if matches!(info.kind, AllocKind::TypeId) {
                            return interp_ok(true);
                        }
                        if offset <= info.size {
                            return interp_ok(false);
                        }
                        if !offset.bytes().is_multiple_of(info.align.bytes()) {
                            return interp_ok(false);
                        }
                        interp_ok(true)
                    }
                    Err(_offset) => bug!("a non-int scalar is always a pointer"),
                }
            }
        }
    }

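    /// Turns a pointer into its `(AllocId, offset, extra)` parts; the `size` of the
    /// intended access (negative meaning backwards) is passed through to the machine. If
    /// the pointer carries no usable provenance, the raw address is returned as `Err` so
    /// the caller can report a dangling pointer.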
    pub fn ptr_try_get_alloc_id(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: i64,
    ) -> Result<(AllocId, Size, M::ProvenanceExtra), u64> {
        match ptr.into_pointer_or_addr() {
            Ok(ptr) => match M::ptr_get_alloc(self, ptr, size) {
                Some((alloc_id, offset, extra)) => Ok((alloc_id, offset, extra)),
                None => {
                    assert!(M::Provenance::OFFSET_IS_ADDR);
                    let (_, addr) = ptr.into_raw_parts();
                    Err(addr.bytes())
                }
            },
            Err(addr) => Err(addr.bytes()),
        }
    }

    #[inline(always)]
    pub fn ptr_get_alloc_id(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: i64,
    ) -> InterpResult<'tcx, (AllocId, Size, M::ProvenanceExtra)> {
        self.ptr_try_get_alloc_id(ptr, size)
            .map_err(|offset| {
                err_ub!(DanglingIntPointer {
                    addr: offset,
                    inbounds_size: size,
                    msg: CheckInAllocMsg::Dereferenceable
                })
            })
            .into()
    }
}