use std::assert_matches::assert_matches;
use std::borrow::{Borrow, Cow};
use std::cell::Cell;
use std::collections::VecDeque;
use std::{fmt, ptr};

use rustc_abi::{Align, HasDataLayout, Size};
use rustc_ast::Mutability;
use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
use rustc_middle::mir::display_allocation;
use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
use rustc_middle::{bug, throw_ub_format};
use tracing::{debug, instrument, trace};

use super::{
    AllocBytes, AllocId, AllocInit, AllocMap, AllocRange, Allocation, CheckAlignMsg,
    CheckInAllocMsg, CtfeProvenance, GlobalAlloc, InterpCx, InterpResult, Machine, MayLeak,
    Misalignment, Pointer, PointerArithmetic, Provenance, Scalar, alloc_range, err_ub,
    err_ub_custom, interp_ok, throw_ub, throw_ub_custom, throw_unsup, throw_unsup_format,
};
use crate::fluent_generated as fluent;

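/// The kind of memory that is behind an allocation. The type parameter `T`
/// lets the machine distinguish additional kinds beyond the built-in ones.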
#[derive(Debug, PartialEq, Copy, Clone)]
pub enum MemoryKind<T> {
    /// Stack memory. Error if deallocated except during a stack pop.
    Stack,
    /// Memory allocated by the `caller_location` intrinsic. Error if ever deallocated.
    CallerLocation,
    /// Additional memory kinds a machine wishes to distinguish from the builtin ones.
    Machine(T),
}

impl<T: MayLeak> MayLeak for MemoryKind<T> {
    #[inline]
    fn may_leak(self) -> bool {
        match self {
            MemoryKind::Stack => false,
            MemoryKind::CallerLocation => true,
            MemoryKind::Machine(k) => k.may_leak(),
        }
    }
}

impl<T: fmt::Display> fmt::Display for MemoryKind<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            MemoryKind::Stack => write!(f, "stack variable"),
            MemoryKind::CallerLocation => write!(f, "caller location"),
            MemoryKind::Machine(m) => write!(f, "{m}"),
        }
    }
}

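/// The kind of allocation an `AllocId` refers to, as reported by `get_alloc_info`.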
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum AllocKind {
    /// A regular live data allocation.
    LiveData,
    /// A function allocation (that fn ptrs point to).
    Function,
    /// A vtable allocation.
    VTable,
    /// A dead allocation.
    Dead,
}

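/// Metadata returned by `get_alloc_info`: the size, alignment, kind, and
/// mutability of an allocation.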
#[derive(Copy, Clone, PartialEq, Debug)]
pub struct AllocInfo {
    pub size: Size,
    pub align: Align,
    pub kind: AllocKind,
    pub mutbl: Mutability,
}

impl AllocInfo {
    fn new(size: Size, align: Align, kind: AllocKind, mutbl: Mutability) -> Self {
        Self { size, align, kind, mutbl }
    }
}

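/// The value of a function pointer: either a proper function instance, or a
/// machine-defined "extra" function value (see `extra_fn_ptr_map`).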
#[derive(Debug, Copy, Clone)]
pub enum FnVal<'tcx, Other> {
    Instance(Instance<'tcx>),
    Other(Other),
}

impl<'tcx, Other> FnVal<'tcx, Other> {
    pub fn as_instance(self) -> InterpResult<'tcx, Instance<'tcx>> {
        match self {
            FnVal::Instance(instance) => interp_ok(instance),
            FnVal::Other(_) => {
                throw_unsup_format!("'foreign' function pointers are not supported in this context")
            }
        }
    }
}

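/// The interpreter's memory: all allocations that are local to the current
/// evaluation, plus bookkeeping for extra function pointers and for the size
/// and alignment of dead allocations.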
pub struct Memory<'tcx, M: Machine<'tcx>> {
    /// Allocations local to this instance of the interpreter. The kind helps
    /// ensure that the same mechanism is used for allocation and deallocation.
    /// Global allocations are copied in here lazily; see `get_global_alloc`.
    pub(super) alloc_map: M::MemoryMap,

    /// Map for "extra" function pointers created via `fn_ptr` with `FnVal::Other`.
    extra_fn_ptr_map: FxIndexMap<AllocId, M::ExtraFnVal>,

    /// To be able to compare pointers with null, and to check alignment for accesses
    /// to ZSTs (where pointers may dangle), we keep track of the size even for dead
    /// allocations.
    pub(super) dead_alloc_map: FxIndexMap<AllocId, (Size, Align)>,

    /// Tracks whether reads are currently being done purely for the purpose of
    /// validation, in which case some machine hooks are skipped.
    validation_in_progress: Cell<bool>,
}

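/// A reference to some allocation that was already bounds-checked for the given region
/// and had the on-hook `before_memory_read` run.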
#[derive(Copy, Clone)]
pub struct AllocRef<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes = Box<[u8]>> {
    alloc: &'a Allocation<Prov, Extra, Bytes>,
    range: AllocRange,
    tcx: TyCtxt<'tcx>,
    alloc_id: AllocId,
}
/// A reference to some allocation that was already bounds-checked for the given region
/// and had the on-hook `before_memory_write` run.
pub struct AllocRefMut<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes = Box<[u8]>> {
    alloc: &'a mut Allocation<Prov, Extra, Bytes>,
    range: AllocRange,
    tcx: TyCtxt<'tcx>,
    alloc_id: AllocId,
}

impl<'tcx, M: Machine<'tcx>> Memory<'tcx, M> {
    pub fn new() -> Self {
        Memory {
            alloc_map: M::MemoryMap::default(),
            extra_fn_ptr_map: FxIndexMap::default(),
            dead_alloc_map: FxIndexMap::default(),
            validation_in_progress: Cell::new(false),
        }
    }

    /// Gives read-only access to the full allocation map.
    pub fn alloc_map(&self) -> &M::MemoryMap {
        &self.alloc_map
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
    /// Call this to turn untagged "global" pointers (obtained via `tcx`) into
    /// the machine pointer to the allocation. Must never be used for any other
    /// pointers, nor for TLS statics.
    ///
    /// Using the resulting pointer represents a *direct* access to that memory
    /// (e.g. by directly using a `static`), as opposed to access through a
    /// pointer that was created by the program.
    #[inline]
    pub fn global_root_pointer(
        &self,
        ptr: Pointer<CtfeProvenance>,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        let alloc_id = ptr.provenance.alloc_id();
        // We need to handle `extern static`.
        match self.tcx.try_get_global_alloc(alloc_id) {
            Some(GlobalAlloc::Static(def_id)) if self.tcx.is_thread_local_static(def_id) => {
                // Thread-local statics do not have a constant address. They *must* be
                // accessed via `ThreadLocalRef`; we can never have a pointer to them as
                // a regular constant value.
                bug!("global memory cannot point to thread-local static")
            }
            Some(GlobalAlloc::Static(def_id)) if self.tcx.is_foreign_item(def_id) => {
                return M::extern_static_pointer(self, def_id);
            }
            None => {
                assert!(
                    self.memory.extra_fn_ptr_map.contains_key(&alloc_id),
                    "{alloc_id:?} is neither global nor a function pointer"
                );
            }
            _ => {}
        }
        // And we need to get the provenance.
        M::adjust_alloc_root_pointer(self, ptr, M::GLOBAL_KIND.map(MemoryKind::Machine))
    }

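    /// Creates a pointer for the given `FnVal`, registering machine-defined
    /// "extra" function values in `extra_fn_ptr_map` as needed.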
    pub fn fn_ptr(&mut self, fn_val: FnVal<'tcx, M::ExtraFnVal>) -> Pointer<M::Provenance> {
        let id = match fn_val {
            FnVal::Instance(instance) => {
                let salt = M::get_global_alloc_salt(self, Some(instance));
                self.tcx.reserve_and_set_fn_alloc(instance, salt)
            }
            FnVal::Other(extra) => {
                // Reserve a fresh `AllocId` for this "extra" function value.
                let id = self.tcx.reserve_alloc_id();
                let old = self.memory.extra_fn_ptr_map.insert(id, extra);
                assert!(old.is_none());
                id
            }
        };
        // Functions are global allocations, so make sure we get the right root pointer.
        // We know this is not an `extern static` so this cannot fail.
        self.global_root_pointer(Pointer::from(id)).unwrap()
    }

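    /// Allocates a new allocation of the given size and alignment, registered
    /// under the given `MemoryKind`, and returns a pointer to its start.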
    pub fn allocate_ptr(
        &mut self,
        size: Size,
        align: Align,
        kind: MemoryKind<M::MemoryKind>,
        init: AllocInit,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        let params = self.machine.get_default_alloc_params();
        let alloc = if M::PANIC_ON_ALLOC_FAIL {
            Allocation::new(size, align, init, params)
        } else {
            Allocation::try_new(size, align, init, params)?
        };
        self.insert_allocation(alloc, kind)
    }

    pub fn allocate_bytes_ptr(
        &mut self,
        bytes: &[u8],
        align: Align,
        kind: MemoryKind<M::MemoryKind>,
        mutability: Mutability,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        let params = self.machine.get_default_alloc_params();
        let alloc = Allocation::from_bytes(bytes, align, mutability, params);
        self.insert_allocation(alloc, kind)
    }

    pub fn insert_allocation(
        &mut self,
        alloc: Allocation<M::Provenance, (), M::Bytes>,
        kind: MemoryKind<M::MemoryKind>,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        assert!(alloc.size() <= self.max_size_of_val());
        let id = self.tcx.reserve_alloc_id();
        debug_assert_ne!(
            Some(kind),
            M::GLOBAL_KIND.map(MemoryKind::Machine),
            "dynamically allocating global memory"
        );
        // This cannot be merged with the `adjust_global_allocation` code path
        // since here we have an allocation that already uses `M::Bytes`.
        let extra = M::init_local_allocation(self, id, kind, alloc.size(), alloc.align)?;
        let alloc = alloc.with_extra(extra);
        self.memory.alloc_map.insert(id, (kind, alloc));
        M::adjust_alloc_root_pointer(self, Pointer::from(id), Some(kind))
    }

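    /// Reallocation: implemented as "allocate new, copy the smaller of the two
    /// sizes, deallocate old". The pointer must point to the very start of its
    /// allocation, at offset 0.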
    pub fn reallocate_ptr(
        &mut self,
        ptr: Pointer<Option<M::Provenance>>,
        old_size_and_align: Option<(Size, Align)>,
        new_size: Size,
        new_align: Align,
        kind: MemoryKind<M::MemoryKind>,
        init_growth: AllocInit,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        let (alloc_id, offset, _prov) = self.ptr_get_alloc_id(ptr, 0)?;
        if offset.bytes() != 0 {
            throw_ub_custom!(
                fluent::const_eval_realloc_or_alloc_with_offset,
                ptr = format!("{ptr:?}"),
                kind = "realloc"
            );
        }

        // For simplicity, always allocate a fresh block and copy over the old contents.
        // This can fail if the new size is too big.
        let new_ptr = self.allocate_ptr(new_size, new_align, kind, init_growth)?;
        let old_size = match old_size_and_align {
            Some((size, _align)) => size,
            None => self.get_alloc_raw(alloc_id)?.size(),
        };
        // This will also call the access hooks.
        self.mem_copy(ptr, new_ptr.into(), old_size.min(new_size), true)?;
        self.deallocate_ptr(ptr, old_size_and_align, kind)?;

        interp_ok(new_ptr)
    }

    #[instrument(skip(self), level = "debug")]
    pub fn deallocate_ptr(
        &mut self,
        ptr: Pointer<Option<M::Provenance>>,
        old_size_and_align: Option<(Size, Align)>,
        kind: MemoryKind<M::MemoryKind>,
    ) -> InterpResult<'tcx> {
        let (alloc_id, offset, prov) = self.ptr_get_alloc_id(ptr, 0)?;
        trace!("deallocating: {alloc_id:?}");

        if offset.bytes() != 0 {
            throw_ub_custom!(
                fluent::const_eval_realloc_or_alloc_with_offset,
                ptr = format!("{ptr:?}"),
                kind = "dealloc",
            );
        }

        let Some((alloc_kind, mut alloc)) = self.memory.alloc_map.remove(&alloc_id) else {
            // Deallocating global memory -- always an error.
            return Err(match self.tcx.try_get_global_alloc(alloc_id) {
                Some(GlobalAlloc::Function { .. }) => {
                    err_ub_custom!(
                        fluent::const_eval_invalid_dealloc,
                        alloc_id = alloc_id,
                        kind = "fn",
                    )
                }
                Some(GlobalAlloc::VTable(..)) => {
                    err_ub_custom!(
                        fluent::const_eval_invalid_dealloc,
                        alloc_id = alloc_id,
                        kind = "vtable",
                    )
                }
                Some(GlobalAlloc::TypeId { .. }) => {
                    err_ub_custom!(
                        fluent::const_eval_invalid_dealloc,
                        alloc_id = alloc_id,
                        kind = "typeid",
                    )
                }
                Some(GlobalAlloc::Static(..) | GlobalAlloc::Memory(..)) => {
                    err_ub_custom!(
                        fluent::const_eval_invalid_dealloc,
                        alloc_id = alloc_id,
                        kind = "static_mem"
                    )
                }
                None => err_ub!(PointerUseAfterFree(alloc_id, CheckInAllocMsg::MemoryAccess)),
            })
            .into();
        };

        if alloc.mutability.is_not() {
            throw_ub_custom!(fluent::const_eval_dealloc_immutable, alloc = alloc_id,);
        }
        if alloc_kind != kind {
            throw_ub_custom!(
                fluent::const_eval_dealloc_kind_mismatch,
                alloc = alloc_id,
                alloc_kind = format!("{alloc_kind}"),
                kind = format!("{kind}"),
            );
        }
        if let Some((size, align)) = old_size_and_align {
            if size != alloc.size() || align != alloc.align {
                throw_ub_custom!(
                    fluent::const_eval_dealloc_incorrect_layout,
                    alloc = alloc_id,
                    size = alloc.size().bytes(),
                    align = alloc.align.bytes(),
                    size_found = size.bytes(),
                    align_found = align.bytes(),
                )
            }
        }

        // Let the machine take some extra action.
        let size = alloc.size();
        M::before_memory_deallocation(
            self.tcx,
            &mut self.machine,
            &mut alloc.extra,
            ptr,
            (alloc_id, prov),
            size,
            alloc.align,
            kind,
        )?;

        // Don't forget to remember the size and align of this now-dead allocation.
        let old = self.memory.dead_alloc_map.insert(alloc_id, (size, alloc.align));
        if old.is_some() {
            bug!("Nothing can be deallocated twice");
        }

        interp_ok(())
    }

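    /// Checks that the pointer is dereferenceable for an access of the given `size`
    /// and, for non-zero sizes, returns the allocation it points into.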
    #[inline(always)]
    fn get_ptr_access(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
    ) -> InterpResult<'tcx, Option<(AllocId, Size, M::ProvenanceExtra)>> {
        // It would be an error to even ask for more than `isize::MAX` bytes, so the cast is lossless.
        let size = i64::try_from(size.bytes()).unwrap();
        Self::check_and_deref_ptr(
            self,
            ptr,
            size,
            CheckInAllocMsg::MemoryAccess,
            |this, alloc_id, offset, prov| {
                let (size, align) =
                    this.get_live_alloc_size_and_align(alloc_id, CheckInAllocMsg::MemoryAccess)?;
                interp_ok((size, align, (alloc_id, offset, prov)))
            },
        )
    }

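    /// Checks that the pointer is dereferenceable for an access of the given `size`,
    /// reporting `msg` on failure. Alignment is *not* checked here.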
    #[inline(always)]
    pub fn check_ptr_access(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
        msg: CheckInAllocMsg,
    ) -> InterpResult<'tcx> {
        // It would be an error to even ask for more than `isize::MAX` bytes, so the cast is lossless.
        let size = i64::try_from(size.bytes()).unwrap();
        Self::check_and_deref_ptr(self, ptr, size, msg, |this, alloc_id, _, _| {
            let (size, align) = this.get_live_alloc_size_and_align(alloc_id, msg)?;
            interp_ok((size, align, ()))
        })?;
        interp_ok(())
    }

    /// Check whether the given pointer points to live memory for a signed amount of bytes.
    /// A negative amount means that the given range of memory to the *left* of the pointer
    /// needs to be dereferenceable.
    pub fn check_ptr_access_signed(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: i64,
        msg: CheckInAllocMsg,
    ) -> InterpResult<'tcx> {
        Self::check_and_deref_ptr(self, ptr, size, msg, |this, alloc_id, _, _| {
            let (size, align) = this.get_live_alloc_size_and_align(alloc_id, msg)?;
            interp_ok((size, align, ()))
        })?;
        interp_ok(())
    }

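    /// Low-level helper function to check if a ptr is in-bounds and potentially return a reference
    /// to the allocation it points to. Supports both shared and mutable references, as the actual
    /// checking is offloaded to a helper closure, which also controls what exactly is returned.
    /// Supports signed sizes, which encode checks "to the left" of the pointer.
    ///
    /// `alloc_size` will only get called for non-zero-sized accesses.
    ///
    /// Returns `None` if and only if the size is 0.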
    fn check_and_deref_ptr<T, R: Borrow<Self>>(
        this: R,
        ptr: Pointer<Option<M::Provenance>>,
        size: i64,
        msg: CheckInAllocMsg,
        alloc_size: impl FnOnce(
            R,
            AllocId,
            Size,
            M::ProvenanceExtra,
        ) -> InterpResult<'tcx, (Size, Align, T)>,
    ) -> InterpResult<'tcx, Option<T>> {
        // Everything is okay with size 0.
        if size == 0 {
            return interp_ok(None);
        }

        interp_ok(match this.borrow().ptr_try_get_alloc_id(ptr, size) {
            Err(addr) => {
                // We couldn't get a proper allocation.
                throw_ub!(DanglingIntPointer { addr, inbounds_size: size, msg });
            }
            Ok((alloc_id, offset, prov)) => {
                let tcx = this.borrow().tcx;
                let (alloc_size, _alloc_align, ret_val) = alloc_size(this, alloc_id, offset, prov)?;
                let offset = offset.bytes();
                // Compute the range being accessed; a negative `size` means the range
                // extends to the left of the pointer.
                let (begin, end) = if size >= 0 {
                    (Some(offset), offset.checked_add(size as u64))
                } else {
                    (offset.checked_sub(size.unsigned_abs()), Some(offset))
                };
                // Ensure both the begin and the end of the range are in bounds.
                let in_bounds = begin.is_some() && end.is_some_and(|e| e <= alloc_size.bytes());
                if !in_bounds {
                    throw_ub!(PointerOutOfBounds {
                        alloc_id,
                        alloc_size,
                        ptr_offset: tcx.sign_extend_to_target_isize(offset),
                        inbounds_size: size,
                        msg,
                    })
                }

                Some(ret_val)
            }
        })
    }

    pub(super) fn check_misalign(
        &self,
        misaligned: Option<Misalignment>,
        msg: CheckAlignMsg,
    ) -> InterpResult<'tcx> {
        if let Some(misaligned) = misaligned {
            throw_ub!(AlignmentCheckFailed(misaligned, msg))
        }
        interp_ok(())
    }

    pub(super) fn is_ptr_misaligned(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        align: Align,
    ) -> Option<Misalignment> {
        if !M::enforce_alignment(self) || align.bytes() == 1 {
            return None;
        }

        #[inline]
        fn is_offset_misaligned(offset: u64, align: Align) -> Option<Misalignment> {
            if offset.is_multiple_of(align.bytes()) {
                None
            } else {
                // The biggest power of two through which `offset` is divisible.
                let offset_pow2 = 1 << offset.trailing_zeros();
                Some(Misalignment { has: Align::from_bytes(offset_pow2).unwrap(), required: align })
            }
        }

        match self.ptr_try_get_alloc_id(ptr, 0) {
            Err(addr) => is_offset_misaligned(addr, align),
            Ok((alloc_id, offset, _prov)) => {
                let alloc_info = self.get_alloc_info(alloc_id);
                if let Some(misalign) = M::alignment_check(
                    self,
                    alloc_id,
                    alloc_info.align,
                    alloc_info.kind,
                    offset,
                    align,
                ) {
                    Some(misalign)
                } else if M::Provenance::OFFSET_IS_ADDR {
                    is_offset_misaligned(ptr.addr().bytes(), align)
                } else {
                    // The absolute address is not known, so we can only check the
                    // allocation's own alignment and the offset within it.
                    if alloc_info.align.bytes() < align.bytes() {
                        Some(Misalignment { has: alloc_info.align, required: align })
                    } else {
                        is_offset_misaligned(offset.bytes(), align)
                    }
                }
            }
        }
    }

    /// Checks a pointer for misalignment.
    ///
    /// The error assumes this is checking the pointer used directly for an access.
    pub fn check_ptr_align(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        align: Align,
    ) -> InterpResult<'tcx> {
        self.check_misalign(self.is_ptr_misaligned(ptr, align), CheckAlignMsg::AccessedPtr)
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
    /// This function is used by Miri's provenance GC to remove unreachable entries
    /// from the dead_alloc_map.
    pub fn remove_unreachable_allocs(&mut self, reachable_allocs: &FxHashSet<AllocId>) {
        // All the IDs in this map are for dead allocations, so we do not need
        // to check for liveness here.
        #[allow(rustc::potential_query_instability)] // we don't care about the order here
        self.memory.dead_alloc_map.retain(|id, _| reachable_allocs.contains(id));
    }
}

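/// Allocation accessors.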
impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
    /// Helper function to obtain a global (tcx) allocation.
    /// This attempts to return a reference to an existing allocation if possible;
    /// otherwise the machine has to copy and adjust it.
    fn get_global_alloc(
        &self,
        id: AllocId,
        is_write: bool,
    ) -> InterpResult<'tcx, Cow<'tcx, Allocation<M::Provenance, M::AllocExtra, M::Bytes>>> {
        let (alloc, def_id) = match self.tcx.try_get_global_alloc(id) {
            Some(GlobalAlloc::Memory(mem)) => {
                // Memory of a constant or promoted or anonymous memory referenced by a static.
                (mem, None)
            }
            Some(GlobalAlloc::Function { .. }) => throw_ub!(DerefFunctionPointer(id)),
            Some(GlobalAlloc::VTable(..)) => throw_ub!(DerefVTablePointer(id)),
            Some(GlobalAlloc::TypeId { .. }) => throw_ub!(DerefTypeIdPointer(id)),
            None => throw_ub!(PointerUseAfterFree(id, CheckInAllocMsg::MemoryAccess)),
            Some(GlobalAlloc::Static(def_id)) => {
                assert!(self.tcx.is_static(def_id));
                // Thread-local statics do not have a constant address; we can never
                // have a pointer to them as a regular constant value.
                assert!(!self.tcx.is_thread_local_static(def_id));
                if self.tcx.is_foreign_item(def_id) {
                    // We cannot know the value of an `extern static`.
                    throw_unsup!(ExternStatic(def_id));
                }

                // Evaluate the static's initializer to obtain its memory.
                let val = self.ctfe_query(|tcx| tcx.eval_static_initializer(def_id))?;
                (val, Some(def_id))
            }
        };
        M::before_access_global(self.tcx, &self.machine, id, alloc, def_id, is_write)?;
        // We got tcx memory. Let the machine initialize its "extra" stuff.
        M::adjust_global_allocation(
            self,
            id, // always use the ID we got as input, not the "hidden" one.
            alloc.inner(),
        )
    }

    /// Gives raw access to the `Allocation`, without bounds or alignment checks.
    /// The caller is responsible for calling the access hooks!
    ///
    /// You almost certainly want to use `get_ptr_alloc`/`get_ptr_alloc_mut` instead.
    pub fn get_alloc_raw(
        &self,
        id: AllocId,
    ) -> InterpResult<'tcx, &Allocation<M::Provenance, M::AllocExtra, M::Bytes>> {
        // The error type of the inner closure here is somewhat funny. We have two
        // ways of "erroring": an actual error, or getting a reference from
        // `get_global_alloc` that we can use directly without inserting anything
        // into the map. So the error type is `Result<&Allocation, InterpError>`.
        let a = self.memory.alloc_map.get_or(id, || {
            let alloc = self.get_global_alloc(id, false).report_err().map_err(Err)?;
            match alloc {
                Cow::Borrowed(alloc) => {
                    // We got a ref, cheaply return that as an "error" so that the
                    // map does not get mutated.
                    Err(Ok(alloc))
                }
                Cow::Owned(alloc) => {
                    // Need to put it into the map and return a ref to that.
                    let kind = M::GLOBAL_KIND.expect(
                        "I got a global allocation that I have to copy but the machine does \
                            not expect that to happen",
                    );
                    Ok((MemoryKind::Machine(kind), alloc))
                }
            }
        });
        // Now unpack that funny error type.
        match a {
            Ok(a) => interp_ok(&a.1),
            Err(a) => a.into(),
        }
    }

    /// Gives raw access to the underlying bytes of an allocation, without any checks.
    pub fn get_alloc_bytes_unchecked_raw(&self, id: AllocId) -> InterpResult<'tcx, *const u8> {
        let alloc = self.get_alloc_raw(id)?;
        interp_ok(alloc.get_bytes_unchecked_raw())
    }

    /// Bounds-checked *but not align-checked* allocation access.
    pub fn get_ptr_alloc<'a>(
        &'a self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
    ) -> InterpResult<'tcx, Option<AllocRef<'a, 'tcx, M::Provenance, M::AllocExtra, M::Bytes>>>
    {
        // It would be an error to even ask for more than `isize::MAX` bytes, so the cast is lossless.
        let size_i64 = i64::try_from(size.bytes()).unwrap();
        let ptr_and_alloc = Self::check_and_deref_ptr(
            self,
            ptr,
            size_i64,
            CheckInAllocMsg::MemoryAccess,
            |this, alloc_id, offset, prov| {
                let alloc = this.get_alloc_raw(alloc_id)?;
                interp_ok((alloc.size(), alloc.align, (alloc_id, offset, prov, alloc)))
            },
        )?;
        // We want to call the hook on *all* accesses that involve an AllocId, including
        // zero-sized accesses, so we cannot rely on the closure above or the `Some`
        // branch below. We do this after `check_and_deref_ptr` so that some basic
        // sanity has already been checked.
        if !self.memory.validation_in_progress.get() {
            if let Ok((alloc_id, ..)) = self.ptr_try_get_alloc_id(ptr, size_i64) {
                M::before_alloc_access(self.tcx, &self.machine, alloc_id)?;
            }
        }

        if let Some((alloc_id, offset, prov, alloc)) = ptr_and_alloc {
            let range = alloc_range(offset, size);
            if !self.memory.validation_in_progress.get() {
                M::before_memory_read(
                    self.tcx,
                    &self.machine,
                    &alloc.extra,
                    ptr,
                    (alloc_id, prov),
                    range,
                )?;
            }
            interp_ok(Some(AllocRef { alloc, range, tcx: *self.tcx, alloc_id }))
        } else {
            interp_ok(None)
        }
    }

    /// Return the `extra` field of the given allocation.
    pub fn get_alloc_extra<'a>(&'a self, id: AllocId) -> InterpResult<'tcx, &'a M::AllocExtra> {
        interp_ok(&self.get_alloc_raw(id)?.extra)
    }

    /// Return the `mutability` field of the given allocation.
    pub fn get_alloc_mutability<'a>(&'a self, id: AllocId) -> InterpResult<'tcx, Mutability> {
        interp_ok(self.get_alloc_raw(id)?.mutability)
    }

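    /// Gives raw mutable access to the `Allocation`, without bounds or alignment checks.
    /// The caller is responsible for calling the access hooks!
    ///
    /// Also returns a reference to the machine, so that both can be used at the same time.
    ///
    /// Mutably accessing a global allocation first copies it into the local
    /// allocation map ("copy on write").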
    pub fn get_alloc_raw_mut(
        &mut self,
        id: AllocId,
    ) -> InterpResult<'tcx, (&mut Allocation<M::Provenance, M::AllocExtra, M::Bytes>, &mut M)> {
        // We have "NLL problem case #3" here, which cannot be worked around without
        // loss of efficiency even for the common case where the key is in the map.
        // (Cannot use `get_mut_or` since `get_global_alloc` needs `&self`.)
        if self.memory.alloc_map.get_mut(id).is_none() {
            // Slow path: allocation not found locally, go look global.
            let alloc = self.get_global_alloc(id, true)?;
            let kind = M::GLOBAL_KIND.expect(
                "I got a global allocation that I have to copy but the machine does \
                    not expect that to happen",
            );
            self.memory.alloc_map.insert(id, (MemoryKind::Machine(kind), alloc.into_owned()));
        }

        let (_kind, alloc) = self.memory.alloc_map.get_mut(id).unwrap();
        if alloc.mutability.is_not() {
            throw_ub!(WriteToReadOnly(id))
        }
        interp_ok((alloc, &mut self.machine))
    }

    /// Gives raw, mutable access to the underlying bytes of an allocation,
    /// without any bounds checks.
    pub fn get_alloc_bytes_unchecked_raw_mut(
        &mut self,
        id: AllocId,
    ) -> InterpResult<'tcx, *mut u8> {
        let alloc = self.get_alloc_raw_mut(id)?.0;
        interp_ok(alloc.get_bytes_unchecked_raw_mut())
    }

    /// Bounds-checked *but not align-checked* mutable allocation access.
    pub fn get_ptr_alloc_mut<'a>(
        &'a mut self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
    ) -> InterpResult<'tcx, Option<AllocRefMut<'a, 'tcx, M::Provenance, M::AllocExtra, M::Bytes>>>
    {
        let tcx = self.tcx;
        let validation_in_progress = self.memory.validation_in_progress.get();

        // It would be an error to even ask for more than `isize::MAX` bytes, so the cast is lossless.
        let size_i64 = i64::try_from(size.bytes()).unwrap();
        let ptr_and_alloc = Self::check_and_deref_ptr(
            self,
            ptr,
            size_i64,
            CheckInAllocMsg::MemoryAccess,
            |this, alloc_id, offset, prov| {
                let (alloc, machine) = this.get_alloc_raw_mut(alloc_id)?;
                interp_ok((alloc.size(), alloc.align, (alloc_id, offset, prov, alloc, machine)))
            },
        )?;

        if let Some((alloc_id, offset, prov, alloc, machine)) = ptr_and_alloc {
            let range = alloc_range(offset, size);
            if !validation_in_progress {
                M::before_alloc_access(tcx, machine, alloc_id)?;
                M::before_memory_write(
                    tcx,
                    machine,
                    &mut alloc.extra,
                    ptr,
                    (alloc_id, prov),
                    range,
                )?;
            }
            interp_ok(Some(AllocRefMut { alloc, range, tcx: *tcx, alloc_id }))
        } else {
            interp_ok(None)
        }
    }

    /// Return the `extra` field of the given allocation, mutably.
    pub fn get_alloc_extra_mut<'a>(
        &'a mut self,
        id: AllocId,
    ) -> InterpResult<'tcx, (&'a mut M::AllocExtra, &'a mut M)> {
        let (alloc, machine) = self.get_alloc_raw_mut(id)?;
        interp_ok((&mut alloc.extra, machine))
    }

    /// Check whether an allocation is live (neither deallocated nor a dangling ID).
    pub fn is_alloc_live(&self, id: AllocId) -> bool {
        self.memory.alloc_map.contains_key_ref(&id)
            || self.memory.extra_fn_ptr_map.contains_key(&id)
            // Check `tcx` last, as it will likely never say an ID is dead, so the
            // local maps have to be consulted first.
            || self.tcx.try_get_global_alloc(id).is_some()
    }

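    /// Obtain the size, alignment, kind, and mutability of an allocation, even if
    /// that allocation has been deallocated.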
    pub fn get_alloc_info(&self, id: AllocId) -> AllocInfo {
        // # Regular allocations
        // Don't use `self.get_alloc_raw` here as that will
        // a) cause cycles in case `id` refers to a static
        // b) duplicate a global's allocation in miri
        if let Some((_, alloc)) = self.memory.alloc_map.get(id) {
            return AllocInfo::new(
                alloc.size(),
                alloc.align,
                AllocKind::LiveData,
                alloc.mutability,
            );
        }

        // # Function pointers
        // (both global from `alloc_map` and local from `extra_fn_ptr_map`)
        if let Some(fn_val) = self.get_fn_alloc(id) {
            let align = match fn_val {
                FnVal::Instance(instance) => {
                    self.tcx.codegen_fn_attrs(instance.def_id()).alignment.unwrap_or(Align::ONE)
                }
                // Machine-specific extra functions have no alignment attributes.
                FnVal::Other(_) => Align::ONE,
            };

            return AllocInfo::new(Size::ZERO, align, AllocKind::Function, Mutability::Not);
        }

        // # Global allocations
        if let Some(global_alloc) = self.tcx.try_get_global_alloc(id) {
            let (size, align) = global_alloc.size_and_align(*self.tcx, self.typing_env);
            let mutbl = global_alloc.mutability(*self.tcx, self.typing_env);
            let kind = match global_alloc {
                GlobalAlloc::TypeId { .. }
                | GlobalAlloc::Static { .. }
                | GlobalAlloc::Memory { .. } => AllocKind::LiveData,
                GlobalAlloc::Function { .. } => bug!("We already checked function pointers above"),
                GlobalAlloc::VTable { .. } => AllocKind::VTable,
            };
            return AllocInfo::new(size, align, kind, mutbl);
        }

        // # Dead pointers
        let (size, align) = *self
            .memory
            .dead_alloc_map
            .get(&id)
            .expect("deallocated pointers should all be recorded in `dead_alloc_map`");
        AllocInfo::new(size, align, AllocKind::Dead, Mutability::Not)
    }

    /// Obtain the size and alignment of a *live* allocation.
    fn get_live_alloc_size_and_align(
        &self,
        id: AllocId,
        msg: CheckInAllocMsg,
    ) -> InterpResult<'tcx, (Size, Align)> {
        let info = self.get_alloc_info(id);
        if matches!(info.kind, AllocKind::Dead) {
            throw_ub!(PointerUseAfterFree(id, msg))
        }
        interp_ok((info.size, info.align))
    }

    fn get_fn_alloc(&self, id: AllocId) -> Option<FnVal<'tcx, M::ExtraFnVal>> {
        if let Some(extra) = self.memory.extra_fn_ptr_map.get(&id) {
            Some(FnVal::Other(*extra))
        } else {
            match self.tcx.try_get_global_alloc(id) {
                Some(GlobalAlloc::Function { instance, .. }) => Some(FnVal::Instance(instance)),
                _ => None,
            }
        }
    }

    /// Takes a pointer that is the first chunk of a `TypeId` and returns the type that its
    /// provenance refers to, as well as the segment of the hash that this pointer covers.
    pub fn get_ptr_type_id(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
    ) -> InterpResult<'tcx, (Ty<'tcx>, Size)> {
        let (alloc_id, offset, _meta) = self.ptr_get_alloc_id(ptr, 0)?;
        let GlobalAlloc::TypeId { ty } = self.tcx.global_alloc(alloc_id) else {
            throw_ub_format!("type_id_eq: `TypeId` provenance is not a type id")
        };
        interp_ok((ty, offset))
    }

    pub fn get_ptr_fn(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
    ) -> InterpResult<'tcx, FnVal<'tcx, M::ExtraFnVal>> {
        trace!("get_ptr_fn({:?})", ptr);
        let (alloc_id, offset, _prov) = self.ptr_get_alloc_id(ptr, 0)?;
        if offset.bytes() != 0 {
            throw_ub!(InvalidFunctionPointer(Pointer::new(alloc_id, offset)))
        }
        self.get_fn_alloc(alloc_id)
            .ok_or_else(|| err_ub!(InvalidFunctionPointer(Pointer::new(alloc_id, offset))))
            .into()
    }

    /// Get the dynamic type of the given vtable pointer.
    /// If `expected_trait` is `Some`, it must be a vtable for the given trait.
    pub fn get_ptr_vtable_ty(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        expected_trait: Option<&'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>>,
    ) -> InterpResult<'tcx, Ty<'tcx>> {
        trace!("get_ptr_vtable({:?})", ptr);
        let (alloc_id, offset, _tag) = self.ptr_get_alloc_id(ptr, 0)?;
        if offset.bytes() != 0 {
            throw_ub!(InvalidVTablePointer(Pointer::new(alloc_id, offset)))
        }
        let Some(GlobalAlloc::VTable(ty, vtable_dyn_type)) =
            self.tcx.try_get_global_alloc(alloc_id)
        else {
            throw_ub!(InvalidVTablePointer(Pointer::new(alloc_id, offset)))
        };
        if let Some(expected_dyn_type) = expected_trait {
            self.check_vtable_for_type(vtable_dyn_type, expected_dyn_type)?;
        }
        interp_ok(ty)
    }

    /// Marks the given allocation as immutable.
    pub fn alloc_mark_immutable(&mut self, id: AllocId) -> InterpResult<'tcx> {
        self.get_alloc_raw_mut(id)?.0.mutability = Mutability::Not;
        interp_ok(())
    }

    /// Visits the allocations in `start` and all allocations reachable from them,
    /// in an unspecified order. The visitor is allowed to mutate `self`.
    pub fn visit_reachable_allocs(
        &mut self,
        start: Vec<AllocId>,
        mut visit: impl FnMut(&mut Self, AllocId, &AllocInfo) -> InterpResult<'tcx>,
    ) -> InterpResult<'tcx> {
        let mut done = FxHashSet::default();
        let mut todo = start;
        while let Some(id) = todo.pop() {
            if !done.insert(id) {
                // We already visited this allocation.
                continue;
            }
            let info = self.get_alloc_info(id);

            // We only have to look at the provenance of live data allocations;
            // the other kinds do not point to anything else.
            if matches!(info.kind, AllocKind::LiveData) {
                let alloc = self.get_alloc_raw(id)?;
                for prov in alloc.provenance().provenances() {
                    if let Some(id) = prov.get_alloc_id() {
                        todo.push(id);
                    }
                }
            }

            visit(self, id, &info)?;
        }
        interp_ok(())
    }

    /// Create a lazy debug printer that prints the given allocation and all allocations it
    /// points to, recursively.
    #[must_use]
    pub fn dump_alloc<'a>(&'a self, id: AllocId) -> DumpAllocs<'a, 'tcx, M> {
        self.dump_allocs(vec![id])
    }

    /// Create a lazy debug printer for a list of allocations and all allocations they point to,
    /// recursively.
    #[must_use]
    pub fn dump_allocs<'a>(&'a self, mut allocs: Vec<AllocId>) -> DumpAllocs<'a, 'tcx, M> {
        allocs.sort();
        allocs.dedup();
        DumpAllocs { ecx: self, allocs }
    }

    /// Print the allocation's bytes, without any nested allocations.
    pub fn print_alloc_bytes_for_diagnostics(&self, id: AllocId) -> String {
        // Use the "raw" access to avoid the machine hooks; this is a diagnostics-only read.
        let alloc = self.get_alloc_raw(id).unwrap();
        let mut bytes = String::new();
        if alloc.size() != Size::ZERO {
            bytes = "\n".into();
            rustc_middle::mir::pretty::write_allocation_bytes(*self.tcx, alloc, &mut bytes, " ")
                .unwrap();
        }
        bytes
    }

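    /// Find leaked allocations, remove them from memory, and return them.
    /// Allocations reachable from `static_roots` or from a global allocation are
    /// not considered leaked, and neither are allocations whose kind's `may_leak()`
    /// returns true.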
    pub fn take_leaked_allocations(
        &mut self,
        static_roots: impl FnOnce(&Self) -> &[AllocId],
    ) -> Vec<(AllocId, MemoryKind<M::MemoryKind>, Allocation<M::Provenance, M::AllocExtra, M::Bytes>)>
    {
        // Collect the set of allocations that are *reachable* from `Global` allocations.
        let reachable = {
            let mut reachable = FxHashSet::default();
            let global_kind = M::GLOBAL_KIND.map(MemoryKind::Machine);
            let mut todo: Vec<_> =
                self.memory.alloc_map.filter_map_collect(move |&id, &(kind, _)| {
                    if Some(kind) == global_kind { Some(id) } else { None }
                });
            todo.extend(static_roots(self));
            while let Some(id) = todo.pop() {
                if reachable.insert(id) {
                    // This is a new allocation, add the allocations it points to `todo`.
                    // We only need to care about `alloc_map` memory here, as entirely
                    // unchanged global memory cannot point to memory relevant for the leak check.
                    if let Some((_, alloc)) = self.memory.alloc_map.get(id) {
                        todo.extend(
                            alloc.provenance().provenances().filter_map(|prov| prov.get_alloc_id()),
                        );
                    }
                }
            }
            reachable
        };

        // All allocations that are *not* `reachable` and *not* `may_leak` are considered leaking.
        let leaked: Vec<_> = self.memory.alloc_map.filter_map_collect(|&id, &(kind, _)| {
            if kind.may_leak() || reachable.contains(&id) { None } else { Some(id) }
        });
        let mut result = Vec::new();
        for &id in leaked.iter() {
            let (kind, alloc) = self.memory.alloc_map.remove(&id).unwrap();
            result.push((id, kind, alloc));
        }
        result
    }

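    /// Runs the closure in "validation" mode, meaning the machine's memory access
    /// hooks will be suppressed. Must only be used with great care, and cannot be
    /// nested (see the assertions on `validation_in_progress`).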
    pub fn run_for_validation_mut<R>(&mut self, f: impl FnOnce(&mut Self) -> R) -> R {
        // This deliberately uses `==` on `bool` to follow the pattern
        // `assert!(val.replace(new) == old)`.
        assert!(
            self.memory.validation_in_progress.replace(true) == false,
            "`validation_in_progress` was already set"
        );
        let res = f(self);
        assert!(
            self.memory.validation_in_progress.replace(false) == true,
            "`validation_in_progress` was unset by someone else"
        );
        res
    }

    /// Like `run_for_validation_mut`, but takes the closure by shared reference.
    pub fn run_for_validation_ref<R>(&self, f: impl FnOnce(&Self) -> R) -> R {
        // This deliberately uses `==` on `bool` to follow the pattern
        // `assert!(val.replace(new) == old)`.
        assert!(
            self.memory.validation_in_progress.replace(true) == false,
            "`validation_in_progress` was already set"
        );
        let res = f(self);
        assert!(
            self.memory.validation_in_progress.replace(false) == true,
            "`validation_in_progress` was unset by someone else"
        );
        res
    }

    pub(super) fn validation_in_progress(&self) -> bool {
        self.memory.validation_in_progress.get()
    }
}

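/// Helper struct for the `dump_alloc(s)` methods above: prints the given
/// allocations, and all allocations they point to, via its `Debug` impl.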
#[doc(hidden)]
pub struct DumpAllocs<'a, 'tcx, M: Machine<'tcx>> {
    ecx: &'a InterpCx<'tcx, M>,
    allocs: Vec<AllocId>,
}

impl<'a, 'tcx, M: Machine<'tcx>> std::fmt::Debug for DumpAllocs<'a, 'tcx, M> {
    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Cannot be a closure because it is generic in `Prov`, `Extra`.
        fn write_allocation_track_relocs<'tcx, Prov: Provenance, Extra, Bytes: AllocBytes>(
            fmt: &mut std::fmt::Formatter<'_>,
            tcx: TyCtxt<'tcx>,
            allocs_to_print: &mut VecDeque<AllocId>,
            alloc: &Allocation<Prov, Extra, Bytes>,
        ) -> std::fmt::Result {
            for alloc_id in alloc.provenance().provenances().filter_map(|prov| prov.get_alloc_id())
            {
                allocs_to_print.push_back(alloc_id);
            }
            write!(fmt, "{}", display_allocation(tcx, alloc))
        }

        let mut allocs_to_print: VecDeque<_> = self.allocs.iter().copied().collect();
        // `allocs_printed` contains all allocations that we have already printed.
        let mut allocs_printed = FxHashSet::default();

        while let Some(id) = allocs_to_print.pop_front() {
            if !allocs_printed.insert(id) {
                // Already printed, so skip this.
                continue;
            }

            write!(fmt, "{id:?}")?;
            match self.ecx.memory.alloc_map.get(id) {
                Some((kind, alloc)) => {
                    // normal alloc
                    write!(fmt, " ({kind}, ")?;
                    write_allocation_track_relocs(
                        &mut *fmt,
                        *self.ecx.tcx,
                        &mut allocs_to_print,
                        alloc,
                    )?;
                }
                None => {
                    // global alloc
                    match self.ecx.tcx.try_get_global_alloc(id) {
                        Some(GlobalAlloc::Memory(alloc)) => {
                            write!(fmt, " (unchanged global, ")?;
                            write_allocation_track_relocs(
                                &mut *fmt,
                                *self.ecx.tcx,
                                &mut allocs_to_print,
                                alloc.inner(),
                            )?;
                        }
                        Some(GlobalAlloc::Function { instance, .. }) => {
                            write!(fmt, " (fn: {instance})")?;
                        }
                        Some(GlobalAlloc::VTable(ty, dyn_ty)) => {
                            write!(fmt, " (vtable: impl {dyn_ty} for {ty})")?;
                        }
                        Some(GlobalAlloc::TypeId { ty }) => {
                            write!(fmt, " (typeid for {ty})")?;
                        }
                        Some(GlobalAlloc::Static(did)) => {
                            write!(fmt, " (static: {})", self.ecx.tcx.def_path_str(did))?;
                        }
                        None => {
                            write!(fmt, " (deallocated)")?;
                        }
                    }
                }
            }
            writeln!(fmt)?;
        }
        Ok(())
    }
}

impl<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes>
    AllocRefMut<'a, 'tcx, Prov, Extra, Bytes>
{
    pub fn as_ref<'b>(&'b self) -> AllocRef<'b, 'tcx, Prov, Extra, Bytes> {
        AllocRef { alloc: self.alloc, range: self.range, tcx: self.tcx, alloc_id: self.alloc_id }
    }

    /// `range` is relative to this allocation reference, not the base of the allocation.
    pub fn write_scalar(&mut self, range: AllocRange, val: Scalar<Prov>) -> InterpResult<'tcx> {
        let range = self.range.subrange(range);
        debug!("write_scalar at {:?}{range:?}: {val:?}", self.alloc_id);

        self.alloc
            .write_scalar(&self.tcx, range, val)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    /// `offset` is relative to this allocation reference, not the base of the allocation.
    pub fn write_ptr_sized(&mut self, offset: Size, val: Scalar<Prov>) -> InterpResult<'tcx> {
        self.write_scalar(alloc_range(offset, self.tcx.data_layout().pointer_size()), val)
    }

    /// Mark the given sub-range (relative to this allocation reference) as uninitialized.
    pub fn write_uninit(&mut self, range: AllocRange) -> InterpResult<'tcx> {
        let range = self.range.subrange(range);

        self.alloc
            .write_uninit(&self.tcx, range)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    /// Mark the entire referenced range as uninitialized.
    pub fn write_uninit_full(&mut self) -> InterpResult<'tcx> {
        self.alloc
            .write_uninit(&self.tcx, self.range)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    /// Remove all provenance in the referenced range.
    pub fn clear_provenance(&mut self) -> InterpResult<'tcx> {
        self.alloc
            .clear_provenance(&self.tcx, self.range)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }
}

impl<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes> AllocRef<'a, 'tcx, Prov, Extra, Bytes> {
    /// `range` is relative to this allocation reference, not the base of the allocation.
    pub fn read_scalar(
        &self,
        range: AllocRange,
        read_provenance: bool,
    ) -> InterpResult<'tcx, Scalar<Prov>> {
        let range = self.range.subrange(range);
        self.alloc
            .read_scalar(&self.tcx, range, read_provenance)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    /// `range` is relative to this allocation reference, not the base of the allocation.
    pub fn read_integer(&self, range: AllocRange) -> InterpResult<'tcx, Scalar<Prov>> {
        self.read_scalar(range, false)
    }

    /// `offset` is relative to this allocation reference, not the base of the allocation.
    pub fn read_pointer(&self, offset: Size) -> InterpResult<'tcx, Scalar<Prov>> {
        self.read_scalar(alloc_range(offset, self.tcx.data_layout().pointer_size()), true)
    }

    /// `range` is relative to this allocation reference, not the base of the allocation.
    pub fn get_bytes_strip_provenance<'b>(&'b self) -> InterpResult<'tcx, &'a [u8]> {
        self.alloc
            .get_bytes_strip_provenance(&self.tcx, self.range)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    /// Returns whether the allocation has provenance anywhere in the range of the `AllocRef`.
    pub fn has_provenance(&self) -> bool {
        !self.alloc.provenance().range_empty(self.range, &self.tcx)
    }
}

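/// Reading and writing.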
impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
    /// Reads the given number of bytes from memory, and strips their provenance if possible.
    /// Returns them as a slice.
    ///
    /// Performs appropriate bounds checks.
    pub fn read_bytes_ptr_strip_provenance(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
    ) -> InterpResult<'tcx, &[u8]> {
        let Some(alloc_ref) = self.get_ptr_alloc(ptr, size)? else {
            // zero-sized access
            return interp_ok(&[]);
        };
        // Side-step `AllocRef::get_bytes_strip_provenance` and use the allocation
        // reference's fields directly; the bounds check already happened.
        interp_ok(
            alloc_ref
                .alloc
                .get_bytes_strip_provenance(&alloc_ref.tcx, alloc_ref.range)
                .map_err(|e| e.to_interp_error(alloc_ref.alloc_id))?,
        )
    }

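    /// Writes the given stream of bytes into memory.
    ///
    /// Performs appropriate bounds checks.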
    pub fn write_bytes_ptr(
        &mut self,
        ptr: Pointer<Option<M::Provenance>>,
        src: impl IntoIterator<Item = u8>,
    ) -> InterpResult<'tcx> {
        let mut src = src.into_iter();
        let (lower, upper) = src.size_hint();
        let len = upper.expect("can only write bounded iterators");
        assert_eq!(lower, len, "can only write iterators with a precise length");

        let size = Size::from_bytes(len);
        let Some(alloc_ref) = self.get_ptr_alloc_mut(ptr, size)? else {
            // zero-sized access
            assert_matches!(src.next(), None, "iterator said it was empty but returned an element");
            return interp_ok(());
        };

        // Side-step `AllocRef::write_scalar` since we are writing a raw byte stream;
        // access the underlying bytes directly (the bounds check already happened).
        let alloc_id = alloc_ref.alloc_id;
        let bytes = alloc_ref
            .alloc
            .get_bytes_unchecked_for_overwrite(&alloc_ref.tcx, alloc_ref.range)
            .map_err(move |e| e.to_interp_error(alloc_id))?;
        // `zip` would stop when the first iterator ends; we want to definitely
        // cover all of `bytes`.
        for dest in bytes {
            *dest = src.next().expect("iterator was shorter than it said it would be");
        }
        assert_matches!(src.next(), None, "iterator was longer than it said it would be");
        interp_ok(())
    }

    pub fn mem_copy(
        &mut self,
        src: Pointer<Option<M::Provenance>>,
        dest: Pointer<Option<M::Provenance>>,
        size: Size,
        nonoverlapping: bool,
    ) -> InterpResult<'tcx> {
        self.mem_copy_repeatedly(src, dest, size, 1, nonoverlapping)
    }

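    /// Performs `num_copies` many copies of `size` many bytes from `src` to `dest + i*size` (where
    /// `i` is the index of the copy).
    ///
    /// Either `nonoverlapping` must be true or `num_copies` must be 1; doing repeated copies that
    /// may overlap is not supported.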
    pub fn mem_copy_repeatedly(
        &mut self,
        src: Pointer<Option<M::Provenance>>,
        dest: Pointer<Option<M::Provenance>>,
        size: Size,
        num_copies: u64,
        nonoverlapping: bool,
    ) -> InterpResult<'tcx> {
        let tcx = self.tcx;
        // We need to do our own bounds-checks.
        let src_parts = self.get_ptr_access(src, size)?;
        let dest_parts = self.get_ptr_access(dest, size * num_copies)?; // `Size` multiplication

        // Similar to `get_ptr_alloc`, the alloc-access hook has to run even for
        // zero-sized reads, so we cannot rely on `src_parts` being `Some`.
        if let Ok((alloc_id, ..)) = self.ptr_try_get_alloc_id(src, size.bytes().try_into().unwrap())
        {
            M::before_alloc_access(tcx, &self.machine, alloc_id)?;
        }

        // FIRST: collect the data from the source.
        let Some((src_alloc_id, src_offset, src_prov)) = src_parts else {
            // Zero-sized *source*, that means `dest` is also zero-sized and we have nothing to do.
            return interp_ok(());
        };
        let src_alloc = self.get_alloc_raw(src_alloc_id)?;
        let src_range = alloc_range(src_offset, size);
        assert!(!self.memory.validation_in_progress.get(), "we can't be copying during validation");

        // Run the memory-read hook for the source.
        M::before_memory_read(
            tcx,
            &self.machine,
            &src_alloc.extra,
            src,
            (src_alloc_id, src_prov),
            src_range,
        )?;
        // We already did the source checks and called the hooks, so we are good to return early.
        let Some((dest_alloc_id, dest_offset, dest_prov)) = dest_parts else {
            // Zero-sized *destination*.
            return interp_ok(());
        };

        // Prepare getting source provenance.
        let src_bytes = src_alloc.get_bytes_unchecked(src_range).as_ptr(); // raw ptr, so we can also get a ptr to the destination allocation
        // First copy the provenance to a temporary buffer, because
        // `get_bytes_mut` will clear the provenance, which is correct,
        // since we don't want to keep any provenance at the target.
        let provenance = src_alloc
            .provenance()
            .prepare_copy(src_range, dest_offset, num_copies, self)
            .map_err(|e| e.to_interp_error(src_alloc_id))?;
        // Prepare a copy of the initialization mask.
        let init = src_alloc.init_mask().prepare_copy(src_range);

        // Destination alloc preparations...
        let (dest_alloc, machine) = self.get_alloc_raw_mut(dest_alloc_id)?;
        let dest_range = alloc_range(dest_offset, size * num_copies);
        // ...and access hooks.
        M::before_alloc_access(tcx, machine, dest_alloc_id)?;
        M::before_memory_write(
            tcx,
            machine,
            &mut dest_alloc.extra,
            dest,
            (dest_alloc_id, dest_prov),
            dest_range,
        )?;
        // Yes, we do overwrite all the bytes in `dest_range`.
        let dest_bytes = dest_alloc
            .get_bytes_unchecked_for_overwrite_ptr(&tcx, dest_range)
            .map_err(|e| e.to_interp_error(dest_alloc_id))?
            .as_mut_ptr();

        if init.no_bytes_init() {
            // Fast path: if all bytes are `uninit` then there is nothing to copy. The target range
            // is marked as uninitialized but we otherwise omit changing the byte representation,
            // which may be arbitrary for uninitialized bytes.
            // This also avoids writing to the target bytes so that the backing allocation is never
            // touched if the bytes stay uninitialized.
            dest_alloc
                .write_uninit(&tcx, dest_range)
                .map_err(|e| e.to_interp_error(dest_alloc_id))?;
            // We can forget about the provenance, this is all not initialized anyway.
            return interp_ok(());
        }

        // SAFETY: the bounds checks above ensure `src_bytes` is valid for reading `size` bytes
        // and `dest_bytes` is valid for writing `size * num_copies` bytes.
        unsafe {
            if src_alloc_id == dest_alloc_id {
                if nonoverlapping {
                    // `Size` additions
                    if (src_offset <= dest_offset && src_offset + size > dest_offset)
                        || (dest_offset <= src_offset && dest_offset + size > src_offset)
                    {
                        throw_ub_custom!(fluent::const_eval_copy_nonoverlapping_overlapping);
                    }
                }
            }
            if num_copies > 1 {
                assert!(nonoverlapping, "multi-copy only supported in non-overlapping mode");
            }

            let size_in_bytes = size.bytes_usize();
            // When repeatedly writing a single byte, a memset is much faster than a copy loop.
            if size_in_bytes == 1 {
                debug_assert!(num_copies >= 1); // we already handled the zero-sized cases above
                let value = *src_bytes;
                dest_bytes.write_bytes(value, (size * num_copies).bytes_usize());
            } else if src_alloc_id == dest_alloc_id {
                let mut dest_ptr = dest_bytes;
                for _ in 0..num_copies {
                    // `ptr::copy` handles overlapping ranges within the same allocation.
                    ptr::copy(src_bytes, dest_ptr, size_in_bytes);
                    dest_ptr = dest_ptr.add(size_in_bytes);
                }
            } else {
                let mut dest_ptr = dest_bytes;
                for _ in 0..num_copies {
                    ptr::copy_nonoverlapping(src_bytes, dest_ptr, size_in_bytes);
                    dest_ptr = dest_ptr.add(size_in_bytes);
                }
            }
        }

        // Now fill in all the "init" data.
        dest_alloc.init_mask_apply_copy(
            init,
            alloc_range(dest_offset, size), // just a single copy (i.e., not full `dest_range`)
            num_copies,
        );
        // Copy the provenance to the destination.
        dest_alloc.provenance_apply_copy(provenance);

        interp_ok(())
    }
}

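/// Machine pointer introspection.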
impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
    /// Test if this value might be null.
    /// If the machine does not support ptr-to-int casts, this is conservative.
    pub fn scalar_may_be_null(&self, scalar: Scalar<M::Provenance>) -> InterpResult<'tcx, bool> {
        match scalar.try_to_scalar_int() {
            Ok(int) => interp_ok(int.is_null()),
            Err(_) => {
                // Not an integer, so it must be a pointer with provenance.
                let ptr = scalar.to_pointer(self)?;
                match self.ptr_try_get_alloc_id(ptr, 0) {
                    Ok((alloc_id, offset, _)) => {
                        let info = self.get_alloc_info(alloc_id);
                        // If the pointer is in-bounds (including "at the end"), it is definitely not null.
                        if offset <= info.size {
                            return interp_ok(false);
                        }
                        // The allocation base is a multiple of `info.align`, and so is the null
                        // address (0). So if `offset` is not a multiple of the alignment, then
                        // `base + offset` cannot be null either.
                        if !offset.bytes().is_multiple_of(info.align.bytes()) {
                            return interp_ok(false);
                        }
                        // We don't know enough; conservatively say this may be null.
                        interp_ok(true)
                    }
                    Err(_offset) => bug!("a non-int scalar is always a pointer"),
                }
            }
        }
    }

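    /// Turning a "maybe pointer" (capable of being an integer address) into a proper pointer
    /// (and some information about where it points), or an absolute address if it has no
    /// usable provenance.
    ///
    /// `size` says how many bytes of memory are expected at that pointer. This is largely only
    /// used for error messages; however, the *sign* of `size` can be used to disambiguate
    /// situations where a wildcard pointer sits right in between two allocations.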
    pub fn ptr_try_get_alloc_id(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: i64,
    ) -> Result<(AllocId, Size, M::ProvenanceExtra), u64> {
        match ptr.into_pointer_or_addr() {
            Ok(ptr) => match M::ptr_get_alloc(self, ptr, size) {
                Some((alloc_id, offset, extra)) => Ok((alloc_id, offset, extra)),
                None => {
                    assert!(M::Provenance::OFFSET_IS_ADDR);
                    // With `OFFSET_IS_ADDR`, the offset *is* the absolute address.
                    let (_, addr) = ptr.into_raw_parts();
                    Err(addr.bytes())
                }
            },
            Err(addr) => Err(addr.bytes()),
        }
    }

    /// Like `ptr_try_get_alloc_id`, but errors (with `DanglingIntPointer`) instead of
    /// returning an absolute address when the pointer has no provenance.
    #[inline(always)]
    pub fn ptr_get_alloc_id(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: i64,
    ) -> InterpResult<'tcx, (AllocId, Size, M::ProvenanceExtra)> {
        self.ptr_try_get_alloc_id(ptr, size)
            .map_err(|offset| {
                err_ub!(DanglingIntPointer {
                    addr: offset,
                    inbounds_size: size,
                    msg: CheckInAllocMsg::Dereferenceable
                })
            })
            .into()
    }
}