use std::assert_matches::assert_matches;
use std::borrow::{Borrow, Cow};
use std::collections::VecDeque;
use std::{fmt, mem, ptr};

use rustc_abi::{Align, HasDataLayout, Size};
use rustc_ast::Mutability;
use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
use rustc_middle::bug;
use rustc_middle::mir::display_allocation;
use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
use tracing::{debug, instrument, trace};

use super::{
    AllocBytes, AllocId, AllocInit, AllocMap, AllocRange, Allocation, CheckAlignMsg,
    CheckInAllocMsg, CtfeProvenance, GlobalAlloc, InterpCx, InterpResult, Machine, MayLeak,
    Misalignment, Pointer, PointerArithmetic, Provenance, Scalar, alloc_range, err_ub,
    err_ub_custom, interp_ok, throw_ub, throw_ub_custom, throw_unsup, throw_unsup_format,
};
use crate::fluent_generated as fluent;

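/// The "kind" of interpreter-managed memory. Allocation and deallocation must use
/// matching kinds.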
#[derive(Debug, PartialEq, Copy, Clone)]
pub enum MemoryKind<T> {
    /// Stack memory. Error if deallocated except during a stack pop.
    Stack,
    /// Memory allocated by `caller_location` intrinsic. Error if ever deallocated.
    CallerLocation,
    /// Additional memory kinds a machine wishes to distinguish from the builtin ones.
    Machine(T),
}

impl<T: MayLeak> MayLeak for MemoryKind<T> {
    #[inline]
    fn may_leak(self) -> bool {
        match self {
            MemoryKind::Stack => false,
            MemoryKind::CallerLocation => true,
            MemoryKind::Machine(k) => k.may_leak(),
        }
    }
}

impl<T: fmt::Display> fmt::Display for MemoryKind<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            MemoryKind::Stack => write!(f, "stack variable"),
            MemoryKind::CallerLocation => write!(f, "caller location"),
            MemoryKind::Machine(m) => write!(f, "{m}"),
        }
    }
}

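/// The kind of an allocation, as reported as part of [`AllocInfo`].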
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum AllocKind {
    /// A regular live data allocation.
    LiveData,
    /// A function allocation (that fn ptrs point to).
    Function,
    /// A vtable allocation.
    VTable,
    /// The allocation has been deallocated.
    Dead,
}

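/// Metadata about an `AllocId`.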
#[derive(Copy, Clone, PartialEq, Debug)]
pub struct AllocInfo {
    pub size: Size,
    pub align: Align,
    pub kind: AllocKind,
    pub mutbl: Mutability,
}

impl AllocInfo {
    fn new(size: Size, align: Align, kind: AllocKind, mutbl: Mutability) -> Self {
        Self { size, align, kind, mutbl }
    }
}

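/// The value of a function pointer.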
#[derive(Debug, Copy, Clone)]
pub enum FnVal<'tcx, Other> {
    Instance(Instance<'tcx>),
    Other(Other),
}

impl<'tcx, Other> FnVal<'tcx, Other> {
    pub fn as_instance(self) -> InterpResult<'tcx, Instance<'tcx>> {
        match self {
            FnVal::Instance(instance) => interp_ok(instance),
            FnVal::Other(_) => {
                throw_unsup_format!("'foreign' function pointers are not supported in this context")
            }
        }
    }
}

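/// The memory of the interpreter: all locally created allocations, plus bookkeeping
/// for "extra" function pointers and for deallocated memory.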
pub struct Memory<'tcx, M: Machine<'tcx>> {
    /// Allocations local to this instance of the interpreter. The kind helps ensure
    /// that the same mechanism is used for allocation and deallocation. When an
    /// allocation is not found here, it is a global and looked up in `tcx`; writing
    /// to a global first creates a machine-local copy here.
    pub(super) alloc_map: M::MemoryMap,

    /// Map for "extra" function pointers created by the machine.
    extra_fn_ptr_map: FxIndexMap<AllocId, M::ExtraFnVal>,

    /// To be able to compare pointers with null, and to check alignment for accesses
    /// to ZSTs (where pointers may dangle), we keep track of the size even for dead
    /// allocations.
    pub(super) dead_alloc_map: FxIndexMap<AllocId, (Size, Align)>,

    /// Whether we are currently performing reads purely for the purpose of validation.
    /// Such reads do not trigger the machine's memory-read hooks.
    validation_in_progress: bool,
}

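/// A reference to some allocation that was already bounds-checked for the given region.
/// `AllocRef` provides shared access; `AllocRefMut` (below) additionally permits writes.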
#[derive(Copy, Clone)]
pub struct AllocRef<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes = Box<[u8]>> {
    alloc: &'a Allocation<Prov, Extra, Bytes>,
    range: AllocRange,
    tcx: TyCtxt<'tcx>,
    alloc_id: AllocId,
}

pub struct AllocRefMut<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes = Box<[u8]>> {
    alloc: &'a mut Allocation<Prov, Extra, Bytes>,
    range: AllocRange,
    tcx: TyCtxt<'tcx>,
    alloc_id: AllocId,
}

impl<'tcx, M: Machine<'tcx>> Memory<'tcx, M> {
    pub fn new() -> Self {
        Memory {
            alloc_map: M::MemoryMap::default(),
            extra_fn_ptr_map: FxIndexMap::default(),
            dead_alloc_map: FxIndexMap::default(),
            validation_in_progress: false,
        }
    }

    /// Gives direct access to the raw underlying allocation map.
    pub fn alloc_map(&self) -> &M::MemoryMap {
        &self.alloc_map
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
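    /// Call this to turn untagged "global" pointers (obtained via `tcx`) into pointers with
    /// machine-specific provenance. This function can fail only if `ptr` points to an
    /// `extern static`.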
    #[inline]
    pub fn global_root_pointer(
        &self,
        ptr: Pointer<CtfeProvenance>,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        let alloc_id = ptr.provenance.alloc_id();
        // We need to handle `extern static` specially.
        match self.tcx.try_get_global_alloc(alloc_id) {
            Some(GlobalAlloc::Static(def_id)) if self.tcx.is_thread_local_static(def_id) => {
                // Thread-local statics do not have a constant address; they have to be
                // duplicated per thread, so global memory cannot point to them.
                bug!("global memory cannot point to thread-local static")
            }
            Some(GlobalAlloc::Static(def_id)) if self.tcx.is_foreign_item(def_id) => {
                return M::extern_static_pointer(self, def_id);
            }
            None => {
                assert!(
                    self.memory.extra_fn_ptr_map.contains_key(&alloc_id),
                    "{alloc_id:?} is neither global nor a function pointer"
                );
            }
            _ => {}
        }
        // And we need to get the provenance.
        M::adjust_alloc_root_pointer(self, ptr, M::GLOBAL_KIND.map(MemoryKind::Machine))
    }

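    /// Turns the given function value into a pointer with machine provenance.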
    pub fn fn_ptr(&mut self, fn_val: FnVal<'tcx, M::ExtraFnVal>) -> Pointer<M::Provenance> {
        let id = match fn_val {
            FnVal::Instance(instance) => {
                let salt = M::get_global_alloc_salt(self, Some(instance));
                self.tcx.reserve_and_set_fn_alloc(instance, salt)
            }
            FnVal::Other(extra) => {
                // Allocate a fresh ID for this "extra" function value.
                let id = self.tcx.reserve_alloc_id();
                let old = self.memory.extra_fn_ptr_map.insert(id, extra);
                assert!(old.is_none());
                id
            }
        };
        // Functions are global allocations, and not `extern static`s, so the root
        // pointer adjustment cannot fail.
        self.global_root_pointer(Pointer::from(id)).unwrap()
    }

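    /// Allocates fresh memory of the given size and alignment; `init` determines whether
    /// the new memory starts out initialized.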
    pub fn allocate_ptr(
        &mut self,
        size: Size,
        align: Align,
        kind: MemoryKind<M::MemoryKind>,
        init: AllocInit,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        let alloc = if M::PANIC_ON_ALLOC_FAIL {
            Allocation::new(size, align, init)
        } else {
            Allocation::try_new(size, align, init)?
        };
        self.insert_allocation(alloc, kind)
    }

    /// Allocates a new allocation holding the given bytes, with the given alignment and
    /// mutability.
    pub fn allocate_bytes_ptr(
        &mut self,
        bytes: &[u8],
        align: Align,
        kind: MemoryKind<M::MemoryKind>,
        mutability: Mutability,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        let alloc = Allocation::from_bytes(bytes, align, mutability);
        self.insert_allocation(alloc, kind)
    }

    /// Registers a new allocation in the machine-local memory map and returns a pointer to it.
    pub fn insert_allocation(
        &mut self,
        alloc: Allocation<M::Provenance, (), M::Bytes>,
        kind: MemoryKind<M::MemoryKind>,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        assert!(alloc.size() <= self.max_size_of_val());
        let id = self.tcx.reserve_alloc_id();
        debug_assert_ne!(
            Some(kind),
            M::GLOBAL_KIND.map(MemoryKind::Machine),
            "dynamically allocating global memory"
        );
        // This cannot go through the `adjust_global_allocation` code path since here we
        // already have an allocation that uses `M::Bytes`.
        let extra = M::init_local_allocation(self, id, kind, alloc.size(), alloc.align)?;
        let alloc = alloc.with_extra(extra);
        self.memory.alloc_map.insert(id, (kind, alloc));
        M::adjust_alloc_root_pointer(self, Pointer::from(id), Some(kind))
    }

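    /// Reallocates the memory behind `ptr` to the new size and alignment: this allocates new
    /// memory, copies over the part of the old contents that fits, and then deallocates the
    /// old allocation. `init_growth` determines whether newly added memory is initialized.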
    pub fn reallocate_ptr(
        &mut self,
        ptr: Pointer<Option<M::Provenance>>,
        old_size_and_align: Option<(Size, Align)>,
        new_size: Size,
        new_align: Align,
        kind: MemoryKind<M::MemoryKind>,
        init_growth: AllocInit,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        let (alloc_id, offset, _prov) = self.ptr_get_alloc_id(ptr, 0)?;
        if offset.bytes() != 0 {
            throw_ub_custom!(
                fluent::const_eval_realloc_or_alloc_with_offset,
                ptr = format!("{ptr:?}"),
                kind = "realloc"
            );
        }

        // For simplicity we always allocate fresh memory, even if `new_size == old_size`.
        let new_ptr = self.allocate_ptr(new_size, new_align, kind, init_growth)?;
        let old_size = match old_size_and_align {
            Some((size, _align)) => size,
            None => self.get_alloc_raw(alloc_id)?.size(),
        };
        // This will also call the access hooks.
        self.mem_copy(ptr, new_ptr.into(), old_size.min(new_size), true)?;
        self.deallocate_ptr(ptr, old_size_and_align, kind)?;

        interp_ok(new_ptr)
    }

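    /// Deallocates the memory behind `ptr`, checking that the deallocation matches the
    /// allocation's kind and (if provided) its size and alignment.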
    #[instrument(skip(self), level = "debug")]
    pub fn deallocate_ptr(
        &mut self,
        ptr: Pointer<Option<M::Provenance>>,
        old_size_and_align: Option<(Size, Align)>,
        kind: MemoryKind<M::MemoryKind>,
    ) -> InterpResult<'tcx> {
        let (alloc_id, offset, prov) = self.ptr_get_alloc_id(ptr, 0)?;
        trace!("deallocating: {alloc_id:?}");

        if offset.bytes() != 0 {
            throw_ub_custom!(
                fluent::const_eval_realloc_or_alloc_with_offset,
                ptr = format!("{ptr:?}"),
                kind = "dealloc",
            );
        }

        let Some((alloc_kind, mut alloc)) = self.memory.alloc_map.remove(&alloc_id) else {
            // Deallocating global memory -- always an error.
            return Err(match self.tcx.try_get_global_alloc(alloc_id) {
                Some(GlobalAlloc::Function { .. }) => {
                    err_ub_custom!(
                        fluent::const_eval_invalid_dealloc,
                        alloc_id = alloc_id,
                        kind = "fn",
                    )
                }
                Some(GlobalAlloc::VTable(..)) => {
                    err_ub_custom!(
                        fluent::const_eval_invalid_dealloc,
                        alloc_id = alloc_id,
                        kind = "vtable",
                    )
                }
                Some(GlobalAlloc::Static(..) | GlobalAlloc::Memory(..)) => {
                    err_ub_custom!(
                        fluent::const_eval_invalid_dealloc,
                        alloc_id = alloc_id,
                        kind = "static_mem"
                    )
                }
                None => err_ub!(PointerUseAfterFree(alloc_id, CheckInAllocMsg::MemoryAccessTest)),
            })
            .into();
        };

        if alloc.mutability.is_not() {
            throw_ub_custom!(fluent::const_eval_dealloc_immutable, alloc = alloc_id,);
        }
        if alloc_kind != kind {
            throw_ub_custom!(
                fluent::const_eval_dealloc_kind_mismatch,
                alloc = alloc_id,
                alloc_kind = format!("{alloc_kind}"),
                kind = format!("{kind}"),
            );
        }
        if let Some((size, align)) = old_size_and_align {
            if size != alloc.size() || align != alloc.align {
                throw_ub_custom!(
                    fluent::const_eval_dealloc_incorrect_layout,
                    alloc = alloc_id,
                    size = alloc.size().bytes(),
                    align = alloc.align.bytes(),
                    size_found = size.bytes(),
                    align_found = align.bytes(),
                )
            }
        }

        // Let the machine take some extra action.
        let size = alloc.size();
        M::before_memory_deallocation(
            self.tcx,
            &mut self.machine,
            &mut alloc.extra,
            ptr,
            (alloc_id, prov),
            size,
            alloc.align,
            kind,
        )?;

        // Don't forget to remember size and align of this now-dead allocation!
        let old = self.memory.dead_alloc_map.insert(alloc_id, (size, alloc.align));
        if old.is_some() {
            bug!("Nothing can be deallocated twice");
        }

        interp_ok(())
    }

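    /// Internal helper function to determine the allocation and offset of a pointer (if any),
    /// performing an in-bounds check for an access of the given size.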
    #[inline(always)]
    fn get_ptr_access(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
    ) -> InterpResult<'tcx, Option<(AllocId, Size, M::ProvenanceExtra)>> {
        let size = i64::try_from(size.bytes()).unwrap(); // it would be an error to even ask for more than isize::MAX bytes
        Self::check_and_deref_ptr(
            self,
            ptr,
            size,
            CheckInAllocMsg::MemoryAccessTest,
            |this, alloc_id, offset, prov| {
                let (size, align) = this
                    .get_live_alloc_size_and_align(alloc_id, CheckInAllocMsg::MemoryAccessTest)?;
                interp_ok((size, align, (alloc_id, offset, prov)))
            },
        )
    }

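    /// Checks that a memory access of the given `size` at `ptr` would be within bounds of live
    /// memory, using `msg` to describe the access in error reports.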
    #[inline(always)]
    pub fn check_ptr_access(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
        msg: CheckInAllocMsg,
    ) -> InterpResult<'tcx> {
        let size = i64::try_from(size.bytes()).unwrap(); // it would be an error to even ask for more than isize::MAX bytes
        Self::check_and_deref_ptr(self, ptr, size, msg, |this, alloc_id, _, _| {
            let (size, align) = this.get_live_alloc_size_and_align(alloc_id, msg)?;
            interp_ok((size, align, ()))
        })?;
        interp_ok(())
    }

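    /// Check whether the given pointer points to live memory for a signed amount of bytes.
    /// A negative amount means that the given range of memory to the *left* of the pointer
    /// needs to be dereferenceable.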
    pub fn check_ptr_access_signed(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: i64,
        msg: CheckInAllocMsg,
    ) -> InterpResult<'tcx> {
        Self::check_and_deref_ptr(self, ptr, size, msg, |this, alloc_id, _, _| {
            let (size, align) = this.get_live_alloc_size_and_align(alloc_id, msg)?;
            interp_ok((size, align, ()))
        })?;
        interp_ok(())
    }

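    /// Low-level helper function to check if a ptr is in-bounds and potentially return a
    /// reference to the allocation it points to. Supports both shared and mutable references,
    /// as the actual checking is offloaded to the `alloc_size` helper closure.
    ///
    /// `alloc_size` will only get called for non-zero-sized accesses.
    ///
    /// Returns `None` if and only if the size is 0.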
    fn check_and_deref_ptr<T, R: Borrow<Self>>(
        this: R,
        ptr: Pointer<Option<M::Provenance>>,
        size: i64,
        msg: CheckInAllocMsg,
        alloc_size: impl FnOnce(
            R,
            AllocId,
            Size,
            M::ProvenanceExtra,
        ) -> InterpResult<'tcx, (Size, Align, T)>,
    ) -> InterpResult<'tcx, Option<T>> {
        // Everything is okay with size 0.
        if size == 0 {
            return interp_ok(None);
        }

        interp_ok(match this.borrow().ptr_try_get_alloc_id(ptr, size) {
            Err(addr) => {
                // We couldn't get a proper allocation.
                throw_ub!(DanglingIntPointer { addr, inbounds_size: size, msg });
            }
            Ok((alloc_id, offset, prov)) => {
                let tcx = this.borrow().tcx;
                let (alloc_size, _alloc_align, ret_val) = alloc_size(this, alloc_id, offset, prov)?;
                let offset = offset.bytes();
                // Compute the absolute begin and end bounds of the access.
                let (begin, end) = if size >= 0 {
                    (Some(offset), offset.checked_add(size as u64))
                } else {
                    (offset.checked_sub(size.unsigned_abs()), Some(offset))
                };
                // `None` indicates overflow/underflow, which is certainly out-of-bounds.
                let in_bounds = begin.is_some() && end.is_some_and(|e| e <= alloc_size.bytes());
                if !in_bounds {
                    throw_ub!(PointerOutOfBounds {
                        alloc_id,
                        alloc_size,
                        ptr_offset: tcx.sign_extend_to_target_isize(offset),
                        inbounds_size: size,
                        msg,
                    })
                }

                Some(ret_val)
            }
        })
    }

    pub(super) fn check_misalign(
        &self,
        misaligned: Option<Misalignment>,
        msg: CheckAlignMsg,
    ) -> InterpResult<'tcx> {
        if let Some(misaligned) = misaligned {
            throw_ub!(AlignmentCheckFailed(misaligned, msg))
        }
        interp_ok(())
    }

    pub(super) fn is_ptr_misaligned(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        align: Align,
    ) -> Option<Misalignment> {
        if !M::enforce_alignment(self) || align.bytes() == 1 {
            return None;
        }

        #[inline]
        fn is_offset_misaligned(offset: u64, align: Align) -> Option<Misalignment> {
            if offset % align.bytes() == 0 {
                None
            } else {
                // The biggest power of two through which `offset` is divisible.
                let offset_pow2 = 1 << offset.trailing_zeros();
                Some(Misalignment { has: Align::from_bytes(offset_pow2).unwrap(), required: align })
            }
        }

        match self.ptr_try_get_alloc_id(ptr, 0) {
            Err(addr) => is_offset_misaligned(addr, align),
            Ok((alloc_id, offset, _prov)) => {
                let alloc_info = self.get_alloc_info(alloc_id);
                if let Some(misalign) = M::alignment_check(
                    self,
                    alloc_id,
                    alloc_info.align,
                    alloc_info.kind,
                    offset,
                    align,
                ) {
                    Some(misalign)
                } else if M::Provenance::OFFSET_IS_ADDR {
                    is_offset_misaligned(ptr.addr().bytes(), align)
                } else {
                    // Check allocation alignment and offset alignment.
                    if alloc_info.align.bytes() < align.bytes() {
                        Some(Misalignment { has: alloc_info.align, required: align })
                    } else {
                        is_offset_misaligned(offset.bytes(), align)
                    }
                }
            }
        }
    }

    /// Checks a pointer for misalignment. The error assumes this is an access performed
    /// directly with this pointer.
    pub fn check_ptr_align(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        align: Align,
    ) -> InterpResult<'tcx> {
        self.check_misalign(self.is_ptr_misaligned(ptr, align), CheckAlignMsg::AccessedPtr)
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
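    /// Used by the provenance GC to remove unreachable entries from the `dead_alloc_map`.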
    pub fn remove_unreachable_allocs(&mut self, reachable_allocs: &FxHashSet<AllocId>) {
        #[allow(rustc::potential_query_instability)]
        self.memory.dead_alloc_map.retain(|id, _| reachable_allocs.contains(id));
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
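    /// Helper function to obtain a global (tcx) allocation. This attempts to return a
    /// reference to an existing allocation if one can be found in `tcx`. That, however,
    /// is only possible if `tcx` and this machine use the same pointer provenance, so it
    /// is indirected through `M::adjust_global_allocation`.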
    fn get_global_alloc(
        &self,
        id: AllocId,
        is_write: bool,
    ) -> InterpResult<'tcx, Cow<'tcx, Allocation<M::Provenance, M::AllocExtra, M::Bytes>>> {
        let (alloc, def_id) = match self.tcx.try_get_global_alloc(id) {
            Some(GlobalAlloc::Memory(mem)) => {
                // Memory of a constant or promoted or anonymous memory referenced by a static.
                (mem, None)
            }
            Some(GlobalAlloc::Function { .. }) => throw_ub!(DerefFunctionPointer(id)),
            Some(GlobalAlloc::VTable(..)) => throw_ub!(DerefVTablePointer(id)),
            None => throw_ub!(PointerUseAfterFree(id, CheckInAllocMsg::MemoryAccessTest)),
            Some(GlobalAlloc::Static(def_id)) => {
                assert!(self.tcx.is_static(def_id));
                // Thread-local statics do not point to a single static but are "indirect";
                // they have to be duplicated per thread, so we cannot hand out memory here.
                assert!(!self.tcx.is_thread_local_static(def_id));
                // We cannot evaluate the initializer of an `extern static`.
                if self.tcx.is_foreign_item(def_id) {
                    throw_unsup!(ExternStatic(def_id));
                }

                let val = self.ctfe_query(|tcx| tcx.eval_static_initializer(def_id))?;
                (val, Some(def_id))
            }
        };
        M::before_access_global(self.tcx, &self.machine, id, alloc, def_id, is_write)?;
        // We got tcx memory. Let the machine initialize its "extra" stuff.
        M::adjust_global_allocation(
            self,
            id, // always use the ID we got as input, not the "hidden" one.
            alloc.inner(),
        )
    }

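    /// Gives raw access to the `Allocation`, without bounds or alignment checks.
    /// The caller is responsible for calling the access hooks!
    ///
    /// You almost certainly want to use `get_ptr_alloc`/`get_ptr_alloc_mut` instead.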
    fn get_alloc_raw(
        &self,
        id: AllocId,
    ) -> InterpResult<'tcx, &Allocation<M::Provenance, M::AllocExtra, M::Bytes>> {
        // The error type of the inner closure here is somewhat funny. We have two ways of
        // "erroring": an actual error, or getting a reference from `get_global_alloc` that
        // we can use directly without inserting anything into the map.
        let a = self.memory.alloc_map.get_or(id, || {
            let alloc = self.get_global_alloc(id, false).report_err().map_err(Err)?;
            match alloc {
                Cow::Borrowed(alloc) => {
                    // We got a ref, cheaply return that as an "error" so that the
                    // map does not get mutated.
                    Err(Ok(alloc))
                }
                Cow::Owned(alloc) => {
                    // Need to put it into the map and return a ref to that.
                    let kind = M::GLOBAL_KIND.expect(
                        "I got a global allocation that I have to copy but the machine does \
                         not expect that to happen",
                    );
                    Ok((MemoryKind::Machine(kind), alloc))
                }
            }
        });
        // Now unpack that funny error type.
        match a {
            Ok(a) => interp_ok(&a.1),
            Err(a) => a.into(),
        }
    }

    /// Gives raw, immutable access to the `Allocation`'s underlying bytes, without bounds
    /// or alignment checks. The caller is responsible for calling the access hooks!
    pub fn get_alloc_bytes_unchecked_raw(&self, id: AllocId) -> InterpResult<'tcx, *const u8> {
        let alloc = self.get_alloc_raw(id)?;
        interp_ok(alloc.get_bytes_unchecked_raw())
    }

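    /// Bounds-checked *but not align-checked* allocation access.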
    pub fn get_ptr_alloc<'a>(
        &'a self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
    ) -> InterpResult<'tcx, Option<AllocRef<'a, 'tcx, M::Provenance, M::AllocExtra, M::Bytes>>>
    {
        let size_i64 = i64::try_from(size.bytes()).unwrap(); // it would be an error to even ask for more than isize::MAX bytes
        let ptr_and_alloc = Self::check_and_deref_ptr(
            self,
            ptr,
            size_i64,
            CheckInAllocMsg::MemoryAccessTest,
            |this, alloc_id, offset, prov| {
                let alloc = this.get_alloc_raw(alloc_id)?;
                interp_ok((alloc.size(), alloc.align, (alloc_id, offset, prov, alloc)))
            },
        )?;
        // We want to call the hook on *all* accesses that involve an AllocId, including
        // zero-sized ones, so we cannot rely on the closure above or the `Some` branch below.
        if !self.memory.validation_in_progress {
            if let Ok((alloc_id, ..)) = self.ptr_try_get_alloc_id(ptr, size_i64) {
                M::before_alloc_read(self, alloc_id)?;
            }
        }

        if let Some((alloc_id, offset, prov, alloc)) = ptr_and_alloc {
            let range = alloc_range(offset, size);
            if !self.memory.validation_in_progress {
                M::before_memory_read(
                    self.tcx,
                    &self.machine,
                    &alloc.extra,
                    ptr,
                    (alloc_id, prov),
                    range,
                )?;
            }
            interp_ok(Some(AllocRef { alloc, range, tcx: *self.tcx, alloc_id }))
        } else {
            interp_ok(None)
        }
    }

    pub fn get_alloc_extra<'a>(&'a self, id: AllocId) -> InterpResult<'tcx, &'a M::AllocExtra> {
        interp_ok(&self.get_alloc_raw(id)?.extra)
    }

    pub fn get_alloc_mutability<'a>(&'a self, id: AllocId) -> InterpResult<'tcx, Mutability> {
        interp_ok(self.get_alloc_raw(id)?.mutability)
    }

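    /// Gives raw mutable access to the `Allocation`, without bounds or alignment checks.
    /// The caller is responsible for calling the access hooks!
    ///
    /// Also returns a mutable reference to the machine, so that the caller can use both
    /// in parallel.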
    fn get_alloc_raw_mut(
        &mut self,
        id: AllocId,
    ) -> InterpResult<'tcx, (&mut Allocation<M::Provenance, M::AllocExtra, M::Bytes>, &mut M)> {
        // We cannot use `get_mut_or` here, since `get_global_alloc` needs `&self`, so we
        // first check for existence and then look the entry up again below.
        if self.memory.alloc_map.get_mut(id).is_none() {
            // Slow path: the allocation is global. Writing to a global requires making a
            // machine-local copy of it first.
            let alloc = self.get_global_alloc(id, true)?;
            let kind = M::GLOBAL_KIND.expect(
                "I got a global allocation that I have to copy but the machine does \
                 not expect that to happen",
            );
            self.memory.alloc_map.insert(id, (MemoryKind::Machine(kind), alloc.into_owned()));
        }

        let (_kind, alloc) = self.memory.alloc_map.get_mut(id).unwrap();
        if alloc.mutability.is_not() {
            throw_ub!(WriteToReadOnly(id))
        }
        interp_ok((alloc, &mut self.machine))
    }

    /// Gives raw, mutable access to the `Allocation`'s underlying bytes, without bounds
    /// or alignment checks. The caller is responsible for calling the access hooks!
    pub fn get_alloc_bytes_unchecked_raw_mut(
        &mut self,
        id: AllocId,
    ) -> InterpResult<'tcx, *mut u8> {
        let alloc = self.get_alloc_raw_mut(id)?.0;
        interp_ok(alloc.get_bytes_unchecked_raw_mut())
    }

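    /// Bounds-checked *but not align-checked* mutable allocation access.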
    pub fn get_ptr_alloc_mut<'a>(
        &'a mut self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
    ) -> InterpResult<'tcx, Option<AllocRefMut<'a, 'tcx, M::Provenance, M::AllocExtra, M::Bytes>>>
    {
        let tcx = self.tcx;
        let validation_in_progress = self.memory.validation_in_progress;

        let size_i64 = i64::try_from(size.bytes()).unwrap(); // it would be an error to even ask for more than isize::MAX bytes
        let ptr_and_alloc = Self::check_and_deref_ptr(
            self,
            ptr,
            size_i64,
            CheckInAllocMsg::MemoryAccessTest,
            |this, alloc_id, offset, prov| {
                let (alloc, machine) = this.get_alloc_raw_mut(alloc_id)?;
                interp_ok((alloc.size(), alloc.align, (alloc_id, offset, prov, alloc, machine)))
            },
        )?;

        if let Some((alloc_id, offset, prov, alloc, machine)) = ptr_and_alloc {
            let range = alloc_range(offset, size);
            if !validation_in_progress {
                M::before_memory_write(
                    tcx,
                    machine,
                    &mut alloc.extra,
                    ptr,
                    (alloc_id, prov),
                    range,
                )?;
            }
            interp_ok(Some(AllocRefMut { alloc, range, tcx: *tcx, alloc_id }))
        } else {
            interp_ok(None)
        }
    }

    /// Return the `extra` field of the given allocation, along with mutable access to the
    /// machine.
    pub fn get_alloc_extra_mut<'a>(
        &'a mut self,
        id: AllocId,
    ) -> InterpResult<'tcx, (&'a mut M::AllocExtra, &'a mut M)> {
        let (alloc, machine) = self.get_alloc_raw_mut(id)?;
        interp_ok((&mut alloc.extra, machine))
    }

    /// Check whether an allocation is live. This is faster than calling
    /// [`InterpCx::get_alloc_info`] if all you need to check is whether the kind is
    /// [`AllocKind::Dead`], because it doesn't have to look up the type and layout of statics.
    pub fn is_alloc_live(&self, id: AllocId) -> bool {
        self.memory.alloc_map.contains_key_ref(&id)
            || self.memory.extra_fn_ptr_map.contains_key(&id)
            || self.tcx.try_get_global_alloc(id).is_some()
    }

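    /// Obtain the size, alignment, kind, and mutability of the allocation `id` points to,
    /// even if that allocation has been deallocated.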
    pub fn get_alloc_info(&self, id: AllocId) -> AllocInfo {
        // # Regular allocations
        // Don't use `self.get_alloc_raw` here as that would
        // a) cause cycles in case `id` refers to a static,
        // b) duplicate a global's allocation in Miri.
        if let Some((_, alloc)) = self.memory.alloc_map.get(id) {
            return AllocInfo::new(
                alloc.size(),
                alloc.align,
                AllocKind::LiveData,
                alloc.mutability,
            );
        }

        // # Function pointers
        // (both global from `alloc_map` and local from `extra_fn_ptr_map`)
        if self.get_fn_alloc(id).is_some() {
            return AllocInfo::new(Size::ZERO, Align::ONE, AllocKind::Function, Mutability::Not);
        }

        // # Global allocations
        if let Some(global_alloc) = self.tcx.try_get_global_alloc(id) {
            let (size, align) = global_alloc.size_and_align(*self.tcx, self.typing_env);
            let mutbl = global_alloc.mutability(*self.tcx, self.typing_env);
            let kind = match global_alloc {
                GlobalAlloc::Static { .. } | GlobalAlloc::Memory { .. } => AllocKind::LiveData,
                GlobalAlloc::Function { .. } => bug!("We already checked function pointers above"),
                GlobalAlloc::VTable { .. } => AllocKind::VTable,
            };
            return AllocInfo::new(size, align, kind, mutbl);
        }

        // # Dead pointers
        let (size, align) = *self
            .memory
            .dead_alloc_map
            .get(&id)
            .expect("deallocated pointers should all be recorded in `dead_alloc_map`");
        AllocInfo::new(size, align, AllocKind::Dead, Mutability::Not)
    }

    /// Obtain the size and alignment of a *live* allocation.
    fn get_live_alloc_size_and_align(
        &self,
        id: AllocId,
        msg: CheckInAllocMsg,
    ) -> InterpResult<'tcx, (Size, Align)> {
        let info = self.get_alloc_info(id);
        if matches!(info.kind, AllocKind::Dead) {
            throw_ub!(PointerUseAfterFree(id, msg))
        }
        interp_ok((info.size, info.align))
    }

    fn get_fn_alloc(&self, id: AllocId) -> Option<FnVal<'tcx, M::ExtraFnVal>> {
        if let Some(extra) = self.memory.extra_fn_ptr_map.get(&id) {
            Some(FnVal::Other(*extra))
        } else {
            match self.tcx.try_get_global_alloc(id) {
                Some(GlobalAlloc::Function { instance, .. }) => Some(FnVal::Instance(instance)),
                _ => None,
            }
        }
    }

    pub fn get_ptr_fn(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
    ) -> InterpResult<'tcx, FnVal<'tcx, M::ExtraFnVal>> {
        trace!("get_ptr_fn({:?})", ptr);
        let (alloc_id, offset, _prov) = self.ptr_get_alloc_id(ptr, 0)?;
        if offset.bytes() != 0 {
            throw_ub!(InvalidFunctionPointer(Pointer::new(alloc_id, offset)))
        }
        self.get_fn_alloc(alloc_id)
            .ok_or_else(|| err_ub!(InvalidFunctionPointer(Pointer::new(alloc_id, offset))))
            .into()
    }

    /// Get the dynamic type of the given vtable pointer. If `expected_trait` is `Some`, it
    /// must be a vtable for the given trait.
    pub fn get_ptr_vtable_ty(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        expected_trait: Option<&'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>>,
    ) -> InterpResult<'tcx, Ty<'tcx>> {
        trace!("get_ptr_vtable({:?})", ptr);
        let (alloc_id, offset, _tag) = self.ptr_get_alloc_id(ptr, 0)?;
        if offset.bytes() != 0 {
            throw_ub!(InvalidVTablePointer(Pointer::new(alloc_id, offset)))
        }
        let Some(GlobalAlloc::VTable(ty, vtable_dyn_type)) =
            self.tcx.try_get_global_alloc(alloc_id)
        else {
            throw_ub!(InvalidVTablePointer(Pointer::new(alloc_id, offset)))
        };
        if let Some(expected_dyn_type) = expected_trait {
            self.check_vtable_for_type(vtable_dyn_type, expected_dyn_type)?;
        }
        interp_ok(ty)
    }

    pub fn alloc_mark_immutable(&mut self, id: AllocId) -> InterpResult<'tcx> {
        self.get_alloc_raw_mut(id)?.0.mutability = Mutability::Not;
        interp_ok(())
    }

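    /// Prepares all allocations transitively reachable through the given allocation IDs for
    /// a native (FFI) call: their provenance is exposed, and mutable allocations are made
    /// ready for possible writes by native code.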
    pub fn prepare_for_native_call(&mut self, ids: Vec<AllocId>) -> InterpResult<'tcx> {
        let mut done = FxHashSet::default();
        let mut todo = ids;
        while let Some(id) = todo.pop() {
            if !done.insert(id) {
                // We already saw this allocation before, don't process it again.
                continue;
            }
            let info = self.get_alloc_info(id);

            // If there is no data behind this pointer, skip it.
            if !matches!(info.kind, AllocKind::LiveData) {
                continue;
            }

            // Expose all provenances in this allocation, and add them to `todo`.
            let alloc = self.get_alloc_raw(id)?;
            for prov in alloc.provenance().provenances() {
                M::expose_provenance(self, prov)?;
                if let Some(id) = prov.get_alloc_id() {
                    todo.push(id);
                }
            }
            // Also expose the address of this allocation's bytes; `black_box` ensures the
            // exposure is not optimized away.
            std::hint::black_box(alloc.get_bytes_unchecked_raw().expose_provenance());

            // Prepare for possible writes from native code if mutable.
            if info.mutbl.is_mut() {
                self.get_alloc_raw_mut(id)?
                    .0
                    .prepare_for_native_write()
                    .map_err(|e| e.to_interp_error(id))?;
            }
        }
        interp_ok(())
    }

    /// Create a lazy debug printer that prints the given allocation and all allocations it
    /// points to, recursively.
    #[must_use]
    pub fn dump_alloc<'a>(&'a self, id: AllocId) -> DumpAllocs<'a, 'tcx, M> {
        self.dump_allocs(vec![id])
    }

    /// Create a lazy debug printer for a list of allocations and all allocations they point
    /// to, recursively.
    #[must_use]
    pub fn dump_allocs<'a>(&'a self, mut allocs: Vec<AllocId>) -> DumpAllocs<'a, 'tcx, M> {
        allocs.sort();
        allocs.dedup();
        DumpAllocs { ecx: self, allocs }
    }

    /// Print the allocation's bytes, without any nested allocations.
    pub fn print_alloc_bytes_for_diagnostics(&self, id: AllocId) -> String {
        // Use the "raw" access to avoid the `before_alloc_read` hook; we specifically want
        // to be able to call this during diagnostics.
        let alloc = self.get_alloc_raw(id).unwrap();
        let mut bytes = String::new();
        if alloc.size() != Size::ZERO {
            bytes = "\n".into();
            rustc_middle::mir::pretty::write_allocation_bytes(*self.tcx, alloc, &mut bytes, " ")
                .unwrap();
        }
        bytes
    }

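    /// Find leaked allocations and remove them from memory. Allocations reachable from
    /// `static_roots` or a `Global` allocation are not considered leaked, and neither are
    /// allocations whose kind's `may_leak()` returns true.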
    pub fn take_leaked_allocations(
        &mut self,
        static_roots: impl FnOnce(&Self) -> &[AllocId],
    ) -> Vec<(AllocId, MemoryKind<M::MemoryKind>, Allocation<M::Provenance, M::AllocExtra, M::Bytes>)>
    {
        // Collect the set of allocations that are *reachable* from `Global` allocations.
        let reachable = {
            let mut reachable = FxHashSet::default();
            let global_kind = M::GLOBAL_KIND.map(MemoryKind::Machine);
            let mut todo: Vec<_> =
                self.memory.alloc_map.filter_map_collect(move |&id, &(kind, _)| {
                    if Some(kind) == global_kind { Some(id) } else { None }
                });
            todo.extend(static_roots(self));
            while let Some(id) = todo.pop() {
                if reachable.insert(id) {
                    // This is a new allocation, add the allocations it points to `todo`.
                    if let Some((_, alloc)) = self.memory.alloc_map.get(id) {
                        todo.extend(
                            alloc.provenance().provenances().filter_map(|prov| prov.get_alloc_id()),
                        );
                    }
                }
            }
            reachable
        };

        // All allocations that are *not* `reachable` and *not* `may_leak` are considered leaked.
        let leaked: Vec<_> = self.memory.alloc_map.filter_map_collect(|&id, &(kind, _)| {
            if kind.may_leak() || reachable.contains(&id) { None } else { Some(id) }
        });
        let mut result = Vec::new();
        for &id in leaked.iter() {
            let (kind, alloc) = self.memory.alloc_map.remove(&id).unwrap();
            result.push((id, kind, alloc));
        }
        result
    }

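    /// Runs the closure in "validation" mode, which means the machine's memory read hooks
    /// will be suppressed. Needless to say, this must only be set with great care! It also
    /// cannot be nested.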
    pub fn run_for_validation<R>(&mut self, f: impl FnOnce(&mut Self) -> R) -> R {
        // `mem::replace` returns the previous value, so these asserts both flip the flag
        // and check that validation mode was not already set/unset by someone else.
        assert!(
            mem::replace(&mut self.memory.validation_in_progress, true) == false,
            "`validation_in_progress` was already set"
        );
        let res = f(self);
        assert!(
            mem::replace(&mut self.memory.validation_in_progress, false) == true,
            "`validation_in_progress` was unset by someone else"
        );
        res
    }

    pub(super) fn validation_in_progress(&self) -> bool {
        self.memory.validation_in_progress
    }
}

#[doc(hidden)]
/// There's no way to use this directly, it's just a helper struct for the `dump_alloc(s)` methods.
pub struct DumpAllocs<'a, 'tcx, M: Machine<'tcx>> {
    ecx: &'a InterpCx<'tcx, M>,
    allocs: Vec<AllocId>,
}

impl<'a, 'tcx, M: Machine<'tcx>> std::fmt::Debug for DumpAllocs<'a, 'tcx, M> {
    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Cannot be a closure because it is generic in `Prov`, `Extra`.
        fn write_allocation_track_relocs<'tcx, Prov: Provenance, Extra, Bytes: AllocBytes>(
            fmt: &mut std::fmt::Formatter<'_>,
            tcx: TyCtxt<'tcx>,
            allocs_to_print: &mut VecDeque<AllocId>,
            alloc: &Allocation<Prov, Extra, Bytes>,
        ) -> std::fmt::Result {
            for alloc_id in alloc.provenance().provenances().filter_map(|prov| prov.get_alloc_id())
            {
                allocs_to_print.push_back(alloc_id);
            }
            write!(fmt, "{}", display_allocation(tcx, alloc))
        }

        let mut allocs_to_print: VecDeque<_> = self.allocs.iter().copied().collect();
        // `allocs_printed` contains all allocations that we have already printed.
        let mut allocs_printed = FxHashSet::default();

        while let Some(id) = allocs_to_print.pop_front() {
            if !allocs_printed.insert(id) {
                // Already printed, so skip this.
                continue;
            }

            write!(fmt, "{id:?}")?;
            match self.ecx.memory.alloc_map.get(id) {
                Some((kind, alloc)) => {
                    // normal alloc
                    write!(fmt, " ({kind}, ")?;
                    write_allocation_track_relocs(
                        &mut *fmt,
                        *self.ecx.tcx,
                        &mut allocs_to_print,
                        alloc,
                    )?;
                }
                None => {
                    // global alloc
                    match self.ecx.tcx.try_get_global_alloc(id) {
                        Some(GlobalAlloc::Memory(alloc)) => {
                            write!(fmt, " (unchanged global, ")?;
                            write_allocation_track_relocs(
                                &mut *fmt,
                                *self.ecx.tcx,
                                &mut allocs_to_print,
                                alloc.inner(),
                            )?;
                        }
                        Some(GlobalAlloc::Function { instance, .. }) => {
                            write!(fmt, " (fn: {instance})")?;
                        }
                        Some(GlobalAlloc::VTable(ty, dyn_ty)) => {
                            write!(fmt, " (vtable: impl {dyn_ty} for {ty})")?;
                        }
                        Some(GlobalAlloc::Static(did)) => {
                            write!(fmt, " (static: {})", self.ecx.tcx.def_path_str(did))?;
                        }
                        None => {
                            write!(fmt, " (deallocated)")?;
                        }
                    }
                }
            }
            writeln!(fmt)?;
        }
        Ok(())
    }
}

/// Reading and writing.
impl<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes>
    AllocRefMut<'a, 'tcx, Prov, Extra, Bytes>
{
    pub fn as_ref<'b>(&'b self) -> AllocRef<'b, 'tcx, Prov, Extra, Bytes> {
        AllocRef { alloc: self.alloc, range: self.range, tcx: self.tcx, alloc_id: self.alloc_id }
    }

    /// `range` is relative to this allocation reference, not the base of the allocation.
    pub fn write_scalar(&mut self, range: AllocRange, val: Scalar<Prov>) -> InterpResult<'tcx> {
        let range = self.range.subrange(range);
        debug!("write_scalar at {:?}{range:?}: {val:?}", self.alloc_id);

        self.alloc
            .write_scalar(&self.tcx, range, val)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    /// `offset` is relative to this allocation reference, not the base of the allocation.
    pub fn write_ptr_sized(&mut self, offset: Size, val: Scalar<Prov>) -> InterpResult<'tcx> {
        self.write_scalar(alloc_range(offset, self.tcx.data_layout().pointer_size), val)
    }

    /// Mark the given sub-range (relative to this allocation reference) as uninitialized.
    pub fn write_uninit(&mut self, range: AllocRange) -> InterpResult<'tcx> {
        let range = self.range.subrange(range);

        self.alloc
            .write_uninit(&self.tcx, range)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    /// Mark the entire referenced range as uninitialized.
    pub fn write_uninit_full(&mut self) -> InterpResult<'tcx> {
        self.alloc
            .write_uninit(&self.tcx, self.range)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    /// Remove all provenance in the referenced range.
    pub fn clear_provenance(&mut self) -> InterpResult<'tcx> {
        self.alloc
            .clear_provenance(&self.tcx, self.range)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }
}

impl<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes> AllocRef<'a, 'tcx, Prov, Extra, Bytes> {
    /// `range` is relative to this allocation reference, not the base of the allocation.
    pub fn read_scalar(
        &self,
        range: AllocRange,
        read_provenance: bool,
    ) -> InterpResult<'tcx, Scalar<Prov>> {
        let range = self.range.subrange(range);
        self.alloc
            .read_scalar(&self.tcx, range, read_provenance)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    /// `range` is relative to this allocation reference, not the base of the allocation.
    pub fn read_integer(&self, range: AllocRange) -> InterpResult<'tcx, Scalar<Prov>> {
        self.read_scalar(range, /*read_provenance*/ false)
    }

    /// `offset` is relative to this allocation reference, not the base of the allocation.
    pub fn read_pointer(&self, offset: Size) -> InterpResult<'tcx, Scalar<Prov>> {
        self.read_scalar(
            alloc_range(offset, self.tcx.data_layout().pointer_size),
            /*read_provenance*/ true,
        )
    }

    /// Returns the bytes of the entire referenced range, with provenance stripped if possible.
    pub fn get_bytes_strip_provenance<'b>(&'b self) -> InterpResult<'tcx, &'a [u8]> {
        self.alloc
            .get_bytes_strip_provenance(&self.tcx, self.range)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    /// Returns whether the allocation has provenance anywhere in the range of the `AllocRef`.
    pub fn has_provenance(&self) -> bool {
        !self.alloc.provenance().range_empty(self.range, &self.tcx)
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
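    /// Reads the given number of bytes from memory, and strips their provenance if possible.
    /// Returns them as a slice.
    ///
    /// Performs appropriate bounds checks.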
    pub fn read_bytes_ptr_strip_provenance(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
    ) -> InterpResult<'tcx, &[u8]> {
        let Some(alloc_ref) = self.get_ptr_alloc(ptr, size)? else {
            // zero-sized access
            return interp_ok(&[]);
        };
        // Side-step `AllocRef` and directly access the underlying bytes more efficiently.
        // (We are staying inside the bounds here, so all is good.)
        interp_ok(
            alloc_ref
                .alloc
                .get_bytes_strip_provenance(&alloc_ref.tcx, alloc_ref.range)
                .map_err(|e| e.to_interp_error(alloc_ref.alloc_id))?,
        )
    }

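    /// Writes the given stream of bytes into memory.
    ///
    /// Performs appropriate bounds checks.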
    pub fn write_bytes_ptr(
        &mut self,
        ptr: Pointer<Option<M::Provenance>>,
        src: impl IntoIterator<Item = u8>,
    ) -> InterpResult<'tcx> {
        let mut src = src.into_iter();
        let (lower, upper) = src.size_hint();
        let len = upper.expect("can only write bounded iterators");
        assert_eq!(lower, len, "can only write iterators with a precise length");

        let size = Size::from_bytes(len);
        let Some(alloc_ref) = self.get_ptr_alloc_mut(ptr, size)? else {
            // zero-sized access
            assert_matches!(src.next(), None, "iterator said it was empty but returned an element");
            return interp_ok(());
        };

        // Side-step `AllocRef` and directly access the underlying bytes more efficiently.
        // (We are staying inside the bounds here, and all bytes do get overwritten, so we
        // can call `get_bytes_unchecked_for_overwrite`.)
        let alloc_id = alloc_ref.alloc_id;
        let bytes = alloc_ref
            .alloc
            .get_bytes_unchecked_for_overwrite(&alloc_ref.tcx, alloc_ref.range)
            .map_err(move |e| e.to_interp_error(alloc_id))?;
        // We iterate over `bytes` rather than zipping, so that we definitely cover all of it.
        for dest in bytes {
            *dest = src.next().expect("iterator was shorter than it said it would be");
        }
        assert_matches!(src.next(), None, "iterator was longer than it said it would be");
        interp_ok(())
    }

    pub fn mem_copy(
        &mut self,
        src: Pointer<Option<M::Provenance>>,
        dest: Pointer<Option<M::Provenance>>,
        size: Size,
        nonoverlapping: bool,
    ) -> InterpResult<'tcx> {
        self.mem_copy_repeatedly(src, dest, size, 1, nonoverlapping)
    }

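    /// Performs `num_copies` many copies of `size` many bytes from `src` to `dest + i*size`
    /// (where `i` is the index of the copy).
    ///
    /// Either `nonoverlapping` must be true or `num_copies` must be 1; if `nonoverlapping`
    /// is true, then `src` and `dest` must not overlap.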
    pub fn mem_copy_repeatedly(
        &mut self,
        src: Pointer<Option<M::Provenance>>,
        dest: Pointer<Option<M::Provenance>>,
        size: Size,
        num_copies: u64,
        nonoverlapping: bool,
    ) -> InterpResult<'tcx> {
        let tcx = self.tcx;
        // We need to do our own bounds checks.
        let src_parts = self.get_ptr_access(src, size)?;
        let dest_parts = self.get_ptr_access(dest, size * num_copies)?; // `Size` multiplication

        // Source alloc preparations and access hooks.
        let Some((src_alloc_id, src_offset, src_prov)) = src_parts else {
            // Zero-sized *source*; that means `dest` is also zero-sized and we have nothing to do.
            return interp_ok(());
        };
        let src_alloc = self.get_alloc_raw(src_alloc_id)?;
        let src_range = alloc_range(src_offset, size);
        assert!(!self.memory.validation_in_progress, "we can't be copying during validation");
        M::before_memory_read(
            tcx,
            &self.machine,
            &src_alloc.extra,
            src,
            (src_alloc_id, src_prov),
            src_range,
        )?;
        // We already did the source checks and called the hooks, so we are good to return early.
        let Some((dest_alloc_id, dest_offset, dest_prov)) = dest_parts else {
            // Zero-sized *destination*.
            return interp_ok(());
        };

        // Extract a raw pointer now, so that the shared borrow of `src_alloc` can end before
        // we mutably borrow the destination allocation below.
        let src_bytes = src_alloc.get_bytes_unchecked(src_range).as_ptr();
        // First copy the provenance to a temporary buffer, because the destination's
        // `get_bytes_unchecked_for_overwrite_ptr` below will clear the provenance (which is
        // correct, since we don't want to keep any provenance at the target that doesn't get
        // overwritten).
        let provenance = src_alloc
            .provenance()
            .prepare_copy(src_range, dest_offset, num_copies, self)
            .map_err(|e| e.to_interp_error(dest_alloc_id))?;
        // Prepare a copy of the initialization mask.
        let init = src_alloc.init_mask().prepare_copy(src_range);

        // Destination alloc preparations and access hooks.
        let (dest_alloc, extra) = self.get_alloc_raw_mut(dest_alloc_id)?;
        let dest_range = alloc_range(dest_offset, size * num_copies);
        M::before_memory_write(
            tcx,
            extra,
            &mut dest_alloc.extra,
            dest,
            (dest_alloc_id, dest_prov),
            dest_range,
        )?;
        let dest_bytes = dest_alloc
            .get_bytes_unchecked_for_overwrite_ptr(&tcx, dest_range)
            .map_err(|e| e.to_interp_error(dest_alloc_id))?
            .as_mut_ptr();

        if init.no_bytes_init() {
            // Fast path: if all bytes are `uninit` then there is nothing to copy. The target
            // range is marked as uninitialized but we otherwise omit changing the byte
            // representation, which may be arbitrary for uninitialized bytes. This also avoids
            // writing to the target bytes at all, so the backing allocation is never physically
            // touched if the bytes stay uninitialized for the whole interpreter execution.
            dest_alloc
                .write_uninit(&tcx, dest_range)
                .map_err(|e| e.to_interp_error(dest_alloc_id))?;
            return interp_ok(());
        }

        // SAFETY: we checked above that `src_bytes` is valid for reading `size` bytes and
        // `dest_bytes` for writing `size * num_copies` bytes; the overlap rules of the
        // `ptr::copy` variants are enforced below.
        unsafe {
            if src_alloc_id == dest_alloc_id {
                if nonoverlapping {
                    // `Size` additions
                    if (src_offset <= dest_offset && src_offset + size > dest_offset)
                        || (dest_offset <= src_offset && dest_offset + size > src_offset)
                    {
                        throw_ub_custom!(fluent::const_eval_copy_nonoverlapping_overlapping);
                    }
                }
            }
            if num_copies > 1 {
                assert!(nonoverlapping, "multi-copy only supported in non-overlapping mode");
            }

            let size_in_bytes = size.bytes_usize();
            // For particularly large arrays (where this is perf-sensitive) it's common that
            // we're writing a single byte repeatedly. So, optimize that case to a memset.
            if size_in_bytes == 1 {
                debug_assert!(num_copies >= 1); // we already handled the zero-sized cases above
                let value = *src_bytes;
                dest_bytes.write_bytes(value, (size * num_copies).bytes_usize());
            } else if src_alloc_id == dest_alloc_id {
                let mut dest_ptr = dest_bytes;
                for _ in 0..num_copies {
                    // Here we rely on `src` and `dest` being non-overlapping if there is more
                    // than one copy.
                    ptr::copy(src_bytes, dest_ptr, size_in_bytes);
                    dest_ptr = dest_ptr.add(size_in_bytes);
                }
            } else {
                let mut dest_ptr = dest_bytes;
                for _ in 0..num_copies {
                    ptr::copy_nonoverlapping(src_bytes, dest_ptr, size_in_bytes);
                    dest_ptr = dest_ptr.add(size_in_bytes);
                }
            }
        }

        // Now fill in all the "init" data.
        dest_alloc.init_mask_apply_copy(
            init,
            alloc_range(dest_offset, size), // just a single copy (i.e., not the full `dest_range`)
            num_copies,
        );
        // Copy the provenance to the destination.
        dest_alloc.provenance_apply_copy(provenance);

        interp_ok(())
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
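    /// Test if this value might be null.
    /// If the machine does not support ptr-to-int casts, this is conservative.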
    pub fn scalar_may_be_null(&self, scalar: Scalar<M::Provenance>) -> InterpResult<'tcx, bool> {
        match scalar.try_to_scalar_int() {
            Ok(int) => interp_ok(int.is_null()),
            Err(_) => {
                // The scalar is a pointer with provenance, so we have to reason about it
                // abstractly.
                let ptr = scalar.to_pointer(self)?;
                match self.ptr_try_get_alloc_id(ptr, 0) {
                    Ok((alloc_id, offset, _)) => {
                        let info = self.get_alloc_info(alloc_id);
                        // In-bounds pointers (including one past the end) can never be null,
                        // since allocations never include the null address.
                        if offset <= info.size {
                            return interp_ok(false);
                        }
                        // The allocation base is `align`-aligned, so if `offset` is not a
                        // multiple of `align`, neither is `base + offset`. Null is a multiple
                        // of every alignment, so the pointer cannot be null.
                        if offset.bytes() % info.align.bytes() != 0 {
                            return interp_ok(false);
                        }
                        // We don't know enough; this might be null.
                        interp_ok(true)
                    }
                    Err(_offset) => bug!("a non-int scalar is always a pointer"),
                }
            }
        }
    }

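    /// Turning a "maybe pointer" into a proper pointer (and some information about where it
    /// points), or an absolute address.
    ///
    /// `size` says how many bytes of memory are expected at that pointer. This is largely
    /// only used for error messages; however, the *sign* of `size` can be used to
    /// disambiguate situations where a wildcard pointer sits right in between two
    /// allocations. It is almost always best to determine the relevant size (including the
    /// sign) and pass it in.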
    pub fn ptr_try_get_alloc_id(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: i64,
    ) -> Result<(AllocId, Size, M::ProvenanceExtra), u64> {
        match ptr.into_pointer_or_addr() {
            Ok(ptr) => match M::ptr_get_alloc(self, ptr, size) {
                Some((alloc_id, offset, extra)) => Ok((alloc_id, offset, extra)),
                None => {
                    assert!(M::Provenance::OFFSET_IS_ADDR);
                    let (_, addr) = ptr.into_parts();
                    Err(addr.bytes())
                }
            },
            Err(addr) => Err(addr.bytes()),
        }
    }

    /// Turning a "maybe pointer" into a proper pointer (and some information about where it
    /// points), failing with an error for dangling integer pointers. See
    /// [`InterpCx::ptr_try_get_alloc_id`] for the meaning of `size`.
    #[inline(always)]
    pub fn ptr_get_alloc_id(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: i64,
    ) -> InterpResult<'tcx, (AllocId, Size, M::ProvenanceExtra)> {
        self.ptr_try_get_alloc_id(ptr, size)
            .map_err(|offset| {
                err_ub!(DanglingIntPointer {
                    addr: offset,
                    inbounds_size: size,
                    msg: CheckInAllocMsg::InboundsTest
                })
            })
            .into()
    }
}