use std::assert_matches::assert_matches;
use std::borrow::{Borrow, Cow};
use std::collections::VecDeque;
use std::{fmt, mem, ptr};

use rustc_abi::{Align, HasDataLayout, Size};
use rustc_ast::Mutability;
use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
use rustc_middle::bug;
use rustc_middle::mir::display_allocation;
use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
use tracing::{debug, instrument, trace};

use super::{
    AllocBytes, AllocId, AllocInit, AllocMap, AllocRange, Allocation, CheckAlignMsg,
    CheckInAllocMsg, CtfeProvenance, GlobalAlloc, InterpCx, InterpResult, Machine, MayLeak,
    Misalignment, Pointer, PointerArithmetic, Provenance, Scalar, alloc_range, err_ub,
    err_ub_custom, interp_ok, throw_ub, throw_ub_custom, throw_unsup, throw_unsup_format,
};
use crate::fluent_generated as fluent;

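/// The kind of memory an interpreter allocation lives in: a stack slot, the
/// synthetic `caller_location` allocation, or a machine-defined kind.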
#[derive(Debug, PartialEq, Copy, Clone)]
pub enum MemoryKind<T> {
    Stack,
    CallerLocation,
    Machine(T),
}

impl<T: MayLeak> MayLeak for MemoryKind<T> {
    #[inline]
    fn may_leak(self) -> bool {
        match self {
            MemoryKind::Stack => false,
            MemoryKind::CallerLocation => true,
            MemoryKind::Machine(k) => k.may_leak(),
        }
    }
}

impl<T: fmt::Display> fmt::Display for MemoryKind<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            MemoryKind::Stack => write!(f, "stack variable"),
            MemoryKind::CallerLocation => write!(f, "caller location"),
            MemoryKind::Machine(m) => write!(f, "{m}"),
        }
    }
}

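/// Coarse classification of an allocation, as reported by `get_alloc_info`.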
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum AllocKind {
    LiveData,
    Function,
    VTable,
    Dead,
}

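/// Size, alignment, kind, and mutability of an allocation, as reported by `get_alloc_info`.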
#[derive(Copy, Clone, PartialEq, Debug)]
pub struct AllocInfo {
    pub size: Size,
    pub align: Align,
    pub kind: AllocKind,
    pub mutbl: Mutability,
}

impl AllocInfo {
    fn new(size: Size, align: Align, kind: AllocKind, mutbl: Mutability) -> Self {
        Self { size, align, kind, mutbl }
    }
}

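/// The value a function pointer refers to: either a proper `Instance` or a
/// machine-specific "extra" function value.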
#[derive(Debug, Copy, Clone)]
pub enum FnVal<'tcx, Other> {
    Instance(Instance<'tcx>),
    Other(Other),
}

impl<'tcx, Other> FnVal<'tcx, Other> {
    pub fn as_instance(self) -> InterpResult<'tcx, Instance<'tcx>> {
        match self {
            FnVal::Instance(instance) => interp_ok(instance),
            FnVal::Other(_) => {
                throw_unsup_format!("'foreign' function pointers are not supported in this context")
            }
        }
    }
}

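/// The interpreter's memory: locally created allocations, the map of "extra"
/// function pointers, size/alignment records for dead allocations, and a flag
/// tracking whether validation is currently in progress.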
pub struct Memory<'tcx, M: Machine<'tcx>> {
    pub(super) alloc_map: M::MemoryMap,

    extra_fn_ptr_map: FxIndexMap<AllocId, M::ExtraFnVal>,

    pub(super) dead_alloc_map: FxIndexMap<AllocId, (Size, Align)>,

    validation_in_progress: bool,
}

#[derive(Copy, Clone)]
pub struct AllocRef<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes = Box<[u8]>> {
    alloc: &'a Allocation<Prov, Extra, Bytes>,
    range: AllocRange,
    tcx: TyCtxt<'tcx>,
    alloc_id: AllocId,
}

pub struct AllocRefMut<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes = Box<[u8]>> {
    alloc: &'a mut Allocation<Prov, Extra, Bytes>,
    range: AllocRange,
    tcx: TyCtxt<'tcx>,
    alloc_id: AllocId,
}

impl<'tcx, M: Machine<'tcx>> Memory<'tcx, M> {
    pub fn new() -> Self {
        Memory {
            alloc_map: M::MemoryMap::default(),
            extra_fn_ptr_map: FxIndexMap::default(),
            dead_alloc_map: FxIndexMap::default(),
            validation_in_progress: false,
        }
    }

    pub fn alloc_map(&self) -> &M::MemoryMap {
        &self.alloc_map
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
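    /// Converts a pointer into global (CTFE) memory into a machine pointer by applying the
    /// machine's root-pointer adjustment. Thread-local statics are rejected and extern statics
    /// are delegated to the machine, since neither has backing global memory here.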
    #[inline]
    pub fn global_root_pointer(
        &self,
        ptr: Pointer<CtfeProvenance>,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        let alloc_id = ptr.provenance.alloc_id();
        match self.tcx.try_get_global_alloc(alloc_id) {
            Some(GlobalAlloc::Static(def_id)) if self.tcx.is_thread_local_static(def_id) => {
                bug!("global memory cannot point to thread-local static")
            }
            Some(GlobalAlloc::Static(def_id)) if self.tcx.is_foreign_item(def_id) => {
                return M::extern_static_pointer(self, def_id);
            }
            None => {
                assert!(
                    self.memory.extra_fn_ptr_map.contains_key(&alloc_id),
                    "{alloc_id:?} is neither global nor a function pointer"
                );
            }
            _ => {}
        }
        M::adjust_alloc_root_pointer(self, ptr, M::GLOBAL_KIND.map(MemoryKind::Machine))
    }

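    /// Creates a function pointer for the given function value, reserving a fresh `AllocId`
    /// for machine-specific (`FnVal::Other`) functions.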
    pub fn fn_ptr(&mut self, fn_val: FnVal<'tcx, M::ExtraFnVal>) -> Pointer<M::Provenance> {
        let id = match fn_val {
            FnVal::Instance(instance) => {
                let salt = M::get_global_alloc_salt(self, Some(instance));
                self.tcx.reserve_and_set_fn_alloc(instance, salt)
            }
            FnVal::Other(extra) => {
                let id = self.tcx.reserve_alloc_id();
                let old = self.memory.extra_fn_ptr_map.insert(id, extra);
                assert!(old.is_none());
                id
            }
        };
        self.global_root_pointer(Pointer::from(id)).unwrap()
    }

    pub fn allocate_ptr(
        &mut self,
        size: Size,
        align: Align,
        kind: MemoryKind<M::MemoryKind>,
        init: AllocInit,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        let alloc = if M::PANIC_ON_ALLOC_FAIL {
            Allocation::new(size, align, init)
        } else {
            Allocation::try_new(size, align, init)?
        };
        self.insert_allocation(alloc, kind)
    }

    pub fn allocate_bytes_ptr(
        &mut self,
        bytes: &[u8],
        align: Align,
        kind: MemoryKind<M::MemoryKind>,
        mutability: Mutability,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        let alloc = Allocation::from_bytes(bytes, align, mutability);
        self.insert_allocation(alloc, kind)
    }

    pub fn insert_allocation(
        &mut self,
        alloc: Allocation<M::Provenance, (), M::Bytes>,
        kind: MemoryKind<M::MemoryKind>,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        assert!(alloc.size() <= self.max_size_of_val());
        let id = self.tcx.reserve_alloc_id();
        debug_assert_ne!(
            Some(kind),
            M::GLOBAL_KIND.map(MemoryKind::Machine),
            "dynamically allocating global memory"
        );
        let extra = M::init_alloc_extra(self, id, kind, alloc.size(), alloc.align)?;
        let alloc = alloc.with_extra(extra);
        self.memory.alloc_map.insert(id, (kind, alloc));
        M::adjust_alloc_root_pointer(self, Pointer::from(id), Some(kind))
    }

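    /// Reallocates the allocation behind `ptr` (which must point to its start): allocates new
    /// memory, copies over the old contents up to the smaller of the two sizes, and deallocates
    /// the old allocation.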
    pub fn reallocate_ptr(
        &mut self,
        ptr: Pointer<Option<M::Provenance>>,
        old_size_and_align: Option<(Size, Align)>,
        new_size: Size,
        new_align: Align,
        kind: MemoryKind<M::MemoryKind>,
        init_growth: AllocInit,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        let (alloc_id, offset, _prov) = self.ptr_get_alloc_id(ptr, 0)?;
        if offset.bytes() != 0 {
            throw_ub_custom!(
                fluent::const_eval_realloc_or_alloc_with_offset,
                ptr = format!("{ptr:?}"),
                kind = "realloc"
            );
        }

        let new_ptr = self.allocate_ptr(new_size, new_align, kind, init_growth)?;
        let old_size = match old_size_and_align {
            Some((size, _align)) => size,
            None => self.get_alloc_raw(alloc_id)?.size(),
        };
        self.mem_copy(ptr, new_ptr.into(), old_size.min(new_size), true)?;
        self.deallocate_ptr(ptr, old_size_and_align, kind)?;

        interp_ok(new_ptr)
    }

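    /// Deallocates the allocation `ptr` points to, checking that the pointer is at offset 0,
    /// that the allocation is mutable, and that the kind (and, if given, size and alignment)
    /// matches what the caller expects.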
    #[instrument(skip(self), level = "debug")]
    pub fn deallocate_ptr(
        &mut self,
        ptr: Pointer<Option<M::Provenance>>,
        old_size_and_align: Option<(Size, Align)>,
        kind: MemoryKind<M::MemoryKind>,
    ) -> InterpResult<'tcx> {
        let (alloc_id, offset, prov) = self.ptr_get_alloc_id(ptr, 0)?;
        trace!("deallocating: {alloc_id:?}");

        if offset.bytes() != 0 {
            throw_ub_custom!(
                fluent::const_eval_realloc_or_alloc_with_offset,
                ptr = format!("{ptr:?}"),
                kind = "dealloc",
            );
        }

        let Some((alloc_kind, mut alloc)) = self.memory.alloc_map.remove(&alloc_id) else {
            return Err(match self.tcx.try_get_global_alloc(alloc_id) {
                Some(GlobalAlloc::Function { .. }) => {
                    err_ub_custom!(
                        fluent::const_eval_invalid_dealloc,
                        alloc_id = alloc_id,
                        kind = "fn",
                    )
                }
                Some(GlobalAlloc::VTable(..)) => {
                    err_ub_custom!(
                        fluent::const_eval_invalid_dealloc,
                        alloc_id = alloc_id,
                        kind = "vtable",
                    )
                }
                Some(GlobalAlloc::Static(..) | GlobalAlloc::Memory(..)) => {
                    err_ub_custom!(
                        fluent::const_eval_invalid_dealloc,
                        alloc_id = alloc_id,
                        kind = "static_mem"
                    )
                }
                None => err_ub!(PointerUseAfterFree(alloc_id, CheckInAllocMsg::MemoryAccessTest)),
            })
            .into();
        };

        if alloc.mutability.is_not() {
            throw_ub_custom!(fluent::const_eval_dealloc_immutable, alloc = alloc_id,);
        }
        if alloc_kind != kind {
            throw_ub_custom!(
                fluent::const_eval_dealloc_kind_mismatch,
                alloc = alloc_id,
                alloc_kind = format!("{alloc_kind}"),
                kind = format!("{kind}"),
            );
        }
        if let Some((size, align)) = old_size_and_align {
            if size != alloc.size() || align != alloc.align {
                throw_ub_custom!(
                    fluent::const_eval_dealloc_incorrect_layout,
                    alloc = alloc_id,
                    size = alloc.size().bytes(),
                    align = alloc.align.bytes(),
                    size_found = size.bytes(),
                    align_found = align.bytes(),
                )
            }
        }

        let size = alloc.size();
        M::before_memory_deallocation(
            self.tcx,
            &mut self.machine,
            &mut alloc.extra,
            (alloc_id, prov),
            size,
            alloc.align,
            kind,
        )?;

        let old = self.memory.dead_alloc_map.insert(alloc_id, (size, alloc.align));
        if old.is_some() {
            bug!("Nothing can be deallocated twice");
        }

        interp_ok(())
    }

    #[inline(always)]
    fn get_ptr_access(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
    ) -> InterpResult<'tcx, Option<(AllocId, Size, M::ProvenanceExtra)>> {
        let size = i64::try_from(size.bytes()).unwrap();
        Self::check_and_deref_ptr(
            self,
            ptr,
            size,
            CheckInAllocMsg::MemoryAccessTest,
            |this, alloc_id, offset, prov| {
                let (size, align) = this
                    .get_live_alloc_size_and_align(alloc_id, CheckInAllocMsg::MemoryAccessTest)?;
                interp_ok((size, align, (alloc_id, offset, prov)))
            },
        )
    }

    #[inline(always)]
    pub fn check_ptr_access(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
        msg: CheckInAllocMsg,
    ) -> InterpResult<'tcx> {
        let size = i64::try_from(size.bytes()).unwrap();
        Self::check_and_deref_ptr(self, ptr, size, msg, |this, alloc_id, _, _| {
            let (size, align) = this.get_live_alloc_size_and_align(alloc_id, msg)?;
            interp_ok((size, align, ()))
        })?;
        interp_ok(())
    }

    pub fn check_ptr_access_signed(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: i64,
        msg: CheckInAllocMsg,
    ) -> InterpResult<'tcx> {
        Self::check_and_deref_ptr(self, ptr, size, msg, |this, alloc_id, _, _| {
            let (size, align) = this.get_live_alloc_size_and_align(alloc_id, msg)?;
            interp_ok((size, align, ()))
        })?;
        interp_ok(())
    }

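    /// Shared implementation of the pointer checks: resolves `ptr` to an allocation, asks
    /// `alloc_size` for that allocation's size and alignment, and verifies that `size` bytes
    /// starting at `ptr` (extending backwards if `size` is negative) are in bounds.
    /// Zero-sized accesses always succeed and return `None`.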
    fn check_and_deref_ptr<T, R: Borrow<Self>>(
        this: R,
        ptr: Pointer<Option<M::Provenance>>,
        size: i64,
        msg: CheckInAllocMsg,
        alloc_size: impl FnOnce(
            R,
            AllocId,
            Size,
            M::ProvenanceExtra,
        ) -> InterpResult<'tcx, (Size, Align, T)>,
    ) -> InterpResult<'tcx, Option<T>> {
        if size == 0 {
            return interp_ok(None);
        }

        interp_ok(match this.borrow().ptr_try_get_alloc_id(ptr, size) {
            Err(addr) => {
                throw_ub!(DanglingIntPointer { addr, inbounds_size: size, msg });
            }
            Ok((alloc_id, offset, prov)) => {
                let tcx = this.borrow().tcx;
                let (alloc_size, _alloc_align, ret_val) = alloc_size(this, alloc_id, offset, prov)?;
                let offset = offset.bytes();
                let (begin, end) = if size >= 0 {
                    (Some(offset), offset.checked_add(size as u64))
                } else {
                    (offset.checked_sub(size.unsigned_abs()), Some(offset))
                };
                let in_bounds = begin.is_some() && end.is_some_and(|e| e <= alloc_size.bytes());
                if !in_bounds {
                    throw_ub!(PointerOutOfBounds {
                        alloc_id,
                        alloc_size,
                        ptr_offset: tcx.sign_extend_to_target_isize(offset),
                        inbounds_size: size,
                        msg,
                    })
                }

                Some(ret_val)
            }
        })
    }

    pub(super) fn check_misalign(
        &self,
        misaligned: Option<Misalignment>,
        msg: CheckAlignMsg,
    ) -> InterpResult<'tcx> {
        if let Some(misaligned) = misaligned {
            throw_ub!(AlignmentCheckFailed(misaligned, msg))
        }
        interp_ok(())
    }

    pub(super) fn is_ptr_misaligned(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        align: Align,
    ) -> Option<Misalignment> {
        if !M::enforce_alignment(self) || align.bytes() == 1 {
            return None;
        }

        #[inline]
        fn is_offset_misaligned(offset: u64, align: Align) -> Option<Misalignment> {
            if offset % align.bytes() == 0 {
                None
            } else {
                let offset_pow2 = 1 << offset.trailing_zeros();
                Some(Misalignment { has: Align::from_bytes(offset_pow2).unwrap(), required: align })
            }
        }

        match self.ptr_try_get_alloc_id(ptr, 0) {
            Err(addr) => is_offset_misaligned(addr, align),
            Ok((alloc_id, offset, _prov)) => {
                let alloc_info = self.get_alloc_info(alloc_id);
                if let Some(misalign) = M::alignment_check(
                    self,
                    alloc_id,
                    alloc_info.align,
                    alloc_info.kind,
                    offset,
                    align,
                ) {
                    Some(misalign)
                } else if M::Provenance::OFFSET_IS_ADDR {
                    is_offset_misaligned(ptr.addr().bytes(), align)
                } else {
                    if alloc_info.align.bytes() < align.bytes() {
                        Some(Misalignment { has: alloc_info.align, required: align })
                    } else {
                        is_offset_misaligned(offset.bytes(), align)
                    }
                }
            }
        }
    }

    pub fn check_ptr_align(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        align: Align,
    ) -> InterpResult<'tcx> {
        self.check_misalign(self.is_ptr_misaligned(ptr, align), CheckAlignMsg::AccessedPtr)
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
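    /// Prunes `dead_alloc_map`, keeping only the entries whose `AllocId` is listed in
    /// `reachable_allocs`.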
    pub fn remove_unreachable_allocs(&mut self, reachable_allocs: &FxHashSet<AllocId>) {
        #[allow(rustc::potential_query_instability)]
        self.memory.dead_alloc_map.retain(|id, _| reachable_allocs.contains(id));
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
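    /// Looks up an allocation in global (`tcx`) memory, evaluating static initializers on
    /// demand and letting the machine adjust the result. Function pointers, vtables, extern
    /// statics, and dangling IDs are rejected.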
    fn get_global_alloc(
        &self,
        id: AllocId,
        is_write: bool,
    ) -> InterpResult<'tcx, Cow<'tcx, Allocation<M::Provenance, M::AllocExtra, M::Bytes>>> {
        let (alloc, def_id) = match self.tcx.try_get_global_alloc(id) {
            Some(GlobalAlloc::Memory(mem)) => (mem, None),
            Some(GlobalAlloc::Function { .. }) => throw_ub!(DerefFunctionPointer(id)),
            Some(GlobalAlloc::VTable(..)) => throw_ub!(DerefVTablePointer(id)),
            None => throw_ub!(PointerUseAfterFree(id, CheckInAllocMsg::MemoryAccessTest)),
            Some(GlobalAlloc::Static(def_id)) => {
                assert!(self.tcx.is_static(def_id));
                assert!(!self.tcx.is_thread_local_static(def_id));
                if self.tcx.is_foreign_item(def_id) {
                    throw_unsup!(ExternStatic(def_id));
                }

                let val = self.ctfe_query(|tcx| tcx.eval_static_initializer(def_id))?;
                (val, Some(def_id))
            }
        };
        M::before_access_global(self.tcx, &self.machine, id, alloc, def_id, is_write)?;
        M::adjust_global_allocation(self, id, alloc.inner())
    }

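    /// Gives raw, read-only access to the `Allocation` behind `id`, pulling global allocations
    /// into the machine's memory map (or borrowing them from `tcx`) as needed.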
    fn get_alloc_raw(
        &self,
        id: AllocId,
    ) -> InterpResult<'tcx, &Allocation<M::Provenance, M::AllocExtra, M::Bytes>> {
        let a = self.memory.alloc_map.get_or(id, || {
            let alloc = self.get_global_alloc(id, false).report_err().map_err(Err)?;
            match alloc {
                Cow::Borrowed(alloc) => Err(Ok(alloc)),
                Cow::Owned(alloc) => {
                    let kind = M::GLOBAL_KIND.expect(
                        "I got a global allocation that I have to copy but the machine does \
                         not expect that to happen",
                    );
                    Ok((MemoryKind::Machine(kind), alloc))
                }
            }
        });
        match a {
            Ok(a) => interp_ok(&a.1),
            Err(a) => a.into(),
        }
    }

    pub fn get_alloc_bytes_unchecked_raw(&self, id: AllocId) -> InterpResult<'tcx, *const u8> {
        let alloc = self.get_alloc_raw(id)?;
        interp_ok(alloc.get_bytes_unchecked_raw())
    }

    pub fn get_ptr_alloc<'a>(
        &'a self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
    ) -> InterpResult<'tcx, Option<AllocRef<'a, 'tcx, M::Provenance, M::AllocExtra, M::Bytes>>>
    {
        let size_i64 = i64::try_from(size.bytes()).unwrap();
        let ptr_and_alloc = Self::check_and_deref_ptr(
            self,
            ptr,
            size_i64,
            CheckInAllocMsg::MemoryAccessTest,
            |this, alloc_id, offset, prov| {
                let alloc = this.get_alloc_raw(alloc_id)?;
                interp_ok((alloc.size(), alloc.align, (alloc_id, offset, prov, alloc)))
            },
        )?;
        if !self.memory.validation_in_progress {
            if let Ok((alloc_id, ..)) = self.ptr_try_get_alloc_id(ptr, size_i64) {
                M::before_alloc_read(self, alloc_id)?;
            }
        }

        if let Some((alloc_id, offset, prov, alloc)) = ptr_and_alloc {
            let range = alloc_range(offset, size);
            if !self.memory.validation_in_progress {
                M::before_memory_read(
                    self.tcx,
                    &self.machine,
                    &alloc.extra,
                    (alloc_id, prov),
                    range,
                )?;
            }
            interp_ok(Some(AllocRef { alloc, range, tcx: *self.tcx, alloc_id }))
        } else {
            interp_ok(None)
        }
    }

    pub fn get_alloc_extra<'a>(&'a self, id: AllocId) -> InterpResult<'tcx, &'a M::AllocExtra> {
        interp_ok(&self.get_alloc_raw(id)?.extra)
    }

    pub fn get_alloc_mutability<'a>(&'a self, id: AllocId) -> InterpResult<'tcx, Mutability> {
        interp_ok(self.get_alloc_raw(id)?.mutability)
    }

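    /// Gives raw mutable access to the `Allocation` behind `id`, copying global memory into
    /// the machine's memory map first and refusing to hand out read-only allocations.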
    fn get_alloc_raw_mut(
        &mut self,
        id: AllocId,
    ) -> InterpResult<'tcx, (&mut Allocation<M::Provenance, M::AllocExtra, M::Bytes>, &mut M)> {
        if self.memory.alloc_map.get_mut(id).is_none() {
            let alloc = self.get_global_alloc(id, true)?;
            let kind = M::GLOBAL_KIND.expect(
                "I got a global allocation that I have to copy but the machine does \
                 not expect that to happen",
            );
            self.memory.alloc_map.insert(id, (MemoryKind::Machine(kind), alloc.into_owned()));
        }

        let (_kind, alloc) = self.memory.alloc_map.get_mut(id).unwrap();
        if alloc.mutability.is_not() {
            throw_ub!(WriteToReadOnly(id))
        }
        interp_ok((alloc, &mut self.machine))
    }

    pub fn get_alloc_bytes_unchecked_raw_mut(
        &mut self,
        id: AllocId,
    ) -> InterpResult<'tcx, *mut u8> {
        let alloc = self.get_alloc_raw_mut(id)?.0;
        interp_ok(alloc.get_bytes_unchecked_raw_mut())
    }

    pub fn get_ptr_alloc_mut<'a>(
        &'a mut self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
    ) -> InterpResult<'tcx, Option<AllocRefMut<'a, 'tcx, M::Provenance, M::AllocExtra, M::Bytes>>>
    {
        let tcx = self.tcx;
        let validation_in_progress = self.memory.validation_in_progress;

        let size_i64 = i64::try_from(size.bytes()).unwrap();
        let ptr_and_alloc = Self::check_and_deref_ptr(
            self,
            ptr,
            size_i64,
            CheckInAllocMsg::MemoryAccessTest,
            |this, alloc_id, offset, prov| {
                let (alloc, machine) = this.get_alloc_raw_mut(alloc_id)?;
                interp_ok((alloc.size(), alloc.align, (alloc_id, offset, prov, alloc, machine)))
            },
        )?;

        if let Some((alloc_id, offset, prov, alloc, machine)) = ptr_and_alloc {
            let range = alloc_range(offset, size);
            if !validation_in_progress {
                M::before_memory_write(tcx, machine, &mut alloc.extra, (alloc_id, prov), range)?;
            }
            interp_ok(Some(AllocRefMut { alloc, range, tcx: *tcx, alloc_id }))
        } else {
            interp_ok(None)
        }
    }

    pub fn get_alloc_extra_mut<'a>(
        &'a mut self,
        id: AllocId,
    ) -> InterpResult<'tcx, (&'a mut M::AllocExtra, &'a mut M)> {
        let (alloc, machine) = self.get_alloc_raw_mut(id)?;
        interp_ok((&mut alloc.extra, machine))
    }

    pub fn is_alloc_live(&self, id: AllocId) -> bool {
        self.memory.alloc_map.contains_key_ref(&id)
            || self.memory.extra_fn_ptr_map.contains_key(&id)
            || self.tcx.try_get_global_alloc(id).is_some()
    }

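    /// Returns size, alignment, kind, and mutability for any allocation ID, whether it refers
    /// to live data, a function, a vtable, global memory, or an already-deallocated allocation.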
    pub fn get_alloc_info(&self, id: AllocId) -> AllocInfo {
        if let Some((_, alloc)) = self.memory.alloc_map.get(id) {
            return AllocInfo::new(
                alloc.size(),
                alloc.align,
                AllocKind::LiveData,
                alloc.mutability,
            );
        }

        if self.get_fn_alloc(id).is_some() {
            return AllocInfo::new(Size::ZERO, Align::ONE, AllocKind::Function, Mutability::Not);
        }

        if let Some(global_alloc) = self.tcx.try_get_global_alloc(id) {
            let (size, align) = global_alloc.size_and_align(*self.tcx, self.typing_env);
            let mutbl = global_alloc.mutability(*self.tcx, self.typing_env);
            let kind = match global_alloc {
                GlobalAlloc::Static { .. } | GlobalAlloc::Memory { .. } => AllocKind::LiveData,
                GlobalAlloc::Function { .. } => bug!("We already checked function pointers above"),
                GlobalAlloc::VTable { .. } => AllocKind::VTable,
            };
            return AllocInfo::new(size, align, kind, mutbl);
        }

        let (size, align) = *self
            .memory
            .dead_alloc_map
            .get(&id)
            .expect("deallocated pointers should all be recorded in `dead_alloc_map`");
        AllocInfo::new(size, align, AllocKind::Dead, Mutability::Not)
    }

    fn get_live_alloc_size_and_align(
        &self,
        id: AllocId,
        msg: CheckInAllocMsg,
    ) -> InterpResult<'tcx, (Size, Align)> {
        let info = self.get_alloc_info(id);
        if matches!(info.kind, AllocKind::Dead) {
            throw_ub!(PointerUseAfterFree(id, msg))
        }
        interp_ok((info.size, info.align))
    }

    fn get_fn_alloc(&self, id: AllocId) -> Option<FnVal<'tcx, M::ExtraFnVal>> {
        if let Some(extra) = self.memory.extra_fn_ptr_map.get(&id) {
            Some(FnVal::Other(*extra))
        } else {
            match self.tcx.try_get_global_alloc(id) {
                Some(GlobalAlloc::Function { instance, .. }) => Some(FnVal::Instance(instance)),
                _ => None,
            }
        }
    }

    pub fn get_ptr_fn(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
    ) -> InterpResult<'tcx, FnVal<'tcx, M::ExtraFnVal>> {
        trace!("get_ptr_fn({:?})", ptr);
        let (alloc_id, offset, _prov) = self.ptr_get_alloc_id(ptr, 0)?;
        if offset.bytes() != 0 {
            throw_ub!(InvalidFunctionPointer(Pointer::new(alloc_id, offset)))
        }
        self.get_fn_alloc(alloc_id)
            .ok_or_else(|| err_ub!(InvalidFunctionPointer(Pointer::new(alloc_id, offset))))
            .into()
    }

    pub fn get_ptr_vtable_ty(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        expected_trait: Option<&'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>>,
    ) -> InterpResult<'tcx, Ty<'tcx>> {
        trace!("get_ptr_vtable({:?})", ptr);
        let (alloc_id, offset, _tag) = self.ptr_get_alloc_id(ptr, 0)?;
        if offset.bytes() != 0 {
            throw_ub!(InvalidVTablePointer(Pointer::new(alloc_id, offset)))
        }
        let Some(GlobalAlloc::VTable(ty, vtable_dyn_type)) =
            self.tcx.try_get_global_alloc(alloc_id)
        else {
            throw_ub!(InvalidVTablePointer(Pointer::new(alloc_id, offset)))
        };
        if let Some(expected_dyn_type) = expected_trait {
            self.check_vtable_for_type(vtable_dyn_type, expected_dyn_type)?;
        }
        interp_ok(ty)
    }

    pub fn alloc_mark_immutable(&mut self, id: AllocId) -> InterpResult<'tcx> {
        self.get_alloc_raw_mut(id)?.0.mutability = Mutability::Not;
        interp_ok(())
    }

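    /// Walks all allocations reachable from `id`, exposing their provenance and preparing
    /// mutable ones for writes that may happen outside the interpreter (e.g. during a
    /// native call).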
    pub fn prepare_for_native_call(
        &mut self,
        id: AllocId,
        initial_prov: M::Provenance,
    ) -> InterpResult<'tcx> {
        M::expose_provenance(self, initial_prov)?;

        let mut done = FxHashSet::default();
        let mut todo = vec![id];
        while let Some(id) = todo.pop() {
            if !done.insert(id) {
                continue;
            }
            let info = self.get_alloc_info(id);

            if !matches!(info.kind, AllocKind::LiveData) {
                continue;
            }

            let alloc = self.get_alloc_raw(id)?;
            for prov in alloc.provenance().provenances() {
                M::expose_provenance(self, prov)?;
                if let Some(id) = prov.get_alloc_id() {
                    todo.push(id);
                }
            }

            if info.mutbl.is_mut() {
                self.get_alloc_raw_mut(id)?
                    .0
                    .prepare_for_native_write()
                    .map_err(|e| e.to_interp_error(id))?;
            }
        }
        interp_ok(())
    }

    #[must_use]
    pub fn dump_alloc<'a>(&'a self, id: AllocId) -> DumpAllocs<'a, 'tcx, M> {
        self.dump_allocs(vec![id])
    }

    #[must_use]
    pub fn dump_allocs<'a>(&'a self, mut allocs: Vec<AllocId>) -> DumpAllocs<'a, 'tcx, M> {
        allocs.sort();
        allocs.dedup();
        DumpAllocs { ecx: self, allocs }
    }

    pub fn print_alloc_bytes_for_diagnostics(&self, id: AllocId) -> String {
        let alloc = self.get_alloc_raw(id).unwrap();
        let mut bytes = String::new();
        if alloc.size() != Size::ZERO {
            bytes = "\n".into();
            rustc_middle::mir::pretty::write_allocation_bytes(*self.tcx, alloc, &mut bytes, " ")
                .unwrap();
        }
        bytes
    }

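    /// Removes and returns all allocations that are unreachable from global allocations and
    /// the given static roots and whose kind does not permit leaking.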
    pub fn take_leaked_allocations(
        &mut self,
        static_roots: impl FnOnce(&Self) -> &[AllocId],
    ) -> Vec<(AllocId, MemoryKind<M::MemoryKind>, Allocation<M::Provenance, M::AllocExtra, M::Bytes>)>
    {
        let reachable = {
            let mut reachable = FxHashSet::default();
            let global_kind = M::GLOBAL_KIND.map(MemoryKind::Machine);
            let mut todo: Vec<_> =
                self.memory.alloc_map.filter_map_collect(move |&id, &(kind, _)| {
                    if Some(kind) == global_kind { Some(id) } else { None }
                });
            todo.extend(static_roots(self));
            while let Some(id) = todo.pop() {
                if reachable.insert(id) {
                    if let Some((_, alloc)) = self.memory.alloc_map.get(id) {
                        todo.extend(
                            alloc.provenance().provenances().filter_map(|prov| prov.get_alloc_id()),
                        );
                    }
                }
            }
            reachable
        };

        let leaked: Vec<_> = self.memory.alloc_map.filter_map_collect(|&id, &(kind, _)| {
            if kind.may_leak() || reachable.contains(&id) { None } else { Some(id) }
        });
        let mut result = Vec::new();
        for &id in leaked.iter() {
            let (kind, alloc) = self.memory.alloc_map.remove(&id).unwrap();
            result.push((id, kind, alloc));
        }
        result
    }

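    /// Runs `f` with the `validation_in_progress` flag set, so that memory accesses performed
    /// by `f` are treated as validation rather than regular program reads.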
    pub fn run_for_validation<R>(&mut self, f: impl FnOnce(&mut Self) -> R) -> R {
        assert!(
            mem::replace(&mut self.memory.validation_in_progress, true) == false,
            "`validation_in_progress` was already set"
        );
        let res = f(self);
        assert!(
            mem::replace(&mut self.memory.validation_in_progress, false) == true,
            "`validation_in_progress` was unset by someone else"
        );
        res
    }

    pub(super) fn validation_in_progress(&self) -> bool {
        self.memory.validation_in_progress
    }
}

#[doc(hidden)]
pub struct DumpAllocs<'a, 'tcx, M: Machine<'tcx>> {
    ecx: &'a InterpCx<'tcx, M>,
    allocs: Vec<AllocId>,
}

impl<'a, 'tcx, M: Machine<'tcx>> std::fmt::Debug for DumpAllocs<'a, 'tcx, M> {
    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        fn write_allocation_track_relocs<'tcx, Prov: Provenance, Extra, Bytes: AllocBytes>(
            fmt: &mut std::fmt::Formatter<'_>,
            tcx: TyCtxt<'tcx>,
            allocs_to_print: &mut VecDeque<AllocId>,
            alloc: &Allocation<Prov, Extra, Bytes>,
        ) -> std::fmt::Result {
            for alloc_id in alloc.provenance().provenances().filter_map(|prov| prov.get_alloc_id())
            {
                allocs_to_print.push_back(alloc_id);
            }
            write!(fmt, "{}", display_allocation(tcx, alloc))
        }

        let mut allocs_to_print: VecDeque<_> = self.allocs.iter().copied().collect();
        let mut allocs_printed = FxHashSet::default();

        while let Some(id) = allocs_to_print.pop_front() {
            if !allocs_printed.insert(id) {
                continue;
            }

            write!(fmt, "{id:?}")?;
            match self.ecx.memory.alloc_map.get(id) {
                Some((kind, alloc)) => {
                    write!(fmt, " ({kind}, ")?;
                    write_allocation_track_relocs(
                        &mut *fmt,
                        *self.ecx.tcx,
                        &mut allocs_to_print,
                        alloc,
                    )?;
                }
                None => {
                    match self.ecx.tcx.try_get_global_alloc(id) {
                        Some(GlobalAlloc::Memory(alloc)) => {
                            write!(fmt, " (unchanged global, ")?;
                            write_allocation_track_relocs(
                                &mut *fmt,
                                *self.ecx.tcx,
                                &mut allocs_to_print,
                                alloc.inner(),
                            )?;
                        }
                        Some(GlobalAlloc::Function { instance, .. }) => {
                            write!(fmt, " (fn: {instance})")?;
                        }
                        Some(GlobalAlloc::VTable(ty, dyn_ty)) => {
                            write!(fmt, " (vtable: impl {dyn_ty} for {ty})")?;
                        }
                        Some(GlobalAlloc::Static(did)) => {
                            write!(fmt, " (static: {})", self.ecx.tcx.def_path_str(did))?;
                        }
                        None => {
                            write!(fmt, " (deallocated)")?;
                        }
                    }
                }
            }
            writeln!(fmt)?;
        }
        Ok(())
    }
}

impl<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes>
    AllocRefMut<'a, 'tcx, Prov, Extra, Bytes>
{
    pub fn as_ref<'b>(&'b self) -> AllocRef<'b, 'tcx, Prov, Extra, Bytes> {
        AllocRef { alloc: self.alloc, range: self.range, tcx: self.tcx, alloc_id: self.alloc_id }
    }

    pub fn write_scalar(&mut self, range: AllocRange, val: Scalar<Prov>) -> InterpResult<'tcx> {
        let range = self.range.subrange(range);
        debug!("write_scalar at {:?}{range:?}: {val:?}", self.alloc_id);

        self.alloc
            .write_scalar(&self.tcx, range, val)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    pub fn write_ptr_sized(&mut self, offset: Size, val: Scalar<Prov>) -> InterpResult<'tcx> {
        self.write_scalar(alloc_range(offset, self.tcx.data_layout().pointer_size), val)
    }

    pub fn write_uninit(&mut self, range: AllocRange) -> InterpResult<'tcx> {
        let range = self.range.subrange(range);

        self.alloc
            .write_uninit(&self.tcx, range)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    pub fn write_uninit_full(&mut self) -> InterpResult<'tcx> {
        self.alloc
            .write_uninit(&self.tcx, self.range)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    pub fn clear_provenance(&mut self) -> InterpResult<'tcx> {
        self.alloc
            .clear_provenance(&self.tcx, self.range)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }
}

impl<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes> AllocRef<'a, 'tcx, Prov, Extra, Bytes> {
    pub fn read_scalar(
        &self,
        range: AllocRange,
        read_provenance: bool,
    ) -> InterpResult<'tcx, Scalar<Prov>> {
        let range = self.range.subrange(range);
        self.alloc
            .read_scalar(&self.tcx, range, read_provenance)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    pub fn read_integer(&self, range: AllocRange) -> InterpResult<'tcx, Scalar<Prov>> {
        self.read_scalar(range, false)
    }

    pub fn read_pointer(&self, offset: Size) -> InterpResult<'tcx, Scalar<Prov>> {
        self.read_scalar(alloc_range(offset, self.tcx.data_layout().pointer_size), true)
    }

    pub fn get_bytes_strip_provenance<'b>(&'b self) -> InterpResult<'tcx, &'a [u8]> {
        self.alloc
            .get_bytes_strip_provenance(&self.tcx, self.range)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    pub fn has_provenance(&self) -> bool {
        !self.alloc.provenance().range_empty(self.range, &self.tcx)
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
    pub fn read_bytes_ptr_strip_provenance(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
    ) -> InterpResult<'tcx, &[u8]> {
        let Some(alloc_ref) = self.get_ptr_alloc(ptr, size)? else {
            return interp_ok(&[]);
        };
        interp_ok(
            alloc_ref
                .alloc
                .get_bytes_strip_provenance(&alloc_ref.tcx, alloc_ref.range)
                .map_err(|e| e.to_interp_error(alloc_ref.alloc_id))?,
        )
    }

    pub fn write_bytes_ptr(
        &mut self,
        ptr: Pointer<Option<M::Provenance>>,
        src: impl IntoIterator<Item = u8>,
    ) -> InterpResult<'tcx> {
        let mut src = src.into_iter();
        let (lower, upper) = src.size_hint();
        let len = upper.expect("can only write bounded iterators");
        assert_eq!(lower, len, "can only write iterators with a precise length");

        let size = Size::from_bytes(len);
        let Some(alloc_ref) = self.get_ptr_alloc_mut(ptr, size)? else {
            assert_matches!(src.next(), None, "iterator said it was empty but returned an element");
            return interp_ok(());
        };

        let alloc_id = alloc_ref.alloc_id;
        let bytes = alloc_ref
            .alloc
            .get_bytes_unchecked_for_overwrite(&alloc_ref.tcx, alloc_ref.range)
            .map_err(move |e| e.to_interp_error(alloc_id))?;
        for dest in bytes {
            *dest = src.next().expect("iterator was shorter than it said it would be");
        }
        assert_matches!(src.next(), None, "iterator was longer than it said it would be");
        interp_ok(())
    }

    pub fn mem_copy(
        &mut self,
        src: Pointer<Option<M::Provenance>>,
        dest: Pointer<Option<M::Provenance>>,
        size: Size,
        nonoverlapping: bool,
    ) -> InterpResult<'tcx> {
        self.mem_copy_repeatedly(src, dest, size, 1, nonoverlapping)
    }

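    /// Copies `size` bytes from `src` to `dest`, repeating the copy `num_copies` times at
    /// consecutive destination offsets. Provenance and initialization masks are copied along
    /// with the bytes; an entirely uninitialized source just de-initializes the destination.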
    pub fn mem_copy_repeatedly(
        &mut self,
        src: Pointer<Option<M::Provenance>>,
        dest: Pointer<Option<M::Provenance>>,
        size: Size,
        num_copies: u64,
        nonoverlapping: bool,
    ) -> InterpResult<'tcx> {
        let tcx = self.tcx;
        let src_parts = self.get_ptr_access(src, size)?;
        let dest_parts = self.get_ptr_access(dest, size * num_copies)?;
        let Some((src_alloc_id, src_offset, src_prov)) = src_parts else {
            return interp_ok(());
        };
        let src_alloc = self.get_alloc_raw(src_alloc_id)?;
        let src_range = alloc_range(src_offset, size);
        assert!(!self.memory.validation_in_progress, "we can't be copying during validation");
        M::before_memory_read(
            tcx,
            &self.machine,
            &src_alloc.extra,
            (src_alloc_id, src_prov),
            src_range,
        )?;
        let Some((dest_alloc_id, dest_offset, dest_prov)) = dest_parts else {
            return interp_ok(());
        };

        let src_bytes = src_alloc.get_bytes_unchecked(src_range).as_ptr();
        let provenance = src_alloc
            .provenance()
            .prepare_copy(src_range, dest_offset, num_copies, self)
            .map_err(|e| e.to_interp_error(dest_alloc_id))?;
        let init = src_alloc.init_mask().prepare_copy(src_range);

        let (dest_alloc, extra) = self.get_alloc_raw_mut(dest_alloc_id)?;
        let dest_range = alloc_range(dest_offset, size * num_copies);
        M::before_memory_write(
            tcx,
            extra,
            &mut dest_alloc.extra,
            (dest_alloc_id, dest_prov),
            dest_range,
        )?;
        let dest_bytes = dest_alloc
            .get_bytes_unchecked_for_overwrite_ptr(&tcx, dest_range)
            .map_err(|e| e.to_interp_error(dest_alloc_id))?
            .as_mut_ptr();

        if init.no_bytes_init() {
            dest_alloc
                .write_uninit(&tcx, dest_range)
                .map_err(|e| e.to_interp_error(dest_alloc_id))?;
            return interp_ok(());
        }

        unsafe {
            if src_alloc_id == dest_alloc_id {
                if nonoverlapping {
                    if (src_offset <= dest_offset && src_offset + size > dest_offset)
                        || (dest_offset <= src_offset && dest_offset + size > src_offset)
                    {
                        throw_ub_custom!(fluent::const_eval_copy_nonoverlapping_overlapping);
                    }
                }
            }
            if num_copies > 1 {
                assert!(nonoverlapping, "multi-copy only supported in non-overlapping mode");
            }

            let size_in_bytes = size.bytes_usize();
            if size_in_bytes == 1 {
                debug_assert!(num_copies >= 1);
                let value = *src_bytes;
                dest_bytes.write_bytes(value, (size * num_copies).bytes_usize());
            } else if src_alloc_id == dest_alloc_id {
                let mut dest_ptr = dest_bytes;
                for _ in 0..num_copies {
                    ptr::copy(src_bytes, dest_ptr, size_in_bytes);
                    dest_ptr = dest_ptr.add(size_in_bytes);
                }
            } else {
                let mut dest_ptr = dest_bytes;
                for _ in 0..num_copies {
                    ptr::copy_nonoverlapping(src_bytes, dest_ptr, size_in_bytes);
                    dest_ptr = dest_ptr.add(size_in_bytes);
                }
            }
        }

        dest_alloc.init_mask_apply_copy(init, alloc_range(dest_offset, size), num_copies);
        dest_alloc.provenance_apply_copy(provenance);

        interp_ok(())
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
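    /// Returns whether the given scalar could be null: integers are compared against zero,
    /// and for pointers the allocation's size and alignment are used to rule out (or admit)
    /// a zero address.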
    pub fn scalar_may_be_null(&self, scalar: Scalar<M::Provenance>) -> InterpResult<'tcx, bool> {
        match scalar.try_to_scalar_int() {
            Ok(int) => interp_ok(int.is_null()),
            Err(_) => {
                let ptr = scalar.to_pointer(self)?;
                match self.ptr_try_get_alloc_id(ptr, 0) {
                    Ok((alloc_id, offset, _)) => {
                        let info = self.get_alloc_info(alloc_id);
                        if offset <= info.size {
                            return interp_ok(false);
                        }
                        if offset.bytes() % info.align.bytes() != 0 {
                            return interp_ok(false);
                        }
                        interp_ok(true)
                    }
                    Err(_offset) => bug!("a non-int scalar is always a pointer"),
                }
            }
        }
    }

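    /// Resolves a pointer to its allocation ID, offset, and provenance extra, or returns the
    /// raw address if it has no (usable) provenance. The signed `size` of the intended access
    /// is forwarded to the machine's `ptr_get_alloc` hook.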
    pub fn ptr_try_get_alloc_id(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: i64,
    ) -> Result<(AllocId, Size, M::ProvenanceExtra), u64> {
        match ptr.into_pointer_or_addr() {
            Ok(ptr) => match M::ptr_get_alloc(self, ptr, size) {
                Some((alloc_id, offset, extra)) => Ok((alloc_id, offset, extra)),
                None => {
                    assert!(M::Provenance::OFFSET_IS_ADDR);
                    let (_, addr) = ptr.into_parts();
                    Err(addr.bytes())
                }
            },
            Err(addr) => Err(addr.bytes()),
        }
    }

    #[inline(always)]
    pub fn ptr_get_alloc_id(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: i64,
    ) -> InterpResult<'tcx, (AllocId, Size, M::ProvenanceExtra)> {
        self.ptr_try_get_alloc_id(ptr, size)
            .map_err(|offset| {
                err_ub!(DanglingIntPointer {
                    addr: offset,
                    inbounds_size: size,
                    msg: CheckInAllocMsg::InboundsTest
                })
            })
            .into()
    }
}