mod init_mask;
mod provenance_map;

use std::borrow::Cow;
use std::hash::Hash;
use std::ops::{Deref, DerefMut, Range};
use std::{fmt, hash, ptr};

use either::{Left, Right};
use init_mask::*;
pub use init_mask::{InitChunk, InitChunkIter};
use provenance_map::*;
use rustc_abi::{Align, HasDataLayout, Size};
use rustc_ast::Mutability;
use rustc_data_structures::intern::Interned;
use rustc_macros::HashStable;
use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};

use super::{
    AllocId, BadBytesAccess, CtfeProvenance, InterpErrorKind, InterpResult, Pointer,
    PointerArithmetic, Provenance, ResourceExhaustionInfo, Scalar, ScalarSizeMismatch,
    UndefinedBehaviorInfo, UnsupportedOpInfo, interp_ok, read_target_uint, write_target_uint,
};
use crate::ty;

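/// Functionality required for the bytes of an `Allocation`.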
pub trait AllocBytes: Clone + fmt::Debug + Deref<Target = [u8]> + DerefMut<Target = [u8]> {
    type AllocParams;

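    /// Create an `AllocBytes` from a slice of `u8`.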
    fn from_bytes<'a>(
        slice: impl Into<Cow<'a, [u8]>>,
        _align: Align,
        _params: Self::AllocParams,
    ) -> Self;

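    /// Create a zeroed `AllocBytes` of the specified size and alignment; returns `None` if
    /// allocating that much memory fails.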
    fn zeroed(size: Size, _align: Align, _params: Self::AllocParams) -> Option<Self>;

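    /// Gives direct mutable access to the raw underlying storage.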
    fn as_mut_ptr(&mut self) -> *mut u8;

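    /// Gives direct access to the raw underlying storage.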
    fn as_ptr(&self) -> *const u8;
}

impl AllocBytes for Box<[u8]> {
    type AllocParams = ();

    fn from_bytes<'a>(slice: impl Into<Cow<'a, [u8]>>, _align: Align, _params: ()) -> Self {
        Box::<[u8]>::from(slice.into())
    }

    fn zeroed(size: Size, _align: Align, _params: ()) -> Option<Self> {
        let bytes = Box::<[u8]>::try_new_zeroed_slice(size.bytes().try_into().ok()?).ok()?;
        // SAFETY: the box was zero-allocated, which is a valid initial value for `Box<[u8]>`.
        let bytes = unsafe { bytes.assume_init() };
        Some(bytes)
    }

    fn as_mut_ptr(&mut self) -> *mut u8 {
        Box::as_mut_ptr(self).cast()
    }

    fn as_ptr(&self) -> *const u8 {
        Box::as_ptr(self).cast()
    }
}

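/// This type represents an Allocation in the Miri/CTFE core engine.
///
/// Its public API is rather low-level, working directly with allocation offsets and a custom
/// error type to account for the lack of an `AllocId` on this level. Higher-level access goes
/// through the interpreter's `memory` module.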
#[derive(Clone, Eq, PartialEq)]
#[derive(HashStable)]
pub struct Allocation<Prov: Provenance = CtfeProvenance, Extra = (), Bytes = Box<[u8]>> {
    /// The actual bytes of the allocation.
    bytes: Bytes,
    /// Maps byte offsets to the provenance of pointers stored at those offsets.
    provenance: ProvenanceMap<Prov>,
    /// Denotes which parts of this allocation are initialized.
    init_mask: InitMask,
    /// The alignment of the allocation.
    pub align: Align,
    /// Whether the allocation may be mutated.
    pub mutability: Mutability,
    /// Extra state for the machine.
    pub extra: Extra,
}

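/// The flags of an allocation (alignment, mutability, and whether its bytes are all zero),
/// packed into a single byte by the `Encodable`/`Decodable` impls below.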
struct AllocFlags {
    align: Align,
    mutability: Mutability,
    all_zero: bool,
}

impl<E: Encoder> Encodable<E> for AllocFlags {
    fn encode(&self, encoder: &mut E) {
        // Compile-time check that the alignment exponent always fits in the 6 bits used below.
        const {
            let max_supported_align_repr = u8::MAX >> 2;
            let max_supported_align = 1 << max_supported_align_repr;
            assert!(Align::MAX.bytes() <= max_supported_align)
        }

        // Pack the alignment exponent into the low 6 bits and the two boolean flags into the
        // top 2 bits of a single byte.
        let mut flags = self.align.bytes().trailing_zeros() as u8;
        flags |= match self.mutability {
            Mutability::Not => 0,
            Mutability::Mut => 1 << 6,
        };
        flags |= (self.all_zero as u8) << 7;
        flags.encode(encoder);
    }
}

impl<D: Decoder> Decodable<D> for AllocFlags {
    fn decode(decoder: &mut D) -> Self {
        let flags: u8 = Decodable::decode(decoder);
        let align = flags & 0b0011_1111;
        let mutability = flags & 0b0100_0000;
        let all_zero = flags & 0b1000_0000;

        let align = Align::from_bytes(1 << align).unwrap();
        let mutability = match mutability {
            0 => Mutability::Not,
            _ => Mutability::Mut,
        };
        let all_zero = all_zero > 0;

        AllocFlags { align, mutability, all_zero }
    }
}

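/// Returns `true` if all bytes in `buf` are zero. The early returns handle the empty and
/// obviously-nonzero cases cheaply; the final fold deliberately avoids short-circuiting so the
/// loop stays branch-free.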
#[inline]
fn all_zero(buf: &[u8]) -> bool {
    if buf.is_empty() {
        return true;
    }
    if buf[0] != 0 {
        return false;
    }

    buf.iter().fold(true, |acc, b| acc & (*b == 0))
}

impl<Prov: Provenance, Extra, E: Encoder> Encodable<E> for Allocation<Prov, Extra, Box<[u8]>>
where
    ProvenanceMap<Prov>: Encodable<E>,
    Extra: Encodable<E>,
{
    fn encode(&self, encoder: &mut E) {
        let all_zero = all_zero(&self.bytes);
        AllocFlags { align: self.align, mutability: self.mutability, all_zero }.encode(encoder);

        encoder.emit_usize(self.bytes.len());
        if !all_zero {
            encoder.emit_raw_bytes(&self.bytes);
        }
        self.provenance.encode(encoder);
        self.init_mask.encode(encoder);
        self.extra.encode(encoder);
    }
}

impl<Prov: Provenance, Extra, D: Decoder> Decodable<D> for Allocation<Prov, Extra, Box<[u8]>>
where
    ProvenanceMap<Prov>: Decodable<D>,
    Extra: Decodable<D>,
{
    fn decode(decoder: &mut D) -> Self {
        let AllocFlags { align, mutability, all_zero } = Decodable::decode(decoder);

        let len = decoder.read_usize();
        let bytes = if all_zero { vec![0u8; len] } else { decoder.read_raw_bytes(len).to_vec() };
        let bytes = <Box<[u8]> as AllocBytes>::from_bytes(bytes, align, ());

        let provenance = Decodable::decode(decoder);
        let init_mask = Decodable::decode(decoder);
        let extra = Decodable::decode(decoder);

        Self { bytes, provenance, init_mask, align, mutability, extra }
    }
}

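/// When hashing a large allocation, only this many bytes from its start and this many from its
/// end are fed to the hasher; see the `Hash` impl below.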
const MAX_BYTES_TO_HASH: usize = 64;

/// Buffers no longer than this are hashed in full; larger ones are only partially hashed.
const MAX_HASHED_BUFFER_LEN: usize = 2 * MAX_BYTES_TO_HASH;

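// Allocations are hashed when interning them, and they can be large. To keep that cheap, byte
// buffers longer than `MAX_HASHED_BUFFER_LEN` are hashed only partially: their length plus a
// prefix and a suffix of `MAX_BYTES_TO_HASH` bytes each.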
impl hash::Hash for Allocation {
    fn hash<H: hash::Hasher>(&self, state: &mut H) {
        let Self {
            bytes,
            provenance,
            init_mask,
            align,
            mutability,
            extra: (),
        } = self;

        let byte_count = bytes.len();
        if byte_count > MAX_HASHED_BUFFER_LEN {
            byte_count.hash(state);

            bytes[..MAX_BYTES_TO_HASH].hash(state);
            bytes[byte_count - MAX_BYTES_TO_HASH..].hash(state);
        } else {
            bytes.hash(state);
        }

        provenance.hash(state);
        init_mask.hash(state);
        align.hash(state);
        mutability.hash(state);
    }
}

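/// A wrapper around an interned `Allocation`, as stored in the `tcx`; `inner` gives access to
/// the underlying `Allocation`.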
#[derive(Copy, Clone, PartialEq, Eq, Hash, HashStable)]
#[rustc_pass_by_value]
pub struct ConstAllocation<'tcx>(pub Interned<'tcx, Allocation>);

impl<'tcx> fmt::Debug for ConstAllocation<'tcx> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "ConstAllocation {{ .. }}")
    }
}

impl<'tcx> ConstAllocation<'tcx> {
    pub fn inner(self) -> &'tcx Allocation {
        self.0.0
    }
}

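/// Errors for allocation-level operations. This type deliberately does not carry an `AllocId`;
/// that information is only added when converting to an `InterpErrorKind` via `to_interp_error`.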
#[derive(Debug)]
pub enum AllocError {
    /// A scalar had an unexpected size.
    ScalarSizeMismatch(ScalarSizeMismatch),
    /// Tried to read (part of) a pointer as raw bytes / an integer.
    ReadPointerAsInt(Option<BadBytesAccess>),
    /// Read a pointer whose per-byte provenance cannot be reassembled into a single pointer.
    ReadPartialPointer(Size),
    /// Used uninitialized bytes where initialized data was required.
    InvalidUninitBytes(Option<BadBytesAccess>),
}
pub type AllocResult<T = ()> = Result<T, AllocError>;

impl From<ScalarSizeMismatch> for AllocError {
    fn from(s: ScalarSizeMismatch) -> Self {
        AllocError::ScalarSizeMismatch(s)
    }
}

impl AllocError {
    pub fn to_interp_error<'tcx>(self, alloc_id: AllocId) -> InterpErrorKind<'tcx> {
        use AllocError::*;
        match self {
            ScalarSizeMismatch(s) => {
                InterpErrorKind::UndefinedBehavior(UndefinedBehaviorInfo::ScalarSizeMismatch(s))
            }
            ReadPointerAsInt(info) => InterpErrorKind::Unsupported(
                UnsupportedOpInfo::ReadPointerAsInt(info.map(|b| (alloc_id, b))),
            ),
            ReadPartialPointer(offset) => InterpErrorKind::Unsupported(
                UnsupportedOpInfo::ReadPartialPointer(Pointer::new(alloc_id, offset)),
            ),
            InvalidUninitBytes(info) => InterpErrorKind::UndefinedBehavior(
                UndefinedBehaviorInfo::InvalidUninitBytes(info.map(|b| (alloc_id, b))),
            ),
        }
    }
}

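/// The information that makes up a memory access: offset and size.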
#[derive(Copy, Clone)]
pub struct AllocRange {
    pub start: Size,
    pub size: Size,
}

impl fmt::Debug for AllocRange {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "[{:#x}..{:#x}]", self.start.bytes(), self.end().bytes())
    }
}

#[inline(always)]
pub fn alloc_range(start: Size, size: Size) -> AllocRange {
    AllocRange { start, size }
}

impl From<Range<Size>> for AllocRange {
    #[inline]
    fn from(r: Range<Size>) -> Self {
        alloc_range(r.start, r.end - r.start)
    }
}

impl From<Range<usize>> for AllocRange {
    #[inline]
    fn from(r: Range<usize>) -> Self {
        AllocRange::from(Size::from_bytes(r.start)..Size::from_bytes(r.end))
    }
}

impl AllocRange {
    #[inline(always)]
    pub fn end(self) -> Size {
        self.start + self.size
    }

    #[inline]
    pub fn subrange(self, subrange: AllocRange) -> AllocRange {
        let sub_start = self.start + subrange.start;
        let range = alloc_range(sub_start, subrange.size);
        assert!(range.end() <= self.end(), "access outside the bounds for given AllocRange");
        range
    }
}

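/// Whether a newly created allocation should be initialized with zero bytes or left
/// uninitialized.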
pub enum AllocInit {
    Uninit,
    Zero,
}

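/// Constructors. These all produce an `Allocation` with no `extra`; machine-specific data can be
/// attached afterwards via `with_extra`.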
impl<Prov: Provenance, Bytes: AllocBytes> Allocation<Prov, (), Bytes> {
    pub fn from_bytes<'a>(
        slice: impl Into<Cow<'a, [u8]>>,
        align: Align,
        mutability: Mutability,
        params: <Bytes as AllocBytes>::AllocParams,
    ) -> Self {
        let bytes = Bytes::from_bytes(slice, align, params);
        let size = Size::from_bytes(bytes.len());
        Self {
            bytes,
            provenance: ProvenanceMap::new(),
            init_mask: InitMask::new(size, true),
            align,
            mutability,
            extra: (),
        }
    }

    pub fn from_bytes_byte_aligned_immutable<'a>(
        slice: impl Into<Cow<'a, [u8]>>,
        params: <Bytes as AllocBytes>::AllocParams,
    ) -> Self {
        Allocation::from_bytes(slice, Align::ONE, Mutability::Not, params)
    }

    fn new_inner<R>(
        size: Size,
        align: Align,
        init: AllocInit,
        params: <Bytes as AllocBytes>::AllocParams,
        fail: impl FnOnce() -> R,
    ) -> Result<Self, R> {
        let bytes = Bytes::zeroed(size, align, params).ok_or_else(fail)?;

        Ok(Allocation {
            bytes,
            provenance: ProvenanceMap::new(),
            init_mask: InitMask::new(
                size,
                match init {
                    AllocInit::Uninit => false,
                    AllocInit::Zero => true,
                },
            ),
            align,
            mutability: Mutability::Mut,
            extra: (),
        })
    }

    /// Try to create an Allocation of `size` bytes, reporting a delayed bug and returning an
    /// interpreter error if the compiler runs out of memory while doing so.
    pub fn try_new<'tcx>(
        size: Size,
        align: Align,
        init: AllocInit,
        params: <Bytes as AllocBytes>::AllocParams,
    ) -> InterpResult<'tcx, Self> {
        Self::new_inner(size, align, init, params, || {
            ty::tls::with(|tcx| tcx.dcx().delayed_bug("exhausted memory during interpretation"));
            InterpErrorKind::ResourceExhaustion(ResourceExhaustionInfo::MemoryExhausted)
        })
        .into()
    }

    /// Create an Allocation of `size` bytes, panicking if the compiler runs out of memory while
    /// doing so.
    pub fn new(
        size: Size,
        align: Align,
        init: AllocInit,
        params: <Bytes as AllocBytes>::AllocParams,
    ) -> Self {
        match Self::new_inner(size, align, init, params, || {
            panic!(
                "interpreter ran out of memory: cannot create allocation of {} bytes",
                size.bytes()
            );
        }) {
            Ok(x) => x,
            Err(x) => x,
        }
    }

    pub fn with_extra<Extra>(self, extra: Extra) -> Allocation<Prov, Extra, Bytes> {
        Allocation {
            bytes: self.bytes,
            provenance: self.provenance,
            init_mask: self.init_mask,
            align: self.align,
            mutability: self.mutability,
            extra,
        }
    }
}

impl Allocation {
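    /// Adjust an allocation that originates in the `tcx` for use by a machine with a different
    /// `Provenance` and `Bytes` type: `alloc_bytes` rebuilds the byte storage, and `adjust_ptr`
    /// is called to translate every pointer stored in the allocation.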
    pub fn adjust_from_tcx<'tcx, Prov: Provenance, Bytes: AllocBytes>(
        &self,
        cx: &impl HasDataLayout,
        alloc_bytes: impl FnOnce(&[u8], Align) -> InterpResult<'tcx, Bytes>,
        mut adjust_ptr: impl FnMut(Pointer<CtfeProvenance>) -> InterpResult<'tcx, Pointer<Prov>>,
    ) -> InterpResult<'tcx, Allocation<Prov, (), Bytes>> {
        let mut bytes = alloc_bytes(&*self.bytes, self.align)?;
        let mut new_provenance = Vec::with_capacity(self.provenance.ptrs().len());
        let ptr_size = cx.data_layout().pointer_size().bytes_usize();
        let endian = cx.data_layout().endian;
        for &(offset, alloc_id) in self.provenance.ptrs().iter() {
            let idx = offset.bytes_usize();
            let ptr_bytes = &mut bytes[idx..idx + ptr_size];
            let bits = read_target_uint(endian, ptr_bytes).unwrap();
            let (ptr_prov, ptr_offset) =
                adjust_ptr(Pointer::new(alloc_id, Size::from_bytes(bits)))?.into_raw_parts();
            write_target_uint(endian, ptr_bytes, ptr_offset.bytes().into()).unwrap();
            new_provenance.push((offset, ptr_prov));
        }
        interp_ok(Allocation {
            bytes,
            provenance: ProvenanceMap::from_presorted_ptrs(new_provenance),
            init_mask: self.init_mask.clone(),
            align: self.align,
            mutability: self.mutability,
            extra: self.extra,
        })
    }
}

impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes> {
    pub fn len(&self) -> usize {
        self.bytes.len()
    }

    pub fn size(&self) -> Size {
        Size::from_bytes(self.len())
    }

    pub fn inspect_with_uninit_and_ptr_outside_interpreter(&self, range: Range<usize>) -> &[u8] {
        &self.bytes[range]
    }

    pub fn init_mask(&self) -> &InitMask {
        &self.init_mask
    }

    pub fn provenance(&self) -> &ProvenanceMap<Prov> {
        &self.provenance
    }
}

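/// Byte accessors.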
impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes> {
    #[inline]
    pub fn get_bytes_unchecked(&self, range: AllocRange) -> &[u8] {
        &self.bytes[range.start.bytes_usize()..range.end().bytes_usize()]
    }

    #[inline]
    pub fn get_bytes_strip_provenance(
        &self,
        cx: &impl HasDataLayout,
        range: AllocRange,
    ) -> AllocResult<&[u8]> {
        self.init_mask.is_range_initialized(range).map_err(|uninit_range| {
            AllocError::InvalidUninitBytes(Some(BadBytesAccess {
                access: range,
                bad: uninit_range,
            }))
        })?;
        if !Prov::OFFSET_IS_ADDR && !self.provenance.range_empty(range, cx) {
            // Find the provenance that overlaps this range.
            let (offset, _prov) = self
                .provenance
                .range_ptrs_get(range, cx)
                .first()
                .copied()
                .expect("there must be provenance somewhere here");
            let start = offset.max(range.start); // the pointer might begin before `range`
            let end = (offset + cx.pointer_size()).min(range.end()); // or end after it
            return Err(AllocError::ReadPointerAsInt(Some(BadBytesAccess {
                access: range,
                bad: AllocRange::from(start..end),
            })));
        }
        Ok(self.get_bytes_unchecked(range))
    }

    pub fn get_bytes_unchecked_for_overwrite(
        &mut self,
        cx: &impl HasDataLayout,
        range: AllocRange,
    ) -> &mut [u8] {
        self.mark_init(range, true);
        self.provenance.clear(range, cx);

        &mut self.bytes[range.start.bytes_usize()..range.end().bytes_usize()]
    }

    pub fn get_bytes_unchecked_for_overwrite_ptr(
        &mut self,
        cx: &impl HasDataLayout,
        range: AllocRange,
    ) -> *mut [u8] {
        self.mark_init(range, true);
        self.provenance.clear(range, cx);

        assert!(range.end().bytes_usize() <= self.bytes.len()); // need to do our own bounds-check
        let begin_ptr = self.bytes.as_mut_ptr().wrapping_add(range.start.bytes_usize());
        let len = range.end().bytes_usize() - range.start.bytes_usize();
        ptr::slice_from_raw_parts_mut(begin_ptr, len)
    }

    pub fn get_bytes_unchecked_raw_mut(&mut self) -> *mut u8 {
        assert!(Prov::OFFSET_IS_ADDR);
        self.bytes.as_mut_ptr()
    }

    pub fn get_bytes_unchecked_raw(&self) -> *const u8 {
        assert!(Prov::OFFSET_IS_ADDR);
        self.bytes.as_ptr()
    }
}

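/// Reading and writing.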
impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes> {
    fn mark_init(&mut self, range: AllocRange, is_init: bool) {
        if range.size.bytes() == 0 {
            return;
        }
        assert!(self.mutability == Mutability::Mut);
        self.init_mask.set_range(range, is_init);
    }

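    /// Reads a scalar of size `range.size` from this allocation.
    ///
    /// If `read_provenance` is `true`, the range must be exactly pointer-sized and any provenance
    /// found is attached to the result (joining per-byte fragments where necessary). Otherwise,
    /// provenance in the range makes the read fail unless `Prov::OFFSET_IS_ADDR`.
    ///
    /// Bounds checks are the caller's responsibility; uninitialized bytes cause an error.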
    pub fn read_scalar(
        &self,
        cx: &impl HasDataLayout,
        range: AllocRange,
        read_provenance: bool,
    ) -> AllocResult<Scalar<Prov>> {
        // If anything in the range is uninitialized, bail out.
        if let Err(bad) = self.init_mask.is_range_initialized(range) {
            return Err(AllocError::InvalidUninitBytes(Some(BadBytesAccess {
                access: range,
                bad,
            })));
        }

        // Get the integer part of the result; provenance is handled below.
        let bytes = self.get_bytes_unchecked(range);
        let bits = read_target_uint(cx.data_layout().endian, bytes).unwrap();

        if read_provenance {
            assert_eq!(range.size, cx.data_layout().pointer_size());

            // Easy case: a whole pointer is stored right at the start of the range.
            if let Some(prov) = self.provenance.get_ptr(range.start) {
                let ptr = Pointer::new(prov, Size::from_bytes(bits));
                return Ok(Scalar::from_pointer(ptr, cx));
            }
            // The other easy case: no provenance anywhere in the range.
            if self.provenance.range_empty(range, cx) {
                return Ok(Scalar::from_uint(bits, range.size));
            }
            // Otherwise, try to join the per-byte provenance fragments back into one pointer.
            let prov = 'prov: {
                // The first byte must carry fragment index 0.
                let Some((mut joint_prov, 0)) = self.provenance.get_byte(range.start, cx) else {
                    break 'prov None;
                };
                for offset in Size::from_bytes(1)..range.size {
                    let Some((frag_prov, frag_idx)) =
                        self.provenance.get_byte(range.start + offset, cx)
                    else {
                        break 'prov None;
                    };
                    // Each fragment must sit at its matching index, unless it is a wildcard.
                    if u64::from(frag_idx) != offset.bytes() && Some(frag_prov) != Prov::WILDCARD {
                        break 'prov None;
                    }
                    joint_prov = match Prov::join(joint_prov, frag_prov) {
                        Some(prov) => prov,
                        None => break 'prov None,
                    };
                }
                break 'prov Some(joint_prov);
            };
            if prov.is_none() && !Prov::OFFSET_IS_ADDR {
                return Err(AllocError::ReadPartialPointer(range.start));
            }
            let ptr = Pointer::new(prov, Size::from_bytes(bits));
            return Ok(Scalar::from_maybe_pointer(ptr, cx));
        } else {
            // Reading as an integer: this is fine if there is no provenance in the range, or if
            // offsets are absolute addresses (then the provenance can simply be dropped).
            if Prov::OFFSET_IS_ADDR || self.provenance.range_empty(range, cx) {
                return Ok(Scalar::from_uint(bits, range.size));
            }
            return Err(AllocError::ReadPointerAsInt(None));
        }
    }

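    /// Writes a scalar of size `range.size` to this allocation. If the value carries provenance,
    /// the range must be exactly pointer-sized and the provenance is recorded for the written
    /// bytes. The allocation must be mutable, and bounds checks are the caller's responsibility.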
    pub fn write_scalar(
        &mut self,
        cx: &impl HasDataLayout,
        range: AllocRange,
        val: Scalar<Prov>,
    ) -> AllocResult {
        assert!(self.mutability == Mutability::Mut);

        let (bytes, provenance) = match val.to_bits_or_ptr_internal(range.size)? {
            Right(ptr) => {
                let (provenance, offset) = ptr.into_raw_parts();
                (u128::from(offset.bytes()), Some(provenance))
            }
            Left(data) => (data, None),
        };

        let endian = cx.data_layout().endian;
        let dst = self.get_bytes_unchecked_for_overwrite(cx, range);
        write_target_uint(endian, dst, bytes).unwrap();

        if let Some(provenance) = provenance {
            assert_eq!(range.size, cx.data_layout().pointer_size());
            self.provenance.insert_ptr(range.start, provenance, cx);
        }

        Ok(())
    }

    pub fn write_uninit(&mut self, cx: &impl HasDataLayout, range: AllocRange) {
        self.mark_init(range, false);
        self.provenance.clear(range, cx);
    }

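    /// Marks the given range (or the entire allocation if `range` is `None`) as initialized and
    /// resets its provenance to wildcards. This is intended for memory that foreign (native) code
    /// may have written to, where precise tracking is impossible.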
    pub fn process_native_write(&mut self, cx: &impl HasDataLayout, range: Option<AllocRange>) {
        let range = range.unwrap_or_else(|| AllocRange {
            start: Size::ZERO,
            size: Size::from_bytes(self.len()),
        });
        self.mark_init(range, true);
        self.provenance.write_wildcards(cx, range);
    }

    pub fn clear_provenance(&mut self, cx: &impl HasDataLayout, range: AllocRange) {
        self.provenance.clear(range, cx);
    }

    pub fn provenance_merge_bytes(&mut self, cx: &impl HasDataLayout) -> bool {
        self.provenance.merge_bytes(cx)
    }

    pub fn provenance_apply_copy(&mut self, copy: ProvenanceCopy<Prov>) {
        self.provenance.apply_copy(copy)
    }

    pub fn init_mask_apply_copy(&mut self, copy: InitCopy, range: AllocRange, repeat: u64) {
        self.init_mask.apply_copy(copy, range, repeat)
    }
}