mod init_mask;
mod provenance_map;

use std::borrow::Cow;
use std::hash::Hash;
use std::ops::{Deref, DerefMut, Range};
use std::{fmt, hash, ptr};

use either::{Left, Right};
use init_mask::*;
pub use init_mask::{InitChunk, InitChunkIter};
use provenance_map::*;
use rustc_abi::{Align, HasDataLayout, Size};
use rustc_ast::Mutability;
use rustc_data_structures::intern::Interned;
use rustc_macros::HashStable;
use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};

use super::{
    AllocId, BadBytesAccess, CtfeProvenance, InterpErrorKind, InterpResult, Pointer,
    PointerArithmetic, Provenance, ResourceExhaustionInfo, Scalar, ScalarSizeMismatch,
    UndefinedBehaviorInfo, UnsupportedOpInfo, interp_ok, read_target_uint, write_target_uint,
};
use crate::ty;

/// Functionality required for the bytes of an `Allocation`.
pub trait AllocBytes: Clone + fmt::Debug + Deref<Target = [u8]> + DerefMut<Target = [u8]> {
    /// The type of extra parameters passed in when creating an allocation.
    type AllocParams;

    /// Create an `AllocBytes` from a slice of `u8`.
    fn from_bytes<'a>(
        slice: impl Into<Cow<'a, [u8]>>,
        _align: Align,
        _params: Self::AllocParams,
    ) -> Self;

    /// Create a zeroed `AllocBytes` of the specified size and alignment.
    /// Returns `None` if we ran out of memory on the host.
    fn zeroed(size: Size, _align: Align, _params: Self::AllocParams) -> Option<Self>;

    /// Gives direct, mutable access to the raw underlying storage.
    fn as_mut_ptr(&mut self) -> *mut u8;

    /// Gives direct access to the raw underlying storage.
    fn as_ptr(&self) -> *const u8;
}

/// Default `bytes` for `Allocation` is a `Box<[u8]>`.
impl AllocBytes for Box<[u8]> {
    type AllocParams = ();

    fn from_bytes<'a>(slice: impl Into<Cow<'a, [u8]>>, _align: Align, _params: ()) -> Self {
        Box::<[u8]>::from(slice.into())
    }

    fn zeroed(size: Size, _align: Align, _params: ()) -> Option<Self> {
        let bytes = Box::<[u8]>::try_new_zeroed_slice(size.bytes().try_into().ok()?).ok()?;
        // SAFETY: the box was zero-allocated, which is a valid initial value for `Box<[u8]>`.
        let bytes = unsafe { bytes.assume_init() };
        Some(bytes)
    }

    fn as_mut_ptr(&mut self) -> *mut u8 {
        Box::as_mut_ptr(self).cast()
    }

    fn as_ptr(&self) -> *const u8 {
        Box::as_ptr(self).cast()
    }
}

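// A minimal sketch (not part of the original source) showing how the `AllocBytes` impl for
// `Box<[u8]>` above can be exercised; the module name is made up for illustration.
#[cfg(test)]
mod alloc_bytes_box_sketch {
    use super::*;

    #[test]
    fn from_bytes_and_zeroed() {
        // `from_bytes` copies the input slice into a fresh boxed slice.
        let bytes = <Box<[u8]> as AllocBytes>::from_bytes(&[1u8, 2, 3][..], Align::ONE, ());
        assert_eq!(&*bytes, &[1, 2, 3]);

        // `zeroed` hands back an all-zero buffer of the requested size (or `None` on OOM).
        let zeroed =
            <Box<[u8]> as AllocBytes>::zeroed(Size::from_bytes(4u64), Align::ONE, ()).unwrap();
        assert!(zeroed.iter().all(|&b| b == 0));
    }
}
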
/// An actual allocation in the Miri/CTFE core engine.
#[derive(Clone, Eq, PartialEq)]
#[derive(HashStable)]
pub struct Allocation<Prov: Provenance = CtfeProvenance, Extra = (), Bytes = Box<[u8]>> {
    /// The actual bytes of the allocation.
    /// Note that the bytes of a pointer represent the offset of the pointer.
    bytes: Bytes,
    /// Maps from byte addresses to extra provenance data for each pointer.
    provenance: ProvenanceMap<Prov>,
    /// Denotes which parts of this allocation are initialized.
    init_mask: InitMask,
    /// The alignment of the allocation to detect unaligned reads.
    pub align: Align,
    /// Whether the allocation may be modified.
    pub mutability: Mutability,
    /// Extra state for the machine.
    pub extra: Extra,
}

/// Helper struct that packs an `Allocation`'s metadata into a single byte for (de)serialization:
/// the low 6 bits hold `log2(align)`, bit 6 the mutability, and bit 7 the all-zero marker.
struct AllocFlags {
    align: Align,
    mutability: Mutability,
    all_zero: bool,
}

impl<E: Encoder> Encodable<E> for AllocFlags {
    fn encode(&self, encoder: &mut E) {
        // Make sure `log2(Align::MAX)` fits in the low 6 bits, i.e. the high 2 bits stay free
        // for the mutability and all-zero flags.
        const {
            let max_supported_align_repr = u8::MAX >> 2;
            let max_supported_align = 1 << max_supported_align_repr;
            assert!(Align::MAX.bytes() <= max_supported_align)
        }

        let mut flags = self.align.bytes().trailing_zeros() as u8;
        flags |= match self.mutability {
            Mutability::Not => 0,
            Mutability::Mut => 1 << 6,
        };
        flags |= (self.all_zero as u8) << 7;
        flags.encode(encoder);
    }
}

impl<D: Decoder> Decodable<D> for AllocFlags {
    fn decode(decoder: &mut D) -> Self {
        let flags: u8 = Decodable::decode(decoder);
        let align = flags & 0b0011_1111;
        let mutability = flags & 0b0100_0000;
        let all_zero = flags & 0b1000_0000;

        let align = Align::from_bytes(1 << align).unwrap();
        let mutability = match mutability {
            0 => Mutability::Not,
            _ => Mutability::Mut,
        };
        let all_zero = all_zero > 0;

        AllocFlags { align, mutability, all_zero }
    }
}

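// A sketch (not in the original source) that round-trips the bit packing used by the
// `AllocFlags` impls above; the module name is made up.
#[cfg(test)]
mod alloc_flags_bit_layout_sketch {
    use super::*;

    #[test]
    fn pack_and_unpack() {
        // Pack a 16-byte-aligned, mutable, not-all-zero allocation the same way `encode` does.
        let align = Align::from_bytes(16).unwrap();
        let mut flags = align.bytes().trailing_zeros() as u8;
        flags |= 1 << 6; // Mutability::Mut
        flags |= (false as u8) << 7; // not all zero
        assert_eq!(flags, 0b0100_0100);

        // Unpack the same byte the way `decode` does.
        assert_eq!(Align::from_bytes(1 << (flags & 0b0011_1111)).unwrap(), align);
        assert_ne!(flags & 0b0100_0000, 0); // mutable
        assert_eq!(flags & 0b1000_0000, 0); // not all zero
    }
}
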
/// Efficiently detect whether a slice of `u8` is all zero.
///
/// This is used in the encoding of [`Allocation`] to special-case all-zero allocations.
#[inline]
fn all_zero(buf: &[u8]) -> bool {
    // The empty buffer is trivially all zero.
    if buf.is_empty() {
        return true;
    }
    // Fast-reject based on the first element, so non-zero buffers rarely walk the whole slice.
    if buf[0] != 0 {
        return false;
    }

    // Combining all elements with `&` avoids a branch per element and can be vectorized.
    buf.iter().fold(true, |acc, b| acc & (*b == 0))
}

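// A small illustrative check (not in the original source) of the `all_zero` helper above;
// the module name is made up.
#[cfg(test)]
mod all_zero_sketch {
    #[test]
    fn detects_zero_and_nonzero_buffers() {
        assert!(super::all_zero(&[]));
        assert!(super::all_zero(&[0; 16]));
        assert!(!super::all_zero(&[0, 0, 1, 0]));
    }
}
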
impl<Prov: Provenance, Extra, E: Encoder> Encodable<E> for Allocation<Prov, Extra, Box<[u8]>>
where
    ProvenanceMap<Prov>: Encodable<E>,
    Extra: Encodable<E>,
{
    fn encode(&self, encoder: &mut E) {
        let all_zero = all_zero(&self.bytes);
        AllocFlags { align: self.align, mutability: self.mutability, all_zero }.encode(encoder);

        encoder.emit_usize(self.bytes.len());
        if !all_zero {
            encoder.emit_raw_bytes(&self.bytes);
        }
        self.provenance.encode(encoder);
        self.init_mask.encode(encoder);
        self.extra.encode(encoder);
    }
}

impl<Prov: Provenance, Extra, D: Decoder> Decodable<D> for Allocation<Prov, Extra, Box<[u8]>>
where
    ProvenanceMap<Prov>: Decodable<D>,
    Extra: Decodable<D>,
{
    fn decode(decoder: &mut D) -> Self {
        let AllocFlags { align, mutability, all_zero } = Decodable::decode(decoder);

        let len = decoder.read_usize();
        let bytes = if all_zero { vec![0u8; len] } else { decoder.read_raw_bytes(len).to_vec() };
        let bytes = <Box<[u8]> as AllocBytes>::from_bytes(bytes, align, ());

        let provenance = Decodable::decode(decoder);
        let init_mask = Decodable::decode(decoder);
        let extra = Decodable::decode(decoder);

        Self { bytes, provenance, init_mask, align, mutability, extra }
    }
}

/// Maximum number of bytes taken from the start and from the end of a large buffer when hashing
/// (see the `Hash` impl below).
const MAX_BYTES_TO_HASH: usize = 64;

/// Buffers up to this length are hashed in full; longer buffers are hashed partially.
const MAX_HASHED_BUFFER_LEN: usize = 2 * MAX_BYTES_TO_HASH;

impl hash::Hash for Allocation {
    fn hash<H: hash::Hasher>(&self, state: &mut H) {
        let Self {
            bytes,
            provenance,
            init_mask,
            align,
            mutability,
            extra: (), // don't bother hashing ()
        } = self;

        // Partially hash the `bytes` buffer when it is large. To limit collisions with common
        // prefixes and suffixes, hash the length together with the head and tail of the buffer.
        let byte_count = bytes.len();
        if byte_count > MAX_HASHED_BUFFER_LEN {
            // Hash the buffer's length.
            byte_count.hash(state);

            // And its head and tail.
            bytes[..MAX_BYTES_TO_HASH].hash(state);
            bytes[byte_count - MAX_BYTES_TO_HASH..].hash(state);
        } else {
            bytes.hash(state);
        }

        // Hash the other fields as usual.
        provenance.hash(state);
        init_mask.hash(state);
        align.hash(state);
        mutability.hash(state);
    }
}

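// A sketch (not from the original source) demonstrating the truncated hashing above: two large
// allocations that differ only in the middle hash identically, because only the length plus the
// first and last `MAX_BYTES_TO_HASH` bytes feed the hasher. The module name is made up.
#[cfg(test)]
mod partial_hash_sketch {
    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};

    use super::*;

    fn hash_of(alloc: &Allocation) -> u64 {
        let mut hasher = DefaultHasher::new();
        alloc.hash(&mut hasher);
        hasher.finish()
    }

    #[test]
    fn middle_bytes_are_not_hashed() {
        let mut a = vec![0u8; 4 * MAX_BYTES_TO_HASH];
        let mut b = a.clone();
        // The differing byte sits well outside the hashed head and tail.
        a[2 * MAX_BYTES_TO_HASH] = 1;
        b[2 * MAX_BYTES_TO_HASH] = 2;

        let a: Allocation = Allocation::from_bytes_byte_aligned_immutable(a, ());
        let b: Allocation = Allocation::from_bytes_byte_aligned_immutable(b, ());
        assert_eq!(hash_of(&a), hash_of(&b));
    }
}
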
/// An interned allocation, used for constants. Because it is interned, comparing and hashing a
/// `ConstAllocation` only looks at the pointer, not the (potentially large) contents.
#[derive(Copy, Clone, PartialEq, Eq, Hash, HashStable)]
#[rustc_pass_by_value]
pub struct ConstAllocation<'tcx>(pub Interned<'tcx, Allocation>);

impl<'tcx> fmt::Debug for ConstAllocation<'tcx> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // The debug representation of the inner allocation is verbose and rarely useful,
        // so only print a placeholder.
        write!(f, "ConstAllocation {{ .. }}")
    }
}

impl<'tcx> ConstAllocation<'tcx> {
    pub fn inner(self) -> &'tcx Allocation {
        self.0.0
    }
}

/// Error type for operations on an `Allocation`. It does not carry an `AllocId`; that is only
/// added when converting to an interpreter error via `to_interp_error`.
#[derive(Debug)]
pub enum AllocError {
    /// A scalar had the wrong size.
    ScalarSizeMismatch(ScalarSizeMismatch),
    /// Encountered a pointer where we needed raw bytes.
    ReadPointerAsInt(Option<BadBytesAccess>),
    /// Partially overwriting a pointer.
    OverwritePartialPointer(Size),
    /// Reading only part of a pointer.
    ReadPartialPointer(Size),
    /// Using uninitialized data where it is not allowed.
    InvalidUninitBytes(Option<BadBytesAccess>),
}
pub type AllocResult<T = ()> = Result<T, AllocError>;

impl From<ScalarSizeMismatch> for AllocError {
    fn from(s: ScalarSizeMismatch) -> Self {
        AllocError::ScalarSizeMismatch(s)
    }
}

impl AllocError {
    pub fn to_interp_error<'tcx>(self, alloc_id: AllocId) -> InterpErrorKind<'tcx> {
        use AllocError::*;
        match self {
            ScalarSizeMismatch(s) => {
                InterpErrorKind::UndefinedBehavior(UndefinedBehaviorInfo::ScalarSizeMismatch(s))
            }
            ReadPointerAsInt(info) => InterpErrorKind::Unsupported(
                UnsupportedOpInfo::ReadPointerAsInt(info.map(|b| (alloc_id, b))),
            ),
            OverwritePartialPointer(offset) => InterpErrorKind::Unsupported(
                UnsupportedOpInfo::OverwritePartialPointer(Pointer::new(alloc_id, offset)),
            ),
            ReadPartialPointer(offset) => InterpErrorKind::Unsupported(
                UnsupportedOpInfo::ReadPartialPointer(Pointer::new(alloc_id, offset)),
            ),
            InvalidUninitBytes(info) => InterpErrorKind::UndefinedBehavior(
                UndefinedBehaviorInfo::InvalidUninitBytes(info.map(|b| (alloc_id, b))),
            ),
        }
    }
}

/// The information that makes up a memory access: offset and size.
#[derive(Copy, Clone)]
pub struct AllocRange {
    pub start: Size,
    pub size: Size,
}

impl fmt::Debug for AllocRange {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "[{:#x}..{:#x}]", self.start.bytes(), self.end().bytes())
    }
}

/// Free-standing constructor for less syntactic overhead.
#[inline(always)]
pub fn alloc_range(start: Size, size: Size) -> AllocRange {
    AllocRange { start, size }
}

impl From<Range<Size>> for AllocRange {
    #[inline]
    fn from(r: Range<Size>) -> Self {
        alloc_range(r.start, r.end - r.start) // `Size` subtraction (overflow-checked)
    }
}

impl From<Range<usize>> for AllocRange {
    #[inline]
    fn from(r: Range<usize>) -> Self {
        AllocRange::from(Size::from_bytes(r.start)..Size::from_bytes(r.end))
    }
}

impl AllocRange {
    #[inline(always)]
    pub fn end(self) -> Size {
        self.start + self.size // This does overflow checking.
    }

    /// Returns the `subrange` within this range; panics if it is not a subrange.
    #[inline]
    pub fn subrange(self, subrange: AllocRange) -> AllocRange {
        let sub_start = self.start + subrange.start;
        let range = alloc_range(sub_start, subrange.size);
        assert!(range.end() <= self.end(), "access outside the bounds for given AllocRange");
        range
    }
}

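// Illustrative sketch (not part of the original source) of `alloc_range` and `subrange`;
// the module name is made up.
#[cfg(test)]
mod alloc_range_sketch {
    use super::*;

    #[test]
    fn subrange_is_relative_to_its_parent() {
        // `parent` covers bytes 8..16 of some allocation.
        let parent = alloc_range(Size::from_bytes(8u64), Size::from_bytes(8u64));
        // A subrange's `start` is an offset *within* the parent, so this selects bytes 10..14.
        let sub = parent.subrange(alloc_range(Size::from_bytes(2u64), Size::from_bytes(4u64)));
        assert_eq!(sub.start, Size::from_bytes(10u64));
        assert_eq!(sub.end(), Size::from_bytes(14u64));

        // Plain `Range`s convert directly.
        let from_usize = AllocRange::from(4usize..8usize);
        assert_eq!(from_usize.size, Size::from_bytes(4u64));
    }
}
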
/// How a freshly created allocation's contents should be initialized.
pub enum AllocInit {
    Uninit,
    Zero,
}

// The constructors below build allocations without `extra`; it can be attached later
// via `with_extra`.
impl<Prov: Provenance, Bytes: AllocBytes> Allocation<Prov, (), Bytes> {
    /// Creates an allocation initialized by the given bytes.
    pub fn from_bytes<'a>(
        slice: impl Into<Cow<'a, [u8]>>,
        align: Align,
        mutability: Mutability,
        params: <Bytes as AllocBytes>::AllocParams,
    ) -> Self {
        let bytes = Bytes::from_bytes(slice, align, params);
        let size = Size::from_bytes(bytes.len());
        Self {
            bytes,
            provenance: ProvenanceMap::new(),
            init_mask: InitMask::new(size, true),
            align,
            mutability,
            extra: (),
        }
    }

    /// Creates an immutable, byte-aligned allocation initialized by the given bytes.
    pub fn from_bytes_byte_aligned_immutable<'a>(
        slice: impl Into<Cow<'a, [u8]>>,
        params: <Bytes as AllocBytes>::AllocParams,
    ) -> Self {
        Allocation::from_bytes(slice, Align::ONE, Mutability::Not, params)
    }

    fn new_inner<R>(
        size: Size,
        align: Align,
        init: AllocInit,
        params: <Bytes as AllocBytes>::AllocParams,
        fail: impl FnOnce() -> R,
    ) -> Result<Self, R> {
        // If the host cannot provide this much memory, report the failure to the caller
        // instead of aborting.
        let bytes = Bytes::zeroed(size, align, params).ok_or_else(fail)?;

        Ok(Allocation {
            bytes,
            provenance: ProvenanceMap::new(),
            init_mask: InitMask::new(
                size,
                match init {
                    AllocInit::Uninit => false,
                    AllocInit::Zero => true,
                },
            ),
            align,
            mutability: Mutability::Mut,
            extra: (),
        })
    }

    /// Try to create an allocation of `size` bytes, failing if there is not enough memory
    /// available to the compiler to do so.
    pub fn try_new<'tcx>(
        size: Size,
        align: Align,
        init: AllocInit,
        params: <Bytes as AllocBytes>::AllocParams,
    ) -> InterpResult<'tcx, Self> {
        Self::new_inner(size, align, init, params, || {
            ty::tls::with(|tcx| tcx.dcx().delayed_bug("exhausted memory during interpretation"));
            InterpErrorKind::ResourceExhaustion(ResourceExhaustionInfo::MemoryExhausted)
        })
        .into()
    }

    /// Create an allocation of `size` bytes, panicking if there is not enough memory
    /// available to the compiler to do so.
    pub fn new(
        size: Size,
        align: Align,
        init: AllocInit,
        params: <Bytes as AllocBytes>::AllocParams,
    ) -> Self {
        match Self::new_inner(size, align, init, params, || {
            panic!(
                "interpreter ran out of memory: cannot create allocation of {} bytes",
                size.bytes()
            );
        }) {
            Ok(x) => x,
            Err(x) => x,
        }
    }

    /// Add the extra state.
    pub fn with_extra<Extra>(self, extra: Extra) -> Allocation<Prov, Extra, Bytes> {
        Allocation {
            bytes: self.bytes,
            provenance: self.provenance,
            init_mask: self.init_mask,
            align: self.align,
            mutability: self.mutability,
            extra,
        }
    }
}

508
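// A sketch (not in the original source) of the constructors above, using the default
// `Box<[u8]>` bytes whose `AllocParams` is `()`. The module name is made up.
#[cfg(test)]
mod allocation_ctor_sketch {
    use super::*;

    #[test]
    fn fresh_allocations() {
        // From existing bytes: everything is marked as initialized.
        let a: Allocation = Allocation::from_bytes(vec![7u8; 4], Align::ONE, Mutability::Not, ());
        assert_eq!(a.len(), 4);

        // Zero-initialized: the init mask covers the whole allocation.
        let z: Allocation =
            Allocation::new(Size::from_bytes(4u64), Align::ONE, AllocInit::Zero, ());
        assert!(z.init_mask().is_range_initialized(alloc_range(Size::ZERO, z.size())).is_ok());

        // Uninitialized: reads anywhere in the allocation would be rejected.
        let u: Allocation =
            Allocation::new(Size::from_bytes(4u64), Align::ONE, AllocInit::Uninit, ());
        assert!(u.init_mask().is_range_initialized(alloc_range(Size::ZERO, u.size())).is_err());
    }
}
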
impl Allocation {
    /// Adjust an allocation from the ones stored in `tcx` to a custom machine instance
    /// with a different `Provenance` and `Bytes` type.
    pub fn adjust_from_tcx<'tcx, Prov: Provenance, Bytes: AllocBytes>(
        &self,
        cx: &impl HasDataLayout,
        alloc_bytes: impl FnOnce(&[u8], Align) -> InterpResult<'tcx, Bytes>,
        mut adjust_ptr: impl FnMut(Pointer<CtfeProvenance>) -> InterpResult<'tcx, Pointer<Prov>>,
    ) -> InterpResult<'tcx, Allocation<Prov, (), Bytes>> {
        // Copy the data.
        let mut bytes = alloc_bytes(&*self.bytes, self.align)?;
        // Adjust the provenance of all pointers stored in the allocation, rewriting their
        // offset bytes in place.
        let mut new_provenance = Vec::with_capacity(self.provenance.ptrs().len());
        let ptr_size = cx.data_layout().pointer_size.bytes_usize();
        let endian = cx.data_layout().endian;
        for &(offset, alloc_id) in self.provenance.ptrs().iter() {
            let idx = offset.bytes_usize();
            let ptr_bytes = &mut bytes[idx..idx + ptr_size];
            let bits = read_target_uint(endian, ptr_bytes).unwrap();
            let (ptr_prov, ptr_offset) =
                adjust_ptr(Pointer::new(alloc_id, Size::from_bytes(bits)))?.into_parts();
            write_target_uint(endian, ptr_bytes, ptr_offset.bytes().into()).unwrap();
            new_provenance.push((offset, ptr_prov));
        }
        // Create the new allocation.
        interp_ok(Allocation {
            bytes,
            provenance: ProvenanceMap::from_presorted_ptrs(new_provenance),
            init_mask: self.init_mask.clone(),
            align: self.align,
            mutability: self.mutability,
            extra: self.extra,
        })
    }
}

/// Raw accessors. Provide access to otherwise private bytes.
impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes> {
    pub fn len(&self) -> usize {
        self.bytes.len()
    }

    pub fn size(&self) -> Size {
        Size::from_bytes(self.len())
    }

    /// Looks at a section of raw bytes, without checking the init mask or provenance.
    /// This must not be used for reads affecting the interpreter execution.
    pub fn inspect_with_uninit_and_ptr_outside_interpreter(&self, range: Range<usize>) -> &[u8] {
        &self.bytes[range]
    }

    /// Returns the mask indicating which bytes are initialized.
    pub fn init_mask(&self) -> &InitMask {
        &self.init_mask
    }

    /// Returns the provenance map.
    pub fn provenance(&self) -> &ProvenanceMap<Prov> {
        &self.provenance
    }
}

/// Byte accessors.
impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes> {
    /// Grabs the raw bytes in the given range, without checking the init mask or provenance.
    #[inline]
    pub fn get_bytes_unchecked(&self, range: AllocRange) -> &[u8] {
        &self.bytes[range.start.bytes_usize()..range.end().bytes_usize()]
    }

    /// Checks that these bytes are initialized, then strips provenance (if possible) and
    /// returns them.
    #[inline]
    pub fn get_bytes_strip_provenance(
        &self,
        cx: &impl HasDataLayout,
        range: AllocRange,
    ) -> AllocResult<&[u8]> {
        self.init_mask.is_range_initialized(range).map_err(|uninit_range| {
            AllocError::InvalidUninitBytes(Some(BadBytesAccess {
                access: range,
                bad: uninit_range,
            }))
        })?;
        if !Prov::OFFSET_IS_ADDR && !self.provenance.range_empty(range, cx) {
            // Find the provenance that got in the way.
            let (offset, _prov) = self
                .provenance
                .range_ptrs_get(range, cx)
                .first()
                .copied()
                .expect("there must be provenance somewhere here");
            let start = offset.max(range.start); // the pointer might begin before `range`
            let end = (offset + cx.pointer_size()).min(range.end()); // the pointer might end after `range`
            return Err(AllocError::ReadPointerAsInt(Some(BadBytesAccess {
                access: range,
                bad: AllocRange::from(start..end),
            })));
        }
        Ok(self.get_bytes_unchecked(range))
    }

    /// Returns mutable access to bytes that are about to be completely overwritten.
    /// Just calling this already marks the range as initialized and removes its provenance,
    /// so be sure to actually overwrite all the data there!
    pub fn get_bytes_unchecked_for_overwrite(
        &mut self,
        cx: &impl HasDataLayout,
        range: AllocRange,
    ) -> AllocResult<&mut [u8]> {
        self.mark_init(range, true);
        self.provenance.clear(range, cx)?;

        Ok(&mut self.bytes[range.start.bytes_usize()..range.end().bytes_usize()])
    }

    /// Raw-pointer variant of `get_bytes_unchecked_for_overwrite`.
    pub fn get_bytes_unchecked_for_overwrite_ptr(
        &mut self,
        cx: &impl HasDataLayout,
        range: AllocRange,
    ) -> AllocResult<*mut [u8]> {
        self.mark_init(range, true);
        self.provenance.clear(range, cx)?;

        assert!(range.end().bytes_usize() <= self.bytes.len()); // need to do our own bounds-check
        let begin_ptr = self.bytes.as_mut_ptr().wrapping_add(range.start.bytes_usize());
        let len = range.end().bytes_usize() - range.start.bytes_usize();
        Ok(ptr::slice_from_raw_parts_mut(begin_ptr, len))
    }

    /// Gives direct mutable access to the raw contents of the entire buffer.
    /// Since this bypasses provenance tracking, it is only allowed when `OFFSET_IS_ADDR`.
    pub fn get_bytes_unchecked_raw_mut(&mut self) -> *mut u8 {
        assert!(Prov::OFFSET_IS_ADDR);
        self.bytes.as_mut_ptr()
    }

    /// Gives direct immutable access to the raw contents of the entire buffer.
    /// Since this bypasses provenance tracking, it is only allowed when `OFFSET_IS_ADDR`.
    pub fn get_bytes_unchecked_raw(&self) -> *const u8 {
        assert!(Prov::OFFSET_IS_ADDR);
        self.bytes.as_ptr()
    }
}

/// Reading and writing.
impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes> {
    /// Sets the init bit for the given range. Requires a mutable allocation.
    fn mark_init(&mut self, range: AllocRange, is_init: bool) {
        if range.size.bytes() == 0 {
            return;
        }
        assert!(self.mutability == Mutability::Mut);
        self.init_mask.set_range(range, is_init);
    }

    /// Reads a *non-ZST* scalar.
    ///
    /// If `read_provenance` is `true`, this will also read provenance; otherwise it errors out
    /// if provenance cannot simply be stripped.
    ///
    /// It is the caller's responsibility to check bounds and alignment beforehand.
    pub fn read_scalar(
        &self,
        cx: &impl HasDataLayout,
        range: AllocRange,
        read_provenance: bool,
    ) -> AllocResult<Scalar<Prov>> {
        // First and foremost, if anything is uninit, bail out.
        if self.init_mask.is_range_initialized(range).is_err() {
            return Err(AllocError::InvalidUninitBytes(None));
        }

        // Get the integer part of the result. We HAVE TO check provenance before returning this!
        let bytes = self.get_bytes_unchecked(range);
        let bits = read_target_uint(cx.data_layout().endian, bytes).unwrap();

        if read_provenance {
            assert_eq!(range.size, cx.data_layout().pointer_size);

            // The easy case: provenance sits exactly where we are reading, so data and provenance
            // can be put back together into a pointer.
            if let Some(prov) = self.provenance.get_ptr(range.start) {
                let ptr = Pointer::new(prov, Size::from_bytes(bits));
                return Ok(Scalar::from_pointer(ptr, cx));
            }

            // If pointers can be treated byte-wise, join the per-byte provenance.
            if Prov::OFFSET_IS_ADDR {
                let mut prov = self.provenance.get(range.start, cx);
                for offset in Size::from_bytes(1)..range.size {
                    let this_prov = self.provenance.get(range.start + offset, cx);
                    prov = Prov::join(prov, this_prov);
                }
                let ptr = Pointer::new(prov, Size::from_bytes(bits));
                return Ok(Scalar::from_maybe_pointer(ptr, cx));
            } else {
                // Without `OFFSET_IS_ADDR`, the only remaining case we can handle is total
                // absence of provenance.
                if self.provenance.range_empty(range, cx) {
                    return Ok(Scalar::from_uint(bits, range.size));
                }
                // Else we have partial provenance, and that does not work.
                return Err(AllocError::ReadPartialPointer(range.start));
            }
        } else {
            // We are *not* reading a pointer. If we can ignore provenance or there is none,
            // return the plain integer.
            if Prov::OFFSET_IS_ADDR || self.provenance.range_empty(range, cx) {
                return Ok(Scalar::from_uint(bits, range.size));
            }
            // There is provenance we cannot ignore, so the bytes cannot be read as an integer.
            return Err(AllocError::ReadPointerAsInt(None));
        }
    }

    /// Writes a *non-ZST* scalar.
    ///
    /// It is the caller's responsibility to check bounds and alignment beforehand.
    pub fn write_scalar(
        &mut self,
        cx: &impl HasDataLayout,
        range: AllocRange,
        val: Scalar<Prov>,
    ) -> AllocResult {
        assert!(self.mutability == Mutability::Mut);

        // Split the scalar into its raw bytes and (for pointers) its provenance.
        let (bytes, provenance) = match val.to_bits_or_ptr_internal(range.size)? {
            Right(ptr) => {
                let (provenance, offset) = ptr.into_parts();
                (u128::from(offset.bytes()), Some(provenance))
            }
            Left(data) => (data, None),
        };

        let endian = cx.data_layout().endian;
        // Yes, we do overwrite all the bytes in `dst`.
        let dst = self.get_bytes_unchecked_for_overwrite(cx, range)?;
        write_target_uint(endian, dst, bytes).unwrap();

        // See if we have to also store some provenance.
        if let Some(provenance) = provenance {
            assert_eq!(range.size, cx.data_layout().pointer_size);
            self.provenance.insert_ptr(range.start, provenance, cx);
        }

        Ok(())
    }

    /// Write "uninit" to the given memory range.
    pub fn write_uninit(&mut self, cx: &impl HasDataLayout, range: AllocRange) -> AllocResult {
        self.mark_init(range, false);
        self.provenance.clear(range, cx)?;
        Ok(())
    }

    /// Initializes all previously uninitialized bytes in the entire allocation and gives all
    /// bytes wildcard provenance, in preparation for the allocation being written by native code.
    pub fn prepare_for_native_write(&mut self) -> AllocResult {
        let full_range = AllocRange { start: Size::ZERO, size: Size::from_bytes(self.len()) };
        // Overwrite uninitialized bytes with 0, so we don't leak whatever their value happens to be.
        for chunk in self.init_mask.range_as_init_chunks(full_range) {
            if !chunk.is_init() {
                let uninit_bytes = &mut self.bytes
                    [chunk.range().start.bytes_usize()..chunk.range().end.bytes_usize()];
                uninit_bytes.fill(0);
            }
        }
        // Mark everything as initialized now.
        self.mark_init(full_range, true);

        // Set the provenance of all bytes to wildcard.
        self.provenance.write_wildcards(self.len());

        // Expose the pointer so native code can later write through it; `black_box` keeps the
        // expose from being optimized away.
        std::hint::black_box(self.get_bytes_unchecked_raw_mut().expose_provenance());

        Ok(())
    }

    /// Removes all provenance in the given memory range.
    pub fn clear_provenance(&mut self, cx: &impl HasDataLayout, range: AllocRange) -> AllocResult {
        self.provenance.clear(range, cx)?;
        Ok(())
    }

    /// Applies a previously prepared provenance copy.
    pub fn provenance_apply_copy(&mut self, copy: ProvenanceCopy<Prov>) {
        self.provenance.apply_copy(copy)
    }

    /// Applies a previously prepared copy of the init mask to the given range, `repeat` times.
    pub fn init_mask_apply_copy(&mut self, copy: InitCopy, range: AllocRange, repeat: u64) {
        self.init_mask.apply_copy(copy, range, repeat)
    }
}