mod init_mask;
mod provenance_map;

use std::borrow::Cow;
use std::hash::Hash;
use std::ops::{Deref, DerefMut, Range};
use std::{fmt, hash, ptr};

use either::{Left, Right};
use init_mask::*;
pub use init_mask::{InitChunk, InitChunkIter};
use provenance_map::*;
use rustc_abi::{Align, HasDataLayout, Size};
use rustc_ast::Mutability;
use rustc_data_structures::intern::Interned;
use rustc_macros::HashStable;
use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};

use super::{
    AllocId, BadBytesAccess, CtfeProvenance, InterpErrorKind, InterpResult, Pointer,
    PointerArithmetic, Provenance, ResourceExhaustionInfo, Scalar, ScalarSizeMismatch,
    UndefinedBehaviorInfo, UnsupportedOpInfo, interp_ok, read_target_uint, write_target_uint,
};
use crate::ty;

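/// The functionality needed from the bytes of an `Allocation`.
///
/// Implementors provide the owned byte storage backing an allocation: it must be cloneable,
/// dereference to `[u8]`, support creation from existing bytes or as a zeroed buffer, and give
/// raw pointer access. The `_align` parameter is a request; the default `Box<[u8]>` backing
/// below does not over-align its buffer.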
pub trait AllocBytes: Clone + fmt::Debug + Deref<Target = [u8]> + DerefMut<Target = [u8]> {
    /// Create an `AllocBytes` from a slice of `u8`.
    fn from_bytes<'a>(slice: impl Into<Cow<'a, [u8]>>, _align: Align) -> Self;

    /// Create a zeroed `AllocBytes` of the specified size and alignment.
    /// Returns `None` if we ran out of memory on the host.
    fn zeroed(size: Size, _align: Align) -> Option<Self>;

    /// Gives direct mutable access to the raw underlying storage.
    fn as_mut_ptr(&mut self) -> *mut u8;

    /// Gives direct immutable access to the raw underlying storage.
    fn as_ptr(&self) -> *const u8;
}

/// Default `bytes` for `Allocation` is a `Box<[u8]>`.
impl AllocBytes for Box<[u8]> {
    fn from_bytes<'a>(slice: impl Into<Cow<'a, [u8]>>, _align: Align) -> Self {
        Box::<[u8]>::from(slice.into())
    }

    fn zeroed(size: Size, _align: Align) -> Option<Self> {
        let bytes = Box::<[u8]>::try_new_zeroed_slice(size.bytes().try_into().ok()?).ok()?;
        // SAFETY: the box was zero-allocated, which is a valid initial value for `Box<[u8]>`.
        let bytes = unsafe { bytes.assume_init() };
        Some(bytes)
    }

    fn as_mut_ptr(&mut self) -> *mut u8 {
        Box::as_mut_ptr(self).cast()
    }

    fn as_ptr(&self) -> *const u8 {
        Box::as_ptr(self).cast()
    }
}

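/// This type represents an Allocation in the Miri/CTFE core engine.
///
/// Its public API is rather low-level, working directly with allocation offsets and a custom
/// error type to account for the lack of an `AllocId` on this level. The Miri/CTFE core engine
/// `memory` module provides higher-level access.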
#[derive(Clone, Eq, PartialEq)]
#[derive(HashStable)]
pub struct Allocation<Prov: Provenance = CtfeProvenance, Extra = (), Bytes = Box<[u8]>> {
    /// The actual bytes of the allocation.
    /// Note that the bytes of a pointer represent the offset of the pointer.
    bytes: Bytes,
    /// Maps from byte addresses to extra provenance data for each pointer.
    /// Only the first byte of a pointer is inserted into the map; i.e.,
    /// every entry in this map applies to `pointer_size` consecutive bytes starting
    /// at the given offset.
    provenance: ProvenanceMap<Prov>,
    /// Denotes which part of this allocation is initialized.
    init_mask: InitMask,
    /// The alignment of the allocation to detect unaligned reads.
    /// (`Align` guarantees that this is a power of two.)
    pub align: Align,
    /// Whether the allocation is mutable.
    pub mutability: Mutability,
    /// Extra state for the machine.
    pub extra: Extra,
}

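/// Helper struct that packs an allocation's alignment, mutability, and "all bytes are zero"
/// flag into a single byte for (de)serialization: bits 0..=5 hold `log2(align)`, bit 6 the
/// mutability, and bit 7 whether the bytes are all zero (see the `Encodable` impl below).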
struct AllocFlags {
    align: Align,
    mutability: Mutability,
    all_zero: bool,
}

impl<E: Encoder> Encodable<E> for AllocFlags {
    fn encode(&self, encoder: &mut E) {
        // Make sure `Align::MAX` fits in the 6 bits reserved for the alignment exponent.
        const {
            let max_supported_align_repr = u8::MAX >> 2;
            let max_supported_align = 1 << max_supported_align_repr;
            assert!(Align::MAX.bytes() <= max_supported_align)
        }

        let mut flags = self.align.bytes().trailing_zeros() as u8;
        flags |= match self.mutability {
            Mutability::Not => 0,
            Mutability::Mut => 1 << 6,
        };
        flags |= (self.all_zero as u8) << 7;
        flags.encode(encoder);
    }
}

impl<D: Decoder> Decodable<D> for AllocFlags {
    fn decode(decoder: &mut D) -> Self {
        let flags: u8 = Decodable::decode(decoder);
        // Unpack the fields using the same layout as `encode` above.
        let align = flags & 0b0011_1111;
        let mutability = flags & 0b0100_0000;
        let all_zero = flags & 0b1000_0000;

        let align = Align::from_bytes(1 << align).unwrap();
        let mutability = match mutability {
            0 => Mutability::Not,
            _ => Mutability::Mut,
        };
        let all_zero = all_zero > 0;

        AllocFlags { align, mutability, all_zero }
    }
}

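/// Efficiently detect whether a buffer contains only zero bytes, so the encoder can skip
/// emitting the bytes entirely (see `Encodable for Allocation` below).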
#[inline]
fn all_zero(buf: &[u8]) -> bool {
    // In the common case where the buffer is not all zero, the first byte usually already
    // rules it out, so check it before scanning the whole buffer.
    if buf.is_empty() {
        return true;
    }
    if buf[0] != 0 {
        return false;
    }

    // The branch-free fold lets the scan auto-vectorize.
    buf.iter().fold(true, |acc, b| acc & (*b == 0))
}

impl<Prov: Provenance, Extra, Bytes, E: Encoder> Encodable<E> for Allocation<Prov, Extra, Bytes>
where
    Bytes: AllocBytes,
    ProvenanceMap<Prov>: Encodable<E>,
    Extra: Encodable<E>,
{
    fn encode(&self, encoder: &mut E) {
        let all_zero = all_zero(&self.bytes);
        AllocFlags { align: self.align, mutability: self.mutability, all_zero }.encode(encoder);

        // Even when the bytes are all zero, the length still has to be encoded so the decoder
        // can reconstruct a zero-filled buffer of the right size.
        encoder.emit_usize(self.bytes.len());
        if !all_zero {
            encoder.emit_raw_bytes(&self.bytes);
        }
        self.provenance.encode(encoder);
        self.init_mask.encode(encoder);
        self.extra.encode(encoder);
    }
}

impl<Prov: Provenance, Extra, Bytes, D: Decoder> Decodable<D> for Allocation<Prov, Extra, Bytes>
where
    Bytes: AllocBytes,
    ProvenanceMap<Prov>: Decodable<D>,
    Extra: Decodable<D>,
{
    fn decode(decoder: &mut D) -> Self {
        let AllocFlags { align, mutability, all_zero } = Decodable::decode(decoder);

        let len = decoder.read_usize();
        let bytes = if all_zero { vec![0u8; len] } else { decoder.read_raw_bytes(len).to_vec() };
        let bytes = Bytes::from_bytes(bytes, align);

        let provenance = Decodable::decode(decoder);
        let init_mask = Decodable::decode(decoder);
        let extra = Decodable::decode(decoder);

        Self { bytes, provenance, init_mask, align, mutability, extra }
    }
}

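// Large allocations are not hashed in full when interning: once the buffer exceeds
// `MAX_HASHED_BUFFER_LEN`, only its length plus a prefix and suffix of `MAX_BYTES_TO_HASH`
// bytes each are hashed. See the `Hash` impl below.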
const MAX_BYTES_TO_HASH: usize = 64;

const MAX_HASHED_BUFFER_LEN: usize = 2 * MAX_BYTES_TO_HASH;

impl hash::Hash for Allocation {
    // Const allocations are only hashed for interning. However, they can be large, making the
    // hashing expensive, especially since it uses `FxHash`: it's better suited to short keys, not
    // potentially big buffers like the actual bytes of an allocation. We can partially hash some
    // fields when they're large.
    fn hash<H: hash::Hasher>(&self, state: &mut H) {
        let Self {
            bytes,
            provenance,
            init_mask,
            align,
            mutability,
            extra: (), // don't bother hashing ()
        } = self;

        // Partially hash the `bytes` buffer when it is large. To limit collisions with common
        // prefixes and suffixes, we hash the length and some slices of the buffer.
        let byte_count = bytes.len();
        if byte_count > MAX_HASHED_BUFFER_LEN {
            // Hash the buffer's length.
            byte_count.hash(state);

            // And its head and tail.
            bytes[..MAX_BYTES_TO_HASH].hash(state);
            bytes[byte_count - MAX_BYTES_TO_HASH..].hash(state);
        } else {
            bytes.hash(state);
        }

        // Hash the other fields as usual.
        provenance.hash(state);
        init_mask.hash(state);
        align.hash(state);
        mutability.hash(state);
    }
}

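/// A wrapper around an interned [`Allocation`]. Only const allocations are interned, so this is
/// the form in which they are stored in the `tcx`; use [`Self::inner`] to access the underlying
/// [`Allocation`].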
#[derive(Copy, Clone, PartialEq, Eq, Hash, HashStable)]
#[rustc_pass_by_value]
pub struct ConstAllocation<'tcx>(pub Interned<'tcx, Allocation>);

impl<'tcx> fmt::Debug for ConstAllocation<'tcx> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // The debug representation of the contents is very verbose and rarely useful, so don't
        // print them.
        write!(f, "ConstAllocation {{ .. }}")
    }
}

impl<'tcx> ConstAllocation<'tcx> {
    pub fn inner(self) -> &'tcx Allocation {
        self.0.0
    }
}

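/// The error type for allocation-level memory accesses. These errors are not tied to a
/// particular `AllocId`; use [`AllocError::to_interp_error`] to turn them into a full
/// [`InterpErrorKind`].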
#[derive(Debug)]
pub enum AllocError {
    /// A scalar had the wrong size.
    ScalarSizeMismatch(ScalarSizeMismatch),
    /// Encountered a pointer where we needed raw bytes.
    ReadPointerAsInt(Option<BadBytesAccess>),
    /// Partially overwriting a pointer.
    OverwritePartialPointer(Size),
    /// Partially copying a pointer.
    ReadPartialPointer(Size),
    /// Using uninitialized data where it is not allowed.
    InvalidUninitBytes(Option<BadBytesAccess>),
}
pub type AllocResult<T = ()> = Result<T, AllocError>;

impl From<ScalarSizeMismatch> for AllocError {
    fn from(s: ScalarSizeMismatch) -> Self {
        AllocError::ScalarSizeMismatch(s)
    }
}

impl AllocError {
    pub fn to_interp_error<'tcx>(self, alloc_id: AllocId) -> InterpErrorKind<'tcx> {
        use AllocError::*;
        match self {
            ScalarSizeMismatch(s) => {
                InterpErrorKind::UndefinedBehavior(UndefinedBehaviorInfo::ScalarSizeMismatch(s))
            }
            ReadPointerAsInt(info) => InterpErrorKind::Unsupported(
                UnsupportedOpInfo::ReadPointerAsInt(info.map(|b| (alloc_id, b))),
            ),
            OverwritePartialPointer(offset) => InterpErrorKind::Unsupported(
                UnsupportedOpInfo::OverwritePartialPointer(Pointer::new(alloc_id, offset)),
            ),
            ReadPartialPointer(offset) => InterpErrorKind::Unsupported(
                UnsupportedOpInfo::ReadPartialPointer(Pointer::new(alloc_id, offset)),
            ),
            InvalidUninitBytes(info) => InterpErrorKind::UndefinedBehavior(
                UndefinedBehaviorInfo::InvalidUninitBytes(info.map(|b| (alloc_id, b))),
            ),
        }
    }
}

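/// The information that makes up a memory access: offset and size.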
#[derive(Copy, Clone)]
pub struct AllocRange {
    pub start: Size,
    pub size: Size,
}

impl fmt::Debug for AllocRange {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "[{:#x}..{:#x}]", self.start.bytes(), self.end().bytes())
    }
}

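/// Free-standing constructor for `AllocRange`, for less syntactic overhead at call sites.
///
/// Illustrative sketch (not compiled as a doctest): a 4-byte access starting at offset 8 ends
/// at offset 12.
///
/// ```ignore (illustrative)
/// let range = alloc_range(Size::from_bytes(8), Size::from_bytes(4));
/// assert_eq!(range.end(), Size::from_bytes(12));
/// ```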
#[inline(always)]
pub fn alloc_range(start: Size, size: Size) -> AllocRange {
    AllocRange { start, size }
}

impl From<Range<Size>> for AllocRange {
    #[inline]
    fn from(r: Range<Size>) -> Self {
        alloc_range(r.start, r.end - r.start)
    }
}

impl From<Range<usize>> for AllocRange {
    #[inline]
    fn from(r: Range<usize>) -> Self {
        AllocRange::from(Size::from_bytes(r.start)..Size::from_bytes(r.end))
    }
}

impl AllocRange {
    #[inline(always)]
    pub fn end(self) -> Size {
        self.start + self.size // This does overflow checking.
    }

    /// Returns the `subrange` within this range; panics if it is not a subrange.
    #[inline]
    pub fn subrange(self, subrange: AllocRange) -> AllocRange {
        let sub_start = self.start + subrange.start;
        let range = alloc_range(sub_start, subrange.size);
        assert!(range.end() <= self.end(), "access outside the bounds for given AllocRange");
        range
    }
}

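/// Whether a new allocation should be initialized with zero bytes or left uninitialized.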
pub enum AllocInit {
    Uninit,
    Zero,
}

// The constructors are all without extra; the extra gets added later (see `with_extra`).
impl<Prov: Provenance, Bytes: AllocBytes> Allocation<Prov, (), Bytes> {
    /// Creates an allocation initialized by the given bytes.
    pub fn from_bytes<'a>(
        slice: impl Into<Cow<'a, [u8]>>,
        align: Align,
        mutability: Mutability,
    ) -> Self {
        let bytes = Bytes::from_bytes(slice, align);
        let size = Size::from_bytes(bytes.len());
        Self {
            bytes,
            provenance: ProvenanceMap::new(),
            init_mask: InitMask::new(size, true),
            align,
            mutability,
            extra: (),
        }
    }

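    /// Creates a read-only allocation from `slice`, with alignment 1.
    ///
    /// Illustrative sketch (not compiled as a doctest), using the default
    /// `Allocation<CtfeProvenance, (), Box<[u8]>>` type parameters:
    ///
    /// ```ignore (illustrative)
    /// let alloc = Allocation::from_bytes_byte_aligned_immutable(&b"hello"[..]);
    /// assert_eq!(alloc.len(), 5);
    /// assert_eq!(alloc.mutability, Mutability::Not);
    /// ```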
    pub fn from_bytes_byte_aligned_immutable<'a>(slice: impl Into<Cow<'a, [u8]>>) -> Self {
        Allocation::from_bytes(slice, Align::ONE, Mutability::Not)
    }

    fn new_inner<R>(
        size: Size,
        align: Align,
        init: AllocInit,
        fail: impl FnOnce() -> R,
    ) -> Result<Self, R> {
        // Creating the zeroed buffer can fail on the host (e.g. for huge sizes); in that case,
        // report the failure through the caller-supplied `fail` callback instead of aborting.
        let bytes = Bytes::zeroed(size, align).ok_or_else(fail)?;

        Ok(Allocation {
            bytes,
            provenance: ProvenanceMap::new(),
            init_mask: InitMask::new(
                size,
                match init {
                    AllocInit::Uninit => false,
                    AllocInit::Zero => true,
                },
            ),
            align,
            mutability: Mutability::Mut,
            extra: (),
        })
    }

    /// Try to create an Allocation of `size` bytes, failing if there is not enough memory
    /// available to the compiler to do so.
    pub fn try_new<'tcx>(size: Size, align: Align, init: AllocInit) -> InterpResult<'tcx, Self> {
        Self::new_inner(size, align, init, || {
            ty::tls::with(|tcx| tcx.dcx().delayed_bug("exhausted memory during interpretation"));
            InterpErrorKind::ResourceExhaustion(ResourceExhaustionInfo::MemoryExhausted)
        })
        .into()
    }

    /// Try to create an Allocation of `size` bytes, panicking if there is not enough memory
    /// available to the compiler to do so.
    pub fn new(size: Size, align: Align, init: AllocInit) -> Self {
        match Self::new_inner(size, align, init, || {
            panic!(
                "interpreter ran out of memory: cannot create allocation of {} bytes",
                size.bytes()
            );
        }) {
            Ok(x) => x,
            Err(x) => x,
        }
    }

    /// Add the extra to the allocation.
    pub fn with_extra<Extra>(self, extra: Extra) -> Allocation<Prov, Extra, Bytes> {
        Allocation {
            bytes: self.bytes,
            provenance: self.provenance,
            init_mask: self.init_mask,
            align: self.align,
            mutability: self.mutability,
            extra,
        }
    }
}

impl Allocation {
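    /// Adjust allocation from the ones in `tcx` to a custom Machine instance
    /// with a different `Provenance` and `Bytes` type.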
    pub fn adjust_from_tcx<'tcx, Prov: Provenance, Bytes: AllocBytes>(
        &self,
        cx: &impl HasDataLayout,
        mut alloc_bytes: impl FnMut(&[u8], Align) -> InterpResult<'tcx, Bytes>,
        mut adjust_ptr: impl FnMut(Pointer<CtfeProvenance>) -> InterpResult<'tcx, Pointer<Prov>>,
    ) -> InterpResult<'tcx, Allocation<Prov, (), Bytes>> {
        // Copy the data.
        let mut bytes = alloc_bytes(&*self.bytes, self.align)?;
        // Adjust provenance of pointers stored in this allocation.
        let mut new_provenance = Vec::with_capacity(self.provenance.ptrs().len());
        let ptr_size = cx.data_layout().pointer_size.bytes_usize();
        let endian = cx.data_layout().endian;
        for &(offset, alloc_id) in self.provenance.ptrs().iter() {
            let idx = offset.bytes_usize();
            let ptr_bytes = &mut bytes[idx..idx + ptr_size];
            let bits = read_target_uint(endian, ptr_bytes).unwrap();
            let (ptr_prov, ptr_offset) =
                adjust_ptr(Pointer::new(alloc_id, Size::from_bytes(bits)))?.into_parts();
            write_target_uint(endian, ptr_bytes, ptr_offset.bytes().into()).unwrap();
            new_provenance.push((offset, ptr_prov));
        }
        // Create the allocation.
        interp_ok(Allocation {
            bytes,
            provenance: ProvenanceMap::from_presorted_ptrs(new_provenance),
            init_mask: self.init_mask.clone(),
            align: self.align,
            mutability: self.mutability,
            extra: self.extra,
        })
    }
}

impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes> {
    pub fn len(&self) -> usize {
        self.bytes.len()
    }

    pub fn size(&self) -> Size {
        Size::from_bytes(self.len())
    }

    /// Looks at a slice which may contain uninitialized bytes or provenance, without any checks.
    /// This must not be used for reads affecting the interpreter execution.
    pub fn inspect_with_uninit_and_ptr_outside_interpreter(&self, range: Range<usize>) -> &[u8] {
        &self.bytes[range]
    }

    /// Returns the mask indicating which bytes are initialized.
    pub fn init_mask(&self) -> &InitMask {
        &self.init_mask
    }

    /// Returns the provenance map.
    pub fn provenance(&self) -> &ProvenanceMap<Prov> {
        &self.provenance
    }
}

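/// Byte accessors: ways to get at the raw bytes of the allocation, with varying amounts of
/// checking.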
impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes> {
    /// This is the entirely abstraction-violating way to just grab the raw bytes without
    /// caring about provenance or initialization.
    ///
    /// It is the caller's responsibility to check bounds and alignment beforehand.
    #[inline]
    pub fn get_bytes_unchecked(&self, range: AllocRange) -> &[u8] {
        &self.bytes[range.start.bytes_usize()..range.end().bytes_usize()]
    }

    /// Checks that these bytes are initialized, then strips provenance (if possible) and returns
    /// them.
    ///
    /// It is the caller's responsibility to check bounds and alignment beforehand.
    #[inline]
    pub fn get_bytes_strip_provenance(
        &self,
        cx: &impl HasDataLayout,
        range: AllocRange,
    ) -> AllocResult<&[u8]> {
        self.init_mask.is_range_initialized(range).map_err(|uninit_range| {
            AllocError::InvalidUninitBytes(Some(BadBytesAccess {
                access: range,
                bad: uninit_range,
            }))
        })?;
        if !Prov::OFFSET_IS_ADDR && !self.provenance.range_empty(range, cx) {
            // Find the provenance.
            let (offset, _prov) = self
                .provenance
                .range_ptrs_get(range, cx)
                .first()
                .copied()
                .expect("there must be provenance somewhere here");
            let start = offset.max(range.start); // the pointer might begin before `range`!
            let end = (offset + cx.pointer_size()).min(range.end()); // the pointer might end after `range`!
            return Err(AllocError::ReadPointerAsInt(Some(BadBytesAccess {
                access: range,
                bad: AllocRange::from(start..end),
            })));
        }
        Ok(self.get_bytes_unchecked(range))
    }

    /// Mutable access to the raw bytes for overwriting. This marks the given range as initialized
    /// and clears its provenance, so the caller must actually overwrite all of these bytes.
    ///
    /// It is the caller's responsibility to check bounds and alignment beforehand.
    pub fn get_bytes_unchecked_for_overwrite(
        &mut self,
        cx: &impl HasDataLayout,
        range: AllocRange,
    ) -> AllocResult<&mut [u8]> {
        self.mark_init(range, true);
        self.provenance.clear(range, cx)?;

        Ok(&mut self.bytes[range.start.bytes_usize()..range.end().bytes_usize()])
    }

    /// A raw pointer variant of `get_bytes_unchecked_for_overwrite` that avoids invalidating
    /// existing immutable aliases into this memory.
    pub fn get_bytes_unchecked_for_overwrite_ptr(
        &mut self,
        cx: &impl HasDataLayout,
        range: AllocRange,
    ) -> AllocResult<*mut [u8]> {
        self.mark_init(range, true);
        self.provenance.clear(range, cx)?;

        assert!(range.end().bytes_usize() <= self.bytes.len()); // need to do our own bounds-check
        // Go via `as_mut_ptr` (not `deref_mut`) so existing raw aliases into the buffer stay
        // valid.
        let begin_ptr = self.bytes.as_mut_ptr().wrapping_add(range.start.bytes_usize());
        let len = range.end().bytes_usize() - range.start.bytes_usize();
        Ok(ptr::slice_from_raw_parts_mut(begin_ptr, len))
    }

    /// Gives direct mutable access to the raw underlying buffer.
    /// Only works if `Prov::OFFSET_IS_ADDR` is true, i.e. offsets are absolute addresses.
    pub fn get_bytes_unchecked_raw_mut(&mut self) -> *mut u8 {
        assert!(Prov::OFFSET_IS_ADDR);
        self.bytes.as_mut_ptr()
    }

    /// Gives direct immutable access to the raw underlying buffer.
    /// Only works if `Prov::OFFSET_IS_ADDR` is true, i.e. offsets are absolute addresses.
    pub fn get_bytes_unchecked_raw(&self) -> *const u8 {
        assert!(Prov::OFFSET_IS_ADDR);
        self.bytes.as_ptr()
    }
}

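/// Reading and writing: scalar-level accesses plus init-mask and provenance bookkeeping.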
impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes> {
    /// Sets the init bit for the given range; the allocation must be mutable.
    fn mark_init(&mut self, range: AllocRange, is_init: bool) {
        if range.size.bytes() == 0 {
            return;
        }
        assert!(self.mutability == Mutability::Mut);
        self.init_mask.set_range(range, is_init);
    }

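    /// Reads a *non-ZST* scalar.
    ///
    /// If `read_provenance` is `true`, this expects a pointer-sized read and will attach
    /// provenance to the result; otherwise, provenance in the read range is an error unless
    /// offsets are addresses (`Prov::OFFSET_IS_ADDR`).
    ///
    /// It is the caller's responsibility to check bounds and alignment beforehand.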
    pub fn read_scalar(
        &self,
        cx: &impl HasDataLayout,
        range: AllocRange,
        read_provenance: bool,
    ) -> AllocResult<Scalar<Prov>> {
        // First and foremost, if anything is uninit, bail.
        if self.init_mask.is_range_initialized(range).is_err() {
            return Err(AllocError::InvalidUninitBytes(None));
        }

        // Get the integer part of the result. We HAVE TO check provenance before returning this!
        let bytes = self.get_bytes_unchecked(range);
        let bits = read_target_uint(cx.data_layout().endian, bytes).unwrap();

        if read_provenance {
            assert_eq!(range.size, cx.data_layout().pointer_size);

            // The easy case is finding provenance exactly where we are reading, then we can put
            // data and provenance back together and return that.
            if let Some(prov) = self.provenance.get_ptr(range.start) {
                let ptr = Pointer::new(prov, Size::from_bytes(bits));
                return Ok(Scalar::from_pointer(ptr, cx));
            }

            // If we can work on pointers byte-wise, join the byte-wise provenances.
            if Prov::OFFSET_IS_ADDR {
                let mut prov = self.provenance.get(range.start, cx);
                for offset in Size::from_bytes(1)..range.size {
                    let this_prov = self.provenance.get(range.start + offset, cx);
                    prov = Prov::join(prov, this_prov);
                }
                let ptr = Pointer::new(prov, Size::from_bytes(bits));
                return Ok(Scalar::from_maybe_pointer(ptr, cx));
            } else {
                // Without `OFFSET_IS_ADDR`, the only remaining correct case is that there is no
                // provenance anywhere in this range.
                if self.provenance.range_empty(range, cx) {
                    return Ok(Scalar::from_uint(bits, range.size));
                }
                // Else we have mixed provenance; that does not work.
                return Err(AllocError::ReadPartialPointer(range.start));
            }
        } else {
            // We are *not* reading a pointer. If we can just ignore provenance or there is none,
            // strip it and return the bits.
            if Prov::OFFSET_IS_ADDR || self.provenance.range_empty(range, cx) {
                return Ok(Scalar::from_uint(bits, range.size));
            }
            // There is provenance we cannot ignore, so we cannot read this as an int.
            return Err(AllocError::ReadPointerAsInt(None));
        }
    }

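    /// Writes a *non-ZST* scalar.
    ///
    /// It is the caller's responsibility to check bounds and alignment beforehand.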
    pub fn write_scalar(
        &mut self,
        cx: &impl HasDataLayout,
        range: AllocRange,
        val: Scalar<Prov>,
    ) -> AllocResult {
        assert!(self.mutability == Mutability::Mut);

        // `to_bits_or_ptr_internal` is the right method because we just want to store this data
        // as-is into memory. It also checks that `val` has the expected size.
        let (bytes, provenance) = match val.to_bits_or_ptr_internal(range.size)? {
            Right(ptr) => {
                let (provenance, offset) = ptr.into_parts();
                (u128::from(offset.bytes()), Some(provenance))
            }
            Left(data) => (data, None),
        };

        let endian = cx.data_layout().endian;
        // Writing via `get_bytes_unchecked_for_overwrite` also marks the range as initialized and
        // clears any old provenance.
        let dst = self.get_bytes_unchecked_for_overwrite(cx, range)?;
        write_target_uint(endian, dst, bytes).unwrap();

        // See if we have to also store some provenance.
        if let Some(provenance) = provenance {
            assert_eq!(range.size, cx.data_layout().pointer_size);
            self.provenance.insert_ptr(range.start, provenance, cx);
        }

        Ok(())
    }

    /// Write "uninit" to the given memory range.
    pub fn write_uninit(&mut self, cx: &impl HasDataLayout, range: AllocRange) -> AllocResult {
        self.mark_init(range, false);
        self.provenance.clear(range, cx)?;
        Ok(())
    }

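    /// Marks all bytes as initialized and prepares the allocation for native accesses:
    /// uninitialized bytes are zeroed, all provenance is turned into wildcard provenance, and the
    /// buffer's own provenance is exposed.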
    pub fn prepare_for_native_write(&mut self) -> AllocResult {
        let full_range = AllocRange { start: Size::ZERO, size: Size::from_bytes(self.len()) };
        // Overwrite uninitialized bytes with 0, to ensure we don't leak whatever their value
        // happens to be.
        for chunk in self.init_mask.range_as_init_chunks(full_range) {
            if !chunk.is_init() {
                let uninit_bytes = &mut self.bytes
                    [chunk.range().start.bytes_usize()..chunk.range().end.bytes_usize()];
                uninit_bytes.fill(0);
            }
        }
        // Mark everything as initialized now.
        self.mark_init(full_range, true);

        // Set provenance of all bytes to wildcard.
        self.provenance.write_wildcards(self.len());

        // Also expose the provenance of the interpreter-level allocation, so it can be written
        // by FFI. The `black_box` is defensive programming as LLVM likes to (incorrectly)
        // optimize away ptr2int casts whose result is unused.
        std::hint::black_box(self.get_bytes_unchecked_raw_mut().expose_provenance());

        Ok(())
    }

    /// Remove all provenance in the given memory range.
    pub fn clear_provenance(&mut self, cx: &impl HasDataLayout, range: AllocRange) -> AllocResult {
        self.provenance.clear(range, cx)?;
        Ok(())
    }

    /// Applies a previously prepared provenance copy.
    /// The affected range, as defined in the parameters to `provenance().prepare_copy` is expected
    /// to be clear of provenance.
    ///
    /// This is dangerous to use as it can violate internal `Allocation` invariants!
    /// It only exists to support an efficient implementation of `mem_copy_repeatedly`.
    pub fn provenance_apply_copy(&mut self, copy: ProvenanceCopy<Prov>) {
        self.provenance.apply_copy(copy)
    }

    /// Applies a previously prepared copy of the init mask.
    ///
    /// This is dangerous to use as it can violate internal `Allocation` invariants!
    /// It only exists to support an efficient implementation of `mem_copy_repeatedly`.
    pub fn init_mask_apply_copy(&mut self, copy: InitCopy, range: AllocRange, repeat: u64) {
        self.init_mask.apply_copy(copy, range, repeat)
    }
}