// rustc_middle/mir/interpret/allocation/provenance_map.rs

use std::cmp;
use std::ops::Range;

use rustc_abi::{HasDataLayout, Size};
use rustc_data_structures::sorted_map::SortedMap;
use rustc_macros::HashStable;
use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
use tracing::trace;

use super::{AllocRange, CtfeProvenance, Provenance, alloc_range};
use crate::mir::interpret::{AllocError, AllocResult};

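/// Stores the provenance of pointers stored inside an allocation. Provenance is tracked at
/// pointer granularity in `ptrs`, and at byte granularity in `bytes` for pointers that have been
/// partially overwritten or copied byte-by-byte.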
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
#[derive(HashStable)]
pub struct ProvenanceMap<Prov = CtfeProvenance> {
    /// Provenance of whole pointers: each entry covers an entire pointer-size worth of bytes
    /// starting at the given offset, so entries are always at least a pointer size apart.
    ptrs: SortedMap<Size, Prov>,
    /// Byte-level provenance fragments. The `u8` records which byte of the original pointer
    /// this fragment came from. `None` means there are no fragments at all.
    bytes: Option<Box<SortedMap<Size, (Prov, u8)>>>,
}

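// Only the ptr-sized entries are (de)serialized: `encode` asserts below that `bytes` is `None`,
// so byte-level fragments never reach the encoder, and `decode` simply starts with no fragments.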
impl<D: Decoder, Prov: Provenance + Decodable<D>> Decodable<D> for ProvenanceMap<Prov> {
    fn decode(d: &mut D) -> Self {
        Self { ptrs: Decodable::decode(d), bytes: None }
    }
}
impl<S: Encoder, Prov: Provenance + Encodable<S>> Encodable<S> for ProvenanceMap<Prov> {
    fn encode(&self, s: &mut S) {
        let Self { ptrs, bytes } = self;
        assert!(bytes.is_none());
        ptrs.encode(s)
    }
}

impl<Prov> ProvenanceMap<Prov> {
    pub fn new() -> Self {
        ProvenanceMap { ptrs: SortedMap::new(), bytes: None }
    }

    /// The caller must guarantee that the given provenance list is already sorted by offset.
    pub fn from_presorted_ptrs(r: Vec<(Size, Prov)>) -> Self {
        ProvenanceMap { ptrs: SortedMap::from_presorted_elements(r), bytes: None }
    }
}

impl ProvenanceMap {
    /// Gives raw access to the ptr-sized provenance entries.
    ///
    /// Panics if the allocation still has byte-level provenance fragments, i.e. if it has not
    /// been interned yet.
    #[inline]
    pub fn ptrs(&self) -> &SortedMap<Size, CtfeProvenance> {
        assert!(self.bytes.is_none(), "`ptrs()` called on non-interned allocation");
        &self.ptrs
    }
}

impl<Prov: Provenance> ProvenanceMap<Prov> {
    /// Returns the range of offsets whose ptr-sized provenance entries can overlap the given
    /// range: a pointer starting up to `pointer_size - 1` bytes before `range.start` still
    /// reaches into the range, so the start is adjusted accordingly.
    fn adjusted_range_ptrs(range: AllocRange, cx: &impl HasDataLayout) -> Range<Size> {
        let adjusted_start = Size::from_bytes(
            range.start.bytes().saturating_sub(cx.data_layout().pointer_size().bytes() - 1),
        );
        adjusted_start..range.end()
    }

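    /// Returns all ptr-sized provenance entries that overlap the given range, including entries
    /// that start before `range.start` but extend into it.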
    pub(super) fn range_ptrs_get(
        &self,
        range: AllocRange,
        cx: &impl HasDataLayout,
    ) -> &[(Size, Prov)] {
        self.ptrs.range(Self::adjusted_range_ptrs(range, cx))
    }

    fn range_ptrs_is_empty(&self, range: AllocRange, cx: &impl HasDataLayout) -> bool {
        self.ptrs.range_is_empty(Self::adjusted_range_ptrs(range, cx))
    }

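    /// Returns all byte-level provenance fragments with an offset in the given range. No start
    /// adjustment is needed here since each fragment covers exactly one byte.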
    fn range_bytes_get(&self, range: AllocRange) -> &[(Size, (Prov, u8))] {
        if let Some(bytes) = self.bytes.as_ref() {
            bytes.range(range.start..range.end())
        } else {
            &[]
        }
    }

    fn range_bytes_is_empty(&self, range: AllocRange) -> bool {
        self.bytes.as_ref().is_none_or(|bytes| bytes.range_is_empty(range.start..range.end()))
    }

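    /// Returns the provenance of the byte at `offset`, if any, together with the index of that
    /// byte inside its original pointer.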
    pub fn get_byte(&self, offset: Size, cx: &impl HasDataLayout) -> Option<(Prov, u8)> {
        let prov = self.range_ptrs_get(alloc_range(offset, Size::from_bytes(1)), cx);
        debug_assert!(prov.len() <= 1);
        if let Some(entry) = prov.first() {
            // The byte is covered by a whole pointer; compute its index within that pointer.
            debug_assert!(self.bytes.as_ref().is_none_or(|b| !b.contains_key(&offset)));
            Some((entry.1, (offset - entry.0).bytes() as u8))
        } else {
            // Otherwise, look for an individual byte-level fragment.
            self.bytes.as_ref().and_then(|b| b.get(&offset).copied())
        }
    }

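    /// Returns all provenance overlapping the given range: first the ptr-sized entries, then the
    /// byte-level fragments. The same provenance value may appear more than once if it occurs at
    /// several offsets.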
    pub fn get_range(
        &self,
        cx: &impl HasDataLayout,
        range: AllocRange,
    ) -> impl Iterator<Item = Prov> {
        let ptr_provs = self.range_ptrs_get(range, cx).iter().map(|(_, p)| *p);
        let byte_provs = self.range_bytes_get(range).iter().map(|(_, (p, _))| *p);
        ptr_provs.chain(byte_provs)
    }

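    /// Attempts to merge all byte-level fragments back into ptr-sized entries. Returns `true` on
    /// success (including when there were no fragments to begin with); returns `false` if some
    /// fragments cannot be reassembled into a complete pointer, or if `Prov::OFFSET_IS_ADDR` is
    /// not set. On success, `bytes` is reset to `None`.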
    pub fn merge_bytes(&mut self, cx: &impl HasDataLayout) -> bool {
        let Some(bytes) = self.bytes.as_deref_mut() else {
            return true;
        };
        if !Prov::OFFSET_IS_ADDR {
            return false;
        }
        let ptr_size = cx.data_layout().pointer_size();
        while let Some((offset, (prov, _))) = bytes.iter().next().copied() {
            // Check whether the fragments starting at `offset` form a complete pointer: there
            // must be exactly `ptr_size` of them, all with the same provenance and in order.
            let range = offset..offset + ptr_size;
            let frags = bytes.range(range.clone());
            if frags.len() != ptr_size.bytes_usize() {
                return false;
            }
            for (idx, (_offset, (frag_prov, frag_idx))) in frags.iter().copied().enumerate() {
                if frag_prov != prov || frag_idx != idx as u8 {
                    return false;
                }
            }
            // Turn the fragments back into a single ptr-sized entry.
            bytes.remove_range(range);
            self.ptrs.insert(offset, prov);
        }
        self.bytes = None;
        true
    }

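    /// Returns the provenance of the pointer starting at exactly `offset`, if any. Pointers that
    /// merely overlap `offset` and byte-level fragments are not considered.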
    pub fn get_ptr(&self, offset: Size) -> Option<Prov> {
        self.ptrs.get(&offset).copied()
    }

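    /// Returns whether the given range is completely free of provenance, considering both whole
    /// pointers and byte-level fragments.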
    pub fn range_empty(&self, range: AllocRange, cx: &impl HasDataLayout) -> bool {
        self.range_ptrs_is_empty(range, cx) && self.range_bytes_is_empty(range)
    }

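    /// Yields every provenance value stored in this map, from both the ptr-sized entries and the
    /// byte-level fragments.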
    pub fn provenances(&self) -> impl Iterator<Item = Prov> {
        let bytes = self.bytes.iter().flat_map(|b| b.values().map(|(p, _i)| p));
        self.ptrs.values().chain(bytes).copied()
    }

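    /// Inserts a ptr-sized provenance entry at `offset`. The affected range must not contain any
    /// provenance yet (checked via `debug_assert!`).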
    pub fn insert_ptr(&mut self, offset: Size, prov: Prov, cx: &impl HasDataLayout) {
        debug_assert!(self.range_empty(alloc_range(offset, cx.data_layout().pointer_size()), cx));
        self.ptrs.insert(offset, prov);
    }

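    /// Removes all provenance inside the given range. If a ptr-sized entry only partially
    /// overlaps the range, the bytes that fall outside the range are preserved as byte-level
    /// fragments.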
    pub fn clear(&mut self, range: AllocRange, cx: &impl HasDataLayout) {
        let start = range.start;
        let end = range.end();
        // Byte fragments are easy: just drop everything inside the range.
        if let Some(bytes) = self.bytes.as_mut() {
            bytes.remove_range(start..end);
        }

        let pointer_size = cx.data_layout().pointer_size();

        // Find the offset range covered by ptr-sized entries that overlap our range.
        let (first, last) = {
            if self.range_ptrs_is_empty(range, cx) {
                return;
            }

            let provenance = self.range_ptrs_get(range, cx);
            (provenance.first().unwrap().0, provenance.last().unwrap().0 + pointer_size)
        };

        // If a pointer sticks out of the cleared range at either end, keep the bytes outside the
        // range as byte-level fragments.
        if first < start {
            let prov = self.ptrs[&first];
            let bytes = self.bytes.get_or_insert_with(Box::default);
            for offset in first..start {
                bytes.insert(offset, (prov, (offset - first).bytes() as u8));
            }
        }
        if last > end {
            let begin_of_last = last - pointer_size;
            let prov = self.ptrs[&begin_of_last];
            let bytes = self.bytes.get_or_insert_with(Box::default);
            for offset in end..last {
                bytes.insert(offset, (prov, (offset - begin_of_last).bytes() as u8));
            }
        }

        // Now drop the ptr-sized entries themselves.
        self.ptrs.remove_range(first..last);
    }

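    /// Overwrites all provenance in the given range with the wildcard provenance
    /// (`Prov::WILDCARD`). Pointers partially overlapping the range are split, keeping their
    /// out-of-range bytes as byte-level fragments. Panics if `Prov` has no wildcard provenance.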
    pub fn write_wildcards(&mut self, cx: &impl HasDataLayout, range: AllocRange) {
        let wildcard = Prov::WILDCARD.unwrap();

        let bytes = self.bytes.get_or_insert_with(Box::default);

        // For ptr-sized entries overlapping the edges of the range, keep the bytes outside the
        // range as byte-level fragments before removing the entries.
        let ptr_range = Self::adjusted_range_ptrs(range, cx);
        let ptrs = self.ptrs.range(ptr_range.clone());
        if let Some((offset, prov)) = ptrs.first().copied() {
            for byte_ofs in offset..range.start {
                bytes.insert(byte_ofs, (prov, (byte_ofs - offset).bytes() as u8));
            }
        }
        if let Some((offset, prov)) = ptrs.last().copied() {
            for byte_ofs in range.end()..offset + cx.data_layout().pointer_size() {
                bytes.insert(byte_ofs, (prov, (byte_ofs - offset).bytes() as u8));
            }
        }
        self.ptrs.remove_range(ptr_range);

        // Overwrite the range itself with wildcard provenance.
        for offset in range.start..range.end() {
            bytes.insert(offset, (wildcard, 0));
        }
    }
}

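/// A partial, owned list of provenance extracted from one allocation so it can be applied to
/// another (or to a different range of the same allocation). Offsets are stored relative to the
/// start of the copied range; see `prepare_copy` and `apply_copy`.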
pub struct ProvenanceCopy<Prov> {
    ptrs: Box<[(Size, Prov)]>,
    bytes: Box<[(Size, (Prov, u8))]>,
}

impl<Prov: Provenance> ProvenanceMap<Prov> {
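    /// Collects the provenance in `range` into a `ProvenanceCopy`, with offsets made relative to
    /// `range.start`. Pointers that only partially overlap the range become byte-level fragments;
    /// if any fragments would be needed while `Prov::OFFSET_IS_ADDR` is not set, this fails with
    /// `AllocError::ReadPartialPointer`.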
    pub fn prepare_copy(
        &self,
        range: AllocRange,
        cx: &impl HasDataLayout,
    ) -> AllocResult<ProvenanceCopy<Prov>> {
        let shift_offset = move |offset| offset - range.start;
        let ptr_size = cx.data_layout().pointer_size();

        // Gather the ptr-sized entries that are fully contained in the range. (If the range is
        // smaller than a pointer, there cannot be any.)
        let mut ptrs_box: Box<[_]> = Box::new([]);
        if range.size >= ptr_size {
            let adjusted_end = Size::from_bytes(range.end().bytes() - (ptr_size.bytes() - 1));
            let ptrs = self.ptrs.range(range.start..adjusted_end);
            ptrs_box = ptrs.iter().map(|&(offset, reloc)| (shift_offset(offset), reloc)).collect();
        };

        // Gather the byte-level fragments: existing fragments inside the range, plus the parts of
        // any pointers that overlap the start or end of the range.
        let mut bytes_box: Box<[_]> = Box::new([]);
        let begin_overlap = self.range_ptrs_get(alloc_range(range.start, Size::ZERO), cx).first();
        let end_overlap = self.range_ptrs_get(alloc_range(range.end(), Size::ZERO), cx).first();
        if begin_overlap.is_some() || end_overlap.is_some() || self.bytes.is_some() {
            let mut bytes: Vec<(Size, (Prov, u8))> = Vec::new();
            // Fragments for a pointer overlapping the start of the range.
            if let Some(entry) = begin_overlap {
                trace!("start overlapping entry: {entry:?}");
                let entry_end = cmp::min(entry.0 + ptr_size, range.end());
                for offset in range.start..entry_end {
                    bytes.push((shift_offset(offset), (entry.1, (offset - entry.0).bytes() as u8)));
                }
            } else {
                trace!("no start overlapping entry");
            }

            // Existing byte-level fragments inside the range.
            bytes.extend(
                self.range_bytes_get(range)
                    .iter()
                    .map(|&(offset, reloc)| (shift_offset(offset), reloc)),
            );

            // Fragments for a pointer overlapping the end of the range, skipping bytes that were
            // already added above (which can happen when the range is smaller than a pointer and
            // the same pointer overlaps both ends).
            if let Some(entry) = end_overlap {
                trace!("end overlapping entry: {entry:?}");
                let entry_start = cmp::max(entry.0, range.start);
                for offset in entry_start..range.end() {
                    if bytes.last().is_none_or(|bytes_entry| bytes_entry.0 < offset) {
                        bytes.push((
                            shift_offset(offset),
                            (entry.1, (offset - entry.0).bytes() as u8),
                        ));
                    } else {
                        assert!(entry.0 <= range.start);
                    }
                }
            } else {
                trace!("no end overlapping entry");
            }
            trace!("byte provenances: {bytes:?}");

            // Partial pointers cannot be represented without `OFFSET_IS_ADDR`, so reject them.
            if !bytes.is_empty() && !Prov::OFFSET_IS_ADDR {
                return Err(AllocError::ReadPartialPointer(range.start));
            }

            bytes_box = bytes.into_boxed_slice();
        }

        Ok(ProvenanceCopy { ptrs: ptrs_box, bytes: bytes_box })
    }

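    /// Applies a `ProvenanceCopy` produced by `prepare_copy` to the given range, repeating it
    /// `repeat` times back-to-back: `range.size` is the size of one repetition, so the total
    /// affected size is `range.size * repeat`.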
    pub fn apply_copy(&mut self, copy: ProvenanceCopy<Prov>, range: AllocRange, repeat: u64) {
        let shift_offset = |idx: u64, offset: Size| offset + range.start + idx * range.size;
        if !copy.ptrs.is_empty() {
            // Expand the ptr-sized entries to all repetitions, shifting each into its chunk.
            let chunk_len = copy.ptrs.len() as u64;
            self.ptrs.insert_presorted((0..chunk_len * repeat).map(|i| {
                let chunk = i / chunk_len;
                let (offset, reloc) = copy.ptrs[(i % chunk_len) as usize];
                (shift_offset(chunk, offset), reloc)
            }));
        }
        if !copy.bytes.is_empty() {
            // Same for the byte-level fragments.
            let chunk_len = copy.bytes.len() as u64;
            self.bytes.get_or_insert_with(Box::default).insert_presorted(
                (0..chunk_len * repeat).map(|i| {
                    let chunk = i / chunk_len;
                    let (offset, reloc) = copy.bytes[(i % chunk_len) as usize];
                    (shift_offset(chunk, offset), reloc)
                }),
            );
        }
    }
}