rustc_middle/mir/interpret/allocation/provenance_map.rs

use std::cmp;
use std::ops::Range;

use rustc_abi::{HasDataLayout, Size};
use rustc_data_structures::sorted_map::SortedMap;
use rustc_macros::HashStable;
use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
use tracing::trace;

use super::{AllocError, AllocRange, AllocResult, CtfeProvenance, Provenance, alloc_range};

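/// Stores the provenance information of pointers stored in memory.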
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
#[derive(HashStable)]
pub struct ProvenanceMap<Prov = CtfeProvenance> {
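    /// Provenance in this map applies from the given offset for an entire pointer-size worth
    /// of bytes. Two entries in this map are always at least a pointer size apart.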
    ptrs: SortedMap<Size, Prov>,
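    /// Provenance in this map only applies to the given single byte.
    /// This map is disjoint from `ptrs`, and it is always empty when
    /// `Prov::OFFSET_IS_ADDR` is false.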
    bytes: Option<Box<SortedMap<Size, Prov>>>,
}

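// Serialization only ever sees `!Prov::OFFSET_IS_ADDR` maps (the CTFE setting), where
// `bytes` is always `None`, so only `ptrs` needs to be encoded; both impls assert this.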
impl<D: Decoder, Prov: Provenance + Decodable<D>> Decodable<D> for ProvenanceMap<Prov> {
    fn decode(d: &mut D) -> Self {
        assert!(!Prov::OFFSET_IS_ADDR);
        Self { ptrs: Decodable::decode(d), bytes: None }
    }
}

impl<S: Encoder, Prov: Provenance + Encodable<S>> Encodable<S> for ProvenanceMap<Prov> {
    fn encode(&self, s: &mut S) {
        let Self { ptrs, bytes } = self;
        assert!(!Prov::OFFSET_IS_ADDR);
        debug_assert!(bytes.is_none());
        ptrs.encode(s)
    }
}

impl<Prov> ProvenanceMap<Prov> {
    pub fn new() -> Self {
        ProvenanceMap { ptrs: SortedMap::new(), bytes: None }
    }

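    /// The caller must guarantee that the given provenance list is already sorted
    /// by offset and contains no duplicates.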
    pub fn from_presorted_ptrs(r: Vec<(Size, Prov)>) -> Self {
        ProvenanceMap { ptrs: SortedMap::from_presorted_elements(r), bytes: None }
    }
}

impl ProvenanceMap {
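    /// Gives direct access to the pointer-sized provenances.
    ///
    /// Only exposed for `CtfeProvenance`, which never has bytewise provenance.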
    #[inline]
    pub fn ptrs(&self) -> &SortedMap<Size, CtfeProvenance> {
        debug_assert!(self.bytes.is_none());
        &self.ptrs
    }
}

impl<Prov: Provenance> ProvenanceMap<Prov> {
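    /// Returns the range of offsets at which a pointer could start and still overlap the
    /// given range: we have to look back `pointer_size - 1` bytes before `range.start`.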
    fn adjusted_range_ptrs(range: AllocRange, cx: &impl HasDataLayout) -> Range<Size> {
        let adjusted_start = Size::from_bytes(
            range.start.bytes().saturating_sub(cx.data_layout().pointer_size.bytes() - 1),
        );
        adjusted_start..range.end()
    }

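    /// Returns all pointer-sized provenance entries overlapping the given range.
    /// If the range has length 0, returns provenance that crosses the edge between
    /// `start - 1` and `start`.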
    pub(super) fn range_ptrs_get(
        &self,
        range: AllocRange,
        cx: &impl HasDataLayout,
    ) -> &[(Size, Prov)] {
        self.ptrs.range(Self::adjusted_range_ptrs(range, cx))
    }

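    /// Returns whether the given range is free of pointer-sized provenance.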
    pub(super) fn range_ptrs_is_empty(&self, range: AllocRange, cx: &impl HasDataLayout) -> bool {
        self.ptrs.range_is_empty(Self::adjusted_range_ptrs(range, cx))
    }

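    /// Returns all byte-sized provenance entries in the given range.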
    fn range_bytes_get(&self, range: AllocRange) -> &[(Size, Prov)] {
        if let Some(bytes) = self.bytes.as_ref() {
            bytes.range(range.start..range.end())
        } else {
            &[]
        }
    }

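    /// Returns whether the given range is free of byte-sized provenance.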
    fn range_bytes_is_empty(&self, range: AllocRange) -> bool {
        self.bytes.as_ref().is_none_or(|bytes| bytes.range_is_empty(range.start..range.end()))
    }

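    /// Gets the provenance of a single byte, whether it stems from a pointer-sized or a
    /// byte-sized entry.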
    pub fn get(&self, offset: Size, cx: &impl HasDataLayout) -> Option<Prov> {
        let prov = self.range_ptrs_get(alloc_range(offset, Size::from_bytes(1)), cx);
        debug_assert!(prov.len() <= 1);
        if let Some(entry) = prov.first() {
            // The two maps are disjoint: if a pointer-sized entry covers this byte,
            // `bytes` must not also have an entry for it.
            debug_assert!(self.bytes.as_ref().is_none_or(|b| !b.contains_key(&offset)));
            Some(entry.1)
        } else {
            // Fall back to the byte-sized provenance.
            self.bytes.as_ref().and_then(|b| b.get(&offset).copied())
        }
    }

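    /// Checks for pointer-sized provenance starting exactly at the given offset.
    /// Says nothing about byte-sized provenance.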
    pub fn get_ptr(&self, offset: Size) -> Option<Prov> {
        self.ptrs.get(&offset).copied()
    }

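    /// Returns whether the given range is entirely free of provenance of any kind.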
    pub fn range_empty(&self, range: AllocRange, cx: &impl HasDataLayout) -> bool {
        self.range_ptrs_is_empty(range, cx) && self.range_bytes_is_empty(range)
    }

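    /// Yields all the provenance values stored in this map, without their offsets.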
    pub fn provenances(&self) -> impl Iterator<Item = Prov> {
        let bytes = self.bytes.iter().flat_map(|b| b.values());
        self.ptrs.values().chain(bytes).copied()
    }

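    /// Inserts pointer-sized provenance at the given offset. The caller must ensure that the
    /// affected pointer-size range is free of provenance (checked by a `debug_assert`).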
    pub fn insert_ptr(&mut self, offset: Size, prov: Prov, cx: &impl HasDataLayout) {
        debug_assert!(self.range_empty(alloc_range(offset, cx.data_layout().pointer_size), cx));
        self.ptrs.insert(offset, prov);
    }

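    /// Removes all provenance inside the given range.
    /// If provenance overlaps with the edges of the range, this can fail.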
    pub fn clear(&mut self, range: AllocRange, cx: &impl HasDataLayout) -> AllocResult {
        let start = range.start;
        let end = range.end();
        // Clear the bytewise part -- this is easy.
        if Prov::OFFSET_IS_ADDR {
            if let Some(bytes) = self.bytes.as_mut() {
                bytes.remove_range(start..end);
            }
        } else {
            debug_assert!(self.bytes.is_none());
        }

        // For the pointer-sized part, find the first (inclusive) and last (exclusive) byte of
        // provenance overlapping the given range.
        let (first, last) = {
            if self.range_ptrs_is_empty(range, cx) {
                // No provenance in this range, we are done. This is the common case.
                return Ok(());
            }

            // This redoes some of the work of `range_ptrs_is_empty`, but this path is much
            // colder than the early return above, so it's worth it.
            let provenance = self.range_ptrs_get(range, cx);
            (
                provenance.first().unwrap().0,
                provenance.last().unwrap().0 + cx.data_layout().pointer_size,
            )
        };

        // We need to handle clearing the provenance from parts of a pointer.
        if first < start {
            if !Prov::OFFSET_IS_ADDR {
                // We can't split a pointer's provenance into anything smaller.
                return Err(AllocError::OverwritePartialPointer(first));
            }
            // Keep the surviving prefix of the pointer as bytewise provenance.
            let prov = self.ptrs[&first];
            let bytes = self.bytes.get_or_insert_with(Box::default);
            for offset in first..start {
                bytes.insert(offset, prov);
            }
        }
        if last > end {
            let begin_of_last = last - cx.data_layout().pointer_size;
            if !Prov::OFFSET_IS_ADDR {
                // We can't split a pointer's provenance into anything smaller.
                return Err(AllocError::OverwritePartialPointer(begin_of_last));
            }
            // Keep the surviving suffix of the pointer as bytewise provenance.
            let prov = self.ptrs[&begin_of_last];
            let bytes = self.bytes.get_or_insert_with(Box::default);
            for offset in end..last {
                bytes.insert(offset, prov);
            }
        }

        // Forget all the pointer-sized provenance. Entries in `ptrs` never overlap, so removing
        // up to `last` (exclusive) cannot touch the provenance of a pointer outside the range.
        self.ptrs.remove_range(first..last);

        Ok(())
    }

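    /// Overwrites all provenance in the allocation with wildcard provenance.
    ///
    /// Only usable when `Prov::OFFSET_IS_ADDR` and a wildcard provenance exists
    /// (e.g. in Miri); panics otherwise.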
    pub fn write_wildcards(&mut self, alloc_size: usize) {
        assert!(
            Prov::OFFSET_IS_ADDR,
            "writing wildcard provenance is not supported when `OFFSET_IS_ADDR` is false"
        );
        let wildcard = Prov::WILDCARD.unwrap();

        // Remove all pointer provenance, then write wildcards into the whole byte range.
        self.ptrs.clear();
        let last = Size::from_bytes(alloc_size);
        let bytes = self.bytes.get_or_insert_with(Box::default);
        for offset in Size::ZERO..last {
            bytes.insert(offset, wildcard);
        }
    }
}

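/// A partial, owned list of provenance to transfer into another allocation.
///
/// Offsets are already adjusted to the destination allocation.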
pub struct ProvenanceCopy<Prov> {
    dest_ptrs: Option<Box<[(Size, Prov)]>>,
    dest_bytes: Option<Box<[(Size, Prov)]>>,
}

impl<Prov: Provenance> ProvenanceMap<Prov> {
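    /// Gathers the provenance for `count` adjacent repetitions of the `src` range, with all
    /// offsets already shifted to the destination; `apply_copy` then commits the result.
    /// Splitting the copy into these two steps lets the borrow of the source map end before
    /// the destination map is mutated, e.g. when copying within a single allocation.
    ///
    /// A sketch of the intended call pattern (hypothetical caller code; `src_map` and
    /// `dest_map` stand for `ProvenanceMap`s whose destination range was already cleared):
    ///
    /// ```ignore (illustrative sketch, not a doctest)
    /// let copy = src_map.prepare_copy(src_range, dest_offset, count, cx)?;
    /// dest_map.apply_copy(copy);
    /// ```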
    pub fn prepare_copy(
        &self,
        src: AllocRange,
        dest: Size,
        count: u64,
        cx: &impl HasDataLayout,
    ) -> AllocResult<ProvenanceCopy<Prov>> {
        let shift_offset = move |idx, offset| {
            // Compute the start of repetition `idx` in the destination, then shift `offset`
            // from the source range into that repetition.
            let dest_offset = dest + src.size * idx;
            (offset - src.start) + dest_offset
        };
        let ptr_size = cx.data_layout().pointer_size;

        // Pointer-sized provenances: gather the entries that lie entirely within the source
        // range (different from `range_ptrs_get`, which asks whether they overlap it).
        // This only makes sense if we are copying at least one pointer's worth of bytes.
        let mut dest_ptrs_box = None;
        if src.size >= ptr_size {
            let adjusted_end = Size::from_bytes(src.end().bytes() - (ptr_size.bytes() - 1));
            let ptrs = self.ptrs.range(src.start..adjusted_end);
            // For large `count` this allocates a big, mostly redundant array, but it lets
            // `apply_copy` use a single presorted bulk insert, which is very fast when the
            // destination range is empty -- the common case.
            let mut dest_ptrs = Vec::with_capacity(ptrs.len() * (count as usize));
            for i in 0..count {
                dest_ptrs
                    .extend(ptrs.iter().map(|&(offset, reloc)| (shift_offset(i, offset), reloc)));
            }
            debug_assert_eq!(dest_ptrs.len(), dest_ptrs.capacity());
            dest_ptrs_box = Some(dest_ptrs.into_boxed_slice());
        }

        // Byte-sized provenances: the existing bytewise provenance in the range, plus
        // pointer provenance that overlaps the begin/end of the range.
        let mut dest_bytes_box = None;
        let begin_overlap = self.range_ptrs_get(alloc_range(src.start, Size::ZERO), cx).first();
        let end_overlap = self.range_ptrs_get(alloc_range(src.end(), Size::ZERO), cx).first();
        if !Prov::OFFSET_IS_ADDR {
            // There can't be any bytewise provenance, and we cannot split up the begin/end
            // overlap into individual bytes.
            if let Some(entry) = begin_overlap {
                return Err(AllocError::ReadPartialPointer(entry.0));
            }
            if let Some(entry) = end_overlap {
                return Err(AllocError::ReadPartialPointer(entry.0));
            }
            debug_assert!(self.bytes.is_none());
        } else {
            let mut bytes = Vec::new();
            // First, if there is a part of a pointer at the start, add that.
            if let Some(entry) = begin_overlap {
                trace!("start overlapping entry: {entry:?}");
                // For really small copies, make sure we don't run off the end of the `src` range.
                let entry_end = cmp::min(entry.0 + ptr_size, src.end());
                for offset in src.start..entry_end {
                    bytes.push((offset, entry.1));
                }
            } else {
                trace!("no start overlapping entry");
            }

            // Then the main part: the bytewise provenance of the range itself.
            bytes.extend(self.range_bytes_get(src));

            // And finally possibly parts of a pointer at the end.
            if let Some(entry) = end_overlap {
                trace!("end overlapping entry: {entry:?}");
                // For really small copies, make sure we don't start before the `src` range.
                let entry_start = cmp::max(entry.0, src.start);
                for offset in entry_start..src.end() {
                    if bytes.last().is_none_or(|bytes_entry| bytes_entry.0 < offset) {
                        // The last entry, if it exists, has a lower offset than us.
                        bytes.push((offset, entry.1));
                    } else {
                        // There already is an entry for this offset: the begin and end overlap
                        // checks hit the same pointer, so this byte was pushed above already.
                        assert!(entry.0 <= src.start);
                    }
                }
            } else {
                trace!("no end overlapping entry");
            }
            trace!("byte provenances: {bytes:?}");

            // Pre-allocate the destination-side buffer for all `count` repetitions.
            let mut dest_bytes = Vec::with_capacity(bytes.len() * (count as usize));
            for i in 0..count {
                dest_bytes
                    .extend(bytes.iter().map(|&(offset, reloc)| (shift_offset(i, offset), reloc)));
            }
            debug_assert_eq!(dest_bytes.len(), dest_bytes.capacity());
            dest_bytes_box = Some(dest_bytes.into_boxed_slice());
        }

        Ok(ProvenanceCopy { dest_ptrs: dest_ptrs_box, dest_bytes: dest_bytes_box })
    }

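    /// Applies a provenance copy.
    /// The affected range, as defined by the parameters to `prepare_copy`, is expected
    /// to be clear of provenance.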
    pub fn apply_copy(&mut self, copy: ProvenanceCopy<Prov>) {
        if let Some(dest_ptrs) = copy.dest_ptrs {
            self.ptrs.insert_presorted(dest_ptrs.into());
        }
        if Prov::OFFSET_IS_ADDR {
            if let Some(dest_bytes) = copy.dest_bytes
                && !dest_bytes.is_empty()
            {
                self.bytes.get_or_insert_with(Box::default).insert_presorted(dest_bytes.into());
            }
        } else {
            debug_assert!(copy.dest_bytes.is_none());
        }
    }
}