use std::cmp;
use std::ops::{Range, RangeBounds};

use rustc_abi::{HasDataLayout, Size};
use rustc_data_structures::sorted_map::SortedMap;
use rustc_macros::HashStable;
use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
use tracing::trace;

use super::{AllocRange, CtfeProvenance, Provenance, alloc_range};
use crate::mir::interpret::{AllocError, AllocResult};

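/// A single byte of a pointer, remembering the pointer it was split off from:
/// its provenance, the raw bytes of its address, and this byte's position within it.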
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
#[derive(HashStable)]
pub struct PointerFrag<Prov> {
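    /// The position of this byte within its original pointer (`0..pointer_size`).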
    pub idx: u8,
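    /// The provenance of the pointer this byte was split off from.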
    pub prov: Prov,
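    /// The raw bytes of the whole pointer; only the first `pointer_size` bytes are meaningful.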
    pub bytes: [u8; 8],
}

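/// Maps byte offsets within an allocation to their provenance: whole pointers are tracked
/// in `ptrs`, and individual bytes that carry provenance (pointer fragments) in `bytes`.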
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
#[derive(HashStable)]
pub struct ProvenanceMap<Prov = CtfeProvenance> {
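    /// Provenance of whole pointers: each entry covers `pointer_size` bytes starting at its key.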
    ptrs: SortedMap<Size, Prov>,
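    /// Provenance of individual pointer bytes (fragments); `None` means there are no fragments.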
    bytes: Option<Box<SortedMap<Size, PointerFrag<Prov>>>>,
}

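// Only the `ptrs` part of a `ProvenanceMap` is (de)serialized: `encode` asserts that no
// byte-level fragments are present, and `decode` accordingly starts with an empty `bytes` map.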
impl<D: Decoder, Prov: Provenance + Decodable<D>> Decodable<D> for ProvenanceMap<Prov> {
    fn decode(d: &mut D) -> Self {
        Self { ptrs: Decodable::decode(d), bytes: None }
    }
}
impl<S: Encoder, Prov: Provenance + Encodable<S>> Encodable<S> for ProvenanceMap<Prov> {
    fn encode(&self, s: &mut S) {
        let Self { ptrs, bytes } = self;
        assert!(bytes.is_none());
        ptrs.encode(s)
    }
}

impl<Prov> ProvenanceMap<Prov> {
    pub fn new() -> Self {
        ProvenanceMap { ptrs: SortedMap::new(), bytes: None }
    }

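    /// The caller must pass a list that is already sorted by offset, as implied by
    /// `SortedMap::from_presorted_elements`.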
    pub fn from_presorted_ptrs(r: Vec<(Size, Prov)>) -> Self {
        ProvenanceMap { ptrs: SortedMap::from_presorted_elements(r), bytes: None }
    }
}

impl ProvenanceMap {
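    /// Direct access to the whole-pointer provenance; only valid once no byte-level fragments
    /// remain (see the assertion below).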
    #[inline]
    pub fn ptrs(&self) -> &SortedMap<Size, CtfeProvenance> {
        assert!(self.bytes.is_none(), "`ptrs()` called on non-interned allocation");
        &self.ptrs
    }
}

impl<Prov: Provenance> ProvenanceMap<Prov> {
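    /// Widens `range` backwards by `pointer_size - 1` bytes so that lookups in `ptrs` also find
    /// whole pointers that start before the range but overlap into it.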
    fn adjusted_range_ptrs(range: AllocRange, cx: &impl HasDataLayout) -> Range<Size> {
        let adjusted_start = Size::from_bytes(
            range.start.bytes().saturating_sub(cx.data_layout().pointer_size().bytes() - 1),
        );
        adjusted_start..range.end()
    }

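    /// Returns all whole-pointer provenance entries that overlap the given range.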
    fn range_ptrs_get(&self, range: AllocRange, cx: &impl HasDataLayout) -> &[(Size, Prov)] {
        self.ptrs.range(Self::adjusted_range_ptrs(range, cx))
    }

    fn range_ptrs_is_empty(&self, range: AllocRange, cx: &impl HasDataLayout) -> bool {
        self.ptrs.range_is_empty(Self::adjusted_range_ptrs(range, cx))
    }

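    /// Returns the provenance of a whole pointer starting exactly at `offset`, if any.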
    pub fn get_ptr(&self, offset: Size) -> Option<Prov> {
        self.ptrs.get(&offset).copied()
    }

    fn range_bytes_get(&self, range: AllocRange) -> &[(Size, PointerFrag<Prov>)] {
        if let Some(bytes) = self.bytes.as_ref() {
            bytes.range(range.start..range.end())
        } else {
            &[]
        }
    }

    fn range_bytes_is_empty(&self, range: AllocRange) -> bool {
        self.bytes.as_ref().is_none_or(|bytes| bytes.range_is_empty(range.start..range.end()))
    }

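    /// Returns the pointer fragment stored for the byte at `offset`, if any. The caller must
    /// ensure that no whole pointer covers this byte (checked by the debug assertion).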
    pub fn get_byte(&self, offset: Size, cx: &impl HasDataLayout) -> Option<&PointerFrag<Prov>> {
        debug_assert!(self.range_ptrs_is_empty(alloc_range(offset, Size::from_bytes(1)), cx));
        self.bytes.as_ref().and_then(|b| b.get(&offset))
    }

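    /// Yields all provenance overlapping the given range, together with the sub-range each entry
    /// covers: `pointer_size` bytes for whole pointers, a single byte for fragments.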
    pub fn get_range(
        &self,
        range: AllocRange,
        cx: &impl HasDataLayout,
    ) -> impl Iterator<Item = (AllocRange, Prov)> {
        let ptr_size = cx.data_layout().pointer_size();
        let ptr_provs = self
            .range_ptrs_get(range, cx)
            .iter()
            .map(move |(offset, p)| (alloc_range(*offset, ptr_size), *p));
        let byte_provs = self
            .range_bytes_get(range)
            .iter()
            .map(move |(offset, frag)| (alloc_range(*offset, Size::from_bytes(1)), frag.prov));
        ptr_provs.chain(byte_provs)
    }

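    /// Tries to turn the byte-level fragments back into whole pointers. Returns `true` if the
    /// `bytes` map could be emptied this way, and `false` if some fragments do not line up into
    /// complete, in-order pointers.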
    pub fn merge_bytes(&mut self, cx: &impl HasDataLayout) -> bool {
        let Some(bytes) = self.bytes.as_deref_mut() else {
            return true;
        };
        let ptr_size = cx.data_layout().pointer_size();
        while let Some((offset, first_frag)) = bytes.iter().next() {
            let offset = *offset;
            let range = offset..offset + ptr_size;
            let frags = bytes.range(range.clone());
            if frags.len() != ptr_size.bytes_usize() {
                return false;
            }
            for (idx, (_offset, frag)) in frags.iter().enumerate() {
                if !(frag.prov == first_frag.prov
                    && frag.bytes == first_frag.bytes
                    && frag.idx == idx as u8)
                {
                    return false;
                }
            }
            self.ptrs.insert(offset, first_frag.prov);
            bytes.remove_range(range);
        }
        self.bytes = None;
        true
    }

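    /// Determines the provenance for a pointer-sized read at `offset`: either a whole pointer
    /// stored exactly there, or a pointer reassembled from matching byte fragments (wildcard
    /// fragments are accepted). Returns `Err(ReadPartialPointer)` if the range carries partial
    /// or mismatched provenance and `Prov::OFFSET_IS_ADDR` does not hold.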
    pub fn read_ptr(&self, offset: Size, cx: &impl HasDataLayout) -> AllocResult<Option<Prov>> {
        if let Some(prov) = self.get_ptr(offset) {
            return Ok(Some(prov));
        }
        let range = alloc_range(offset, cx.data_layout().pointer_size());
        let no_ptrs = self.range_ptrs_is_empty(range, cx);
        if no_ptrs && self.range_bytes_is_empty(range) {
            return Ok(None);
        }
        let prov = 'prov: {
            if !no_ptrs {
                break 'prov None;
            }
            let mut expected = None;
            for idx in Size::ZERO..range.size {
                let Some(frag) = self.get_byte(offset + idx, cx) else {
                    break 'prov None;
                };
                if Some(frag.prov) == Prov::WILDCARD {
                    continue;
                }
                if u64::from(frag.idx) != idx.bytes() {
                    break 'prov None;
                }
                match expected {
                    Some(expected) => {
                        if (frag.prov, frag.bytes) != expected {
                            break 'prov None;
                        }
                    }
                    None => {
                        expected = Some((frag.prov, frag.bytes));
                    }
                }
            }
            Some(expected.map(|(prov, _addr)| prov).or_else(|| Prov::WILDCARD).unwrap())
        };
        if prov.is_none() && !Prov::OFFSET_IS_ADDR {
            return Err(AllocError::ReadPartialPointer(offset));
        }
        Ok(prov)
    }

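    /// Returns `true` if the given range carries no provenance at all, neither whole pointers
    /// nor fragments.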
    pub fn range_empty(&self, range: AllocRange, cx: &impl HasDataLayout) -> bool {
        self.range_ptrs_is_empty(range, cx) && self.range_bytes_is_empty(range)
    }

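    /// Iterates over all provenance values stored in this map, for whole pointers and fragments
    /// alike.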
    pub fn provenances(&self) -> impl Iterator<Item = Prov> {
        let bytes = self.bytes.iter().flat_map(|b| b.values().map(|frag| frag.prov));
        self.ptrs.values().copied().chain(bytes)
    }

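    /// Records whole-pointer provenance starting at `offset`. The affected range must not carry
    /// any provenance yet (checked by the debug assertion).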
    pub fn insert_ptr(&mut self, offset: Size, prov: Prov, cx: &impl HasDataLayout) {
        debug_assert!(self.range_empty(alloc_range(offset, cx.data_layout().pointer_size()), cx));
        self.ptrs.insert(offset, prov);
    }

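    /// Splits the pointer at `ptr_pos` (whose raw bytes are taken from `data_bytes`) into
    /// per-byte fragments, yielding only the fragments whose position lies within `pos_range`.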
    fn ptr_fragments(
        pos_range: impl RangeBounds<Size>,
        ptr_pos: Size,
        prov: Prov,
        data_bytes: &[u8],
        ptr_size: Size,
    ) -> impl Iterator<Item = (Size, PointerFrag<Prov>)> {
        if pos_range.is_empty() {
            return either::Left(std::iter::empty());
        }
        let mut bytes = [0u8; 8];
        (&mut bytes[..ptr_size.bytes_usize()])
            .copy_from_slice(&data_bytes[ptr_pos.bytes_usize()..][..ptr_size.bytes_usize()]);
        either::Right(
            (ptr_pos..ptr_pos + ptr_size).filter(move |pos| pos_range.contains(pos)).map(
                move |pos| (pos, PointerFrag { idx: (pos - ptr_pos).bytes() as u8, bytes, prov }),
            ),
        )
    }

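    /// Removes all provenance inside the given range. Whole pointers that straddle an edge of
    /// the range are not dropped entirely; their bytes outside the range are kept as fragments,
    /// which is why the allocation's `data_bytes` are needed here.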
    #[allow(irrefutable_let_patterns)]
    pub fn clear(&mut self, range: AllocRange, data_bytes: &[u8], cx: &impl HasDataLayout) {
        if range.size == Size::ZERO {
            return;
        }

        let start = range.start;
        let end = range.end();
        if let Some(bytes) = self.bytes.as_mut() {
            bytes.remove_range(start..end);
        }

        let ptrs_range = Self::adjusted_range_ptrs(range, cx);
        if self.ptrs.range_is_empty(ptrs_range.clone()) {
            return;
        }
        let pointer_size = cx.data_layout().pointer_size();

        let ptrs = self.ptrs.range(ptrs_range.clone());

        if let &(first, prov) = ptrs.first().unwrap()
            && first < start
        {
            let bytes = self.bytes.get_or_insert_with(Box::default);
            for (pos, frag) in Self::ptr_fragments(..start, first, prov, data_bytes, pointer_size) {
                bytes.insert(pos, frag);
            }
        }
        if let &(last, prov) = ptrs.last().unwrap()
            && last + pointer_size > end
        {
            let bytes = self.bytes.get_or_insert_with(Box::default);
            for (pos, frag) in Self::ptr_fragments(end.., last, prov, data_bytes, pointer_size) {
                bytes.insert(pos, frag);
            }
        }

        self.ptrs.remove_range(ptrs_range);
    }

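    /// Overwrites the provenance of every byte in the given range with the wildcard provenance;
    /// `Prov::WILDCARD` must therefore be available. Pointers overlapping the edges are first
    /// split into fragments via `clear`.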
    pub fn write_wildcards(
        &mut self,
        cx: &impl HasDataLayout,
        data_bytes: &[u8],
        range: AllocRange,
    ) {
        let wildcard = Prov::WILDCARD.unwrap();

        self.clear(range, data_bytes, cx);

        let bytes = self.bytes.get_or_insert_with(Box::default);
        for offset in range.start..range.end() {
            bytes.insert(
                offset,
                PointerFrag { prov: wildcard, idx: Default::default(), bytes: Default::default() },
            );
        }
    }
}

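/// Provenance extracted from a source range by `prepare_copy`, with offsets stored relative to
/// the start of that range so that `apply_copy` can paste it at a different position, possibly
/// repeatedly.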
pub struct ProvenanceCopy<Prov> {
    ptrs: Box<[(Size, Prov)]>,
    bytes: Box<[(Size, PointerFrag<Prov>)]>,
}

impl<Prov: Provenance> ProvenanceMap<Prov> {
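    /// Collects all provenance in the source `range` into a relocatable `ProvenanceCopy`. Whole
    /// pointers that extend past the edges of the range contribute byte fragments for the part
    /// that lies inside it.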
    pub fn prepare_copy(
        &self,
        range: AllocRange,
        data_bytes: &[u8],
        cx: &impl HasDataLayout,
    ) -> ProvenanceCopy<Prov> {
        let shift_offset = move |offset| offset - range.start;
        let ptr_size = cx.data_layout().pointer_size();

        let mut ptrs_box: Box<[_]> = Box::new([]);
        if range.size >= ptr_size {
            let adjusted_end = Size::from_bytes(range.end().bytes() - (ptr_size.bytes() - 1));
            let ptrs = self.ptrs.range(range.start..adjusted_end);
            ptrs_box = ptrs.iter().map(|&(offset, reloc)| (shift_offset(offset), reloc)).collect();
        };

        let mut bytes_box: Box<[_]> = Box::new([]);
        let begin_overlap = self.range_ptrs_get(alloc_range(range.start, Size::ZERO), cx).first();
        let end_overlap = self.range_ptrs_get(alloc_range(range.end(), Size::ZERO), cx).first();
        if begin_overlap.is_some() || end_overlap.is_some() || self.bytes.is_some() {
            let mut bytes: Vec<(Size, PointerFrag<Prov>)> = Vec::new();
            if let Some(&(pos, prov)) = begin_overlap {
                let end = cmp::min(pos + ptr_size, range.end());
                for (pos, frag) in
                    Self::ptr_fragments(range.start..end, pos, prov, data_bytes, ptr_size)
                {
                    bytes.push((shift_offset(pos), frag));
                }
            } else {
                trace!("no start overlapping entry");
            }

            bytes.extend(
                self.range_bytes_get(range)
                    .iter()
                    .map(|(offset, frag)| (shift_offset(*offset), frag.clone())),
            );

            if let Some(&(pos, prov)) = end_overlap
                && begin_overlap.is_none_or(|(begin, _)| *begin != pos)
            {
                assert!(pos >= range.start);
                for (pos, frag) in
                    Self::ptr_fragments(pos..range.end(), pos, prov, data_bytes, ptr_size)
                {
                    let pos = shift_offset(pos);
                    debug_assert!(bytes.last().is_none_or(|bytes_entry| bytes_entry.0 < pos));
                    bytes.push((pos, frag));
                }
            } else {
                trace!("no end overlapping entry");
            }
            trace!("byte provenances: {bytes:?}");

            bytes_box = bytes.into_boxed_slice();
        }

        ProvenanceCopy { ptrs: ptrs_box, bytes: bytes_box }
    }

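    /// Applies a prepared copy at `range.start`, repeating it `repeat` times back-to-back with a
    /// stride of `range.size`.
    ///
    /// A minimal sketch of how the two halves fit together (hypothetical caller; `src`, `dest`,
    /// and the ranges are illustrative names, not code from this crate):
    ///
    /// ```ignore (illustrative)
    /// let copy = src.prepare_copy(src_range, src_data, cx);
    /// dest.apply_copy(copy, dest_range, /* repeat */ 1);
    /// ```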
    pub fn apply_copy(&mut self, copy: ProvenanceCopy<Prov>, range: AllocRange, repeat: u64) {
        let shift_offset = |idx: u64, offset: Size| offset + range.start + idx * range.size;
        if !copy.ptrs.is_empty() {
            let chunk_len = copy.ptrs.len() as u64;
            self.ptrs.insert_presorted((0..chunk_len * repeat).map(|i| {
                let chunk = i / chunk_len;
                let (offset, prov) = copy.ptrs[(i % chunk_len) as usize];
                (shift_offset(chunk, offset), prov)
            }));
        }
        if !copy.bytes.is_empty() {
            let chunk_len = copy.bytes.len() as u64;
            self.bytes.get_or_insert_with(Box::default).insert_presorted(
                (0..chunk_len * repeat).map(|i| {
                    let chunk = i / chunk_len;
                    let (offset, frag) = &copy.bytes[(i % chunk_len) as usize];
                    (shift_offset(chunk, *offset), frag.clone())
                }),
            );
        }
    }
}