miri/borrow_tracker/tree_borrows/mod.rs
use rustc_abi::{BackendRepr, Size};
use rustc_middle::mir::{Mutability, RetagKind};
use rustc_middle::ty::layout::HasTypingEnv;
use rustc_middle::ty::{self, Ty};

use self::foreign_access_skipping::IdempotentForeignAccess;
use self::tree::LocationState;
use crate::borrow_tracker::{GlobalState, GlobalStateInner, ProtectorKind};
use crate::concurrency::data_race::NaReadType;
use crate::*;

pub mod diagnostics;
mod foreign_access_skipping;
mod perms;
mod tree;
mod unimap;

#[cfg(test)]
mod exhaustive;

use self::perms::Permission;
pub use self::tree::Tree;

pub type AllocState = Tree;

impl<'tcx> Tree {
    /// Create a new allocation, i.e. a new tree.
    pub fn new_allocation(
        id: AllocId,
        size: Size,
        state: &mut GlobalStateInner,
        _kind: MemoryKind,
        machine: &MiriMachine<'tcx>,
    ) -> Self {
        let tag = state.root_ptr_tag(id, machine); // Fresh tag for the root
        let span = machine.current_span();
        Tree::new(tag, size, span)
    }

    /// Check that an access on the entire range is permitted, and update
    /// the tree.
    pub fn before_memory_access(
        &mut self,
        access_kind: AccessKind,
        alloc_id: AllocId,
        prov: ProvenanceExtra,
        range: AllocRange,
        machine: &MiriMachine<'tcx>,
    ) -> InterpResult<'tcx> {
        trace!(
            "{} with tag {:?}: {:?}, size {}",
            access_kind,
            prov,
            interpret::Pointer::new(alloc_id, range.start),
            range.size.bytes(),
        );
        // TODO: for now we bail out on wildcard pointers. Eventually we should
        // handle them as much as we can.
        let tag = match prov {
            ProvenanceExtra::Concrete(tag) => tag,
            ProvenanceExtra::Wildcard => return interp_ok(()),
        };
        let global = machine.borrow_tracker.as_ref().unwrap();
        let span = machine.current_span();
        self.perform_access(
            tag,
            Some((range, access_kind, diagnostics::AccessCause::Explicit(access_kind))),
            global,
            alloc_id,
            span,
        )
    }

    /// Check that this pointer has permission to deallocate this range.
    pub fn before_memory_deallocation(
        &mut self,
        alloc_id: AllocId,
        prov: ProvenanceExtra,
        size: Size,
        machine: &MiriMachine<'tcx>,
    ) -> InterpResult<'tcx> {
        // TODO: for now we bail out on wildcard pointers. Eventually we should
        // handle them as much as we can.
        let tag = match prov {
            ProvenanceExtra::Concrete(tag) => tag,
            ProvenanceExtra::Wildcard => return interp_ok(()),
        };
        let global = machine.borrow_tracker.as_ref().unwrap();
        let span = machine.current_span();
        self.dealloc(tag, alloc_range(Size::ZERO, size), global, alloc_id, span)
    }

    pub fn expose_tag(&mut self, _tag: BorTag) {
        // TODO
    }

    /// A tag just lost its protector.
    ///
    /// This emits a special kind of access that is only applied
    /// to accessed locations, as a protection against other
    /// tags not having been made aware of the existence of this
    /// protector.
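    ///
    /// Informally: every other tag experiences this as a foreign access to the
    /// locations the protected tag actually used, as if the protected tag had
    /// exercised its permission one last time before expiring. (A sketch of
    /// the intent, not a formal statement of the semantics.)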
    pub fn release_protector(
        &mut self,
        machine: &MiriMachine<'tcx>,
        global: &GlobalState,
        tag: BorTag,
        alloc_id: AllocId, // diagnostics
    ) -> InterpResult<'tcx> {
        let span = machine.current_span();
        // `None` makes it the magic on-protector-end operation
        self.perform_access(tag, None, global, alloc_id, span)
    }
}

/// Policy for a new borrow.
#[derive(Debug, Clone, Copy)]
pub struct NewPermission {
    /// Permission for the frozen part of the range.
    freeze_perm: Permission,
    /// Whether a read access should be performed on the frozen part on a retag.
    freeze_access: bool,
    /// Permission for the non-frozen part of the range.
    nonfreeze_perm: Permission,
    /// Whether a read access should be performed on the non-frozen
    /// part on a retag.
    nonfreeze_access: bool,
    /// Whether this pointer is part of the arguments of a function call.
    /// `protector` is `Some(_)` for all pointers marked `noalias`.
    protector: Option<ProtectorKind>,
}
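
// For orientation, an informal summary of `from_ref_ty` below: a `&mut T` with
// `T: Unpin` uses `Reserved`-flavored permissions for both the frozen and
// non-frozen parts and performs an initial read on the whole range, while a
// `&T` uses `Frozen` (with an initial read) outside `UnsafeCell` and `Cell`
// (with no initial access) inside it.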

impl<'tcx> NewPermission {
    /// Determine the `NewPermission` for a reference from the type of its pointee.
    fn from_ref_ty(
        pointee: Ty<'tcx>,
        mutability: Mutability,
        kind: RetagKind,
        cx: &crate::MiriInterpCx<'tcx>,
    ) -> Option<Self> {
        let ty_is_unpin = pointee.is_unpin(*cx.tcx, cx.typing_env());
        let is_protected = kind == RetagKind::FnEntry;
        let protector = is_protected.then_some(ProtectorKind::StrongProtector);

        Some(match mutability {
            Mutability::Mut if ty_is_unpin =>
                NewPermission {
                    freeze_perm: Permission::new_reserved(
                        /* ty_is_freeze */ true,
                        is_protected,
                    ),
                    freeze_access: true,
                    nonfreeze_perm: Permission::new_reserved(
                        /* ty_is_freeze */ false,
                        is_protected,
                    ),
                    // If we have a mutable reference, then the non-frozen part will
                    // have state `ReservedIM` or `Reserved`, which can have an initial read access
                    // performed on it because you cannot have multiple mutable borrows.
                    nonfreeze_access: true,
                    protector,
                },
            Mutability::Not =>
                NewPermission {
                    freeze_perm: Permission::new_frozen(),
                    freeze_access: true,
                    nonfreeze_perm: Permission::new_cell(),
                    // If it is a shared reference, then the non-frozen
                    // part will have state `Cell`, which should not have an initial access,
                    // as this can cause data races when using thread-safe data types like
                    // `Mutex<T>`.
                    nonfreeze_access: false,
                    protector,
                },
            _ => return None,
        })
    }
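
    // Illustration (hypothetical surface code): on entry to
    //     fn f(x: &mut i32, y: &std::cell::Cell<i32>) { /* ... */ }
    // `x` takes the `Mutability::Mut` arm, so it gets protected `Reserved`
    // permissions and an initial read on its whole range, while `y` takes the
    // `Mutability::Not` arm: its pointee is entirely inside a `Cell`, so it
    // gets the `Cell` permission and no initial access is performed for it.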

    /// Compute the permission for a `Box`-like type (`Box` always, and also `Unique` if enabled).
    /// These pointers allow deallocation, so they need a different kind of protector,
    /// one that is not handled by `from_ref_ty`.
    fn from_unique_ty(
        ty: Ty<'tcx>,
        kind: RetagKind,
        cx: &crate::MiriInterpCx<'tcx>,
    ) -> Option<Self> {
        let pointee = ty.builtin_deref(true).unwrap();
        pointee.is_unpin(*cx.tcx, cx.typing_env()).then_some(()).map(|()| {
            // Regular `Unpin` box, give it `noalias` but only a weak protector
            // because it is valid to deallocate it within the function.
            let is_protected = kind == RetagKind::FnEntry;
            let protector = is_protected.then_some(ProtectorKind::WeakProtector);
            NewPermission {
                freeze_perm: Permission::new_reserved(/* ty_is_freeze */ true, is_protected),
                freeze_access: true,
                nonfreeze_perm: Permission::new_reserved(
                    /* ty_is_freeze */ false,
                    is_protected,
                ),
                nonfreeze_access: true,
                protector,
            }
        })
    }
}
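
// Illustration (hypothetical surface code): for
//     fn consume(b: Box<u8>) { drop(b); }
// the box argument is retagged via `from_unique_ty` on function entry and only
// gets a weak protector, so freeing the allocation during the call is allowed;
// the strong protector used for references would make that deallocation UB.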

/// Retagging/reborrowing.
/// Policy on which permission to grant to each pointer should be left to
/// the implementation of `NewPermission`.
impl<'tcx> EvalContextPrivExt<'tcx> for crate::MiriInterpCx<'tcx> {}
trait EvalContextPrivExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
    /// Returns the provenance that should be used henceforth.
    fn tb_reborrow(
        &mut self,
        place: &MPlaceTy<'tcx>, // parent tag extracted from here
        ptr_size: Size,
        new_perm: NewPermission,
        new_tag: BorTag,
    ) -> InterpResult<'tcx, Option<Provenance>> {
        let this = self.eval_context_mut();
        // Ensure we bail out if the pointer goes out-of-bounds (see miri#1050).
        this.check_ptr_access(place.ptr(), ptr_size, CheckInAllocMsg::Dereferenceable)?;

        // It is crucial that this gets called on all code paths, to ensure we track tag creation.
        let log_creation = |this: &MiriInterpCx<'tcx>,
                            loc: Option<(AllocId, Size, ProvenanceExtra)>| // alloc_id, base_offset, orig_tag
         -> InterpResult<'tcx> {
            let global = this.machine.borrow_tracker.as_ref().unwrap().borrow();
            let ty = place.layout.ty;
            if global.tracked_pointer_tags.contains(&new_tag) {
                let ty_is_freeze = ty.is_freeze(*this.tcx, this.typing_env());
                let kind_str = if ty_is_freeze {
                    format!("initial state {} (pointee type {ty})", new_perm.freeze_perm)
                } else {
                    format!(
                        "initial state {}/{} outside/inside UnsafeCell (pointee type {ty})",
                        new_perm.freeze_perm, new_perm.nonfreeze_perm
                    )
                };
                this.emit_diagnostic(NonHaltingDiagnostic::CreatedPointerTag(
                    new_tag.inner(),
                    Some(kind_str),
                    loc.map(|(alloc_id, base_offset, orig_tag)| {
                        (alloc_id, alloc_range(base_offset, ptr_size), orig_tag)
                    }),
                ));
            }
            drop(global); // don't hold that reference any longer than we have to
            interp_ok(())
        };

        trace!("Reborrow of size {:?}", ptr_size);
        let (alloc_id, base_offset, parent_prov) = match this.ptr_try_get_alloc_id(place.ptr(), 0) {
            Ok(data) => {
                // Unlike SB, we *do* a proper retag for size 0 if we can identify the allocation.
                // After all, the pointer may be lazily initialized outside this initial range.
                data
            }
            Err(_) => {
                assert_eq!(ptr_size, Size::ZERO); // we did the deref check above, size has to be 0 here
                // This pointer doesn't come with an AllocId, so there's no
                // memory to do retagging in.
                trace!(
                    "reborrow of size 0: reference {:?} derived from {:?} (pointee {})",
                    new_tag,
                    place.ptr(),
                    place.layout.ty,
                );
                log_creation(this, None)?;
                // Keep original provenance.
                return interp_ok(place.ptr().provenance);
            }
        };
        log_creation(this, Some((alloc_id, base_offset, parent_prov)))?;

        let orig_tag = match parent_prov {
            ProvenanceExtra::Wildcard => return interp_ok(place.ptr().provenance), // TODO: handle wildcard pointers
            ProvenanceExtra::Concrete(tag) => tag,
        };

        trace!(
            "reborrow: reference {:?} derived from {:?} (pointee {}): {:?}, size {}",
            new_tag,
            orig_tag,
            place.layout.ty,
            interpret::Pointer::new(alloc_id, base_offset),
            ptr_size.bytes()
        );

        if let Some(protect) = new_perm.protector {
            // We register the protection in two different places.
            // This makes creating a protector slower, but checking whether a tag
            // is protected faster.
            this.frame_mut()
                .extra
                .borrow_tracker
                .as_mut()
                .unwrap()
                .protected_tags
                .push((alloc_id, new_tag));
            this.machine
                .borrow_tracker
                .as_mut()
                .expect("We should have borrow tracking data")
                .get_mut()
                .protected_tags
                .insert(new_tag, protect);
        }

        let alloc_kind = this.get_alloc_info(alloc_id).kind;
        if !matches!(alloc_kind, AllocKind::LiveData) {
            assert_eq!(ptr_size, Size::ZERO); // we did the deref check above, size has to be 0 here
            // There aren't actually any bytes here where accesses could even be tracked.
            // Just produce the new provenance, nothing else to do.
            return interp_ok(Some(Provenance::Concrete { alloc_id, tag: new_tag }));
        }

        let span = this.machine.current_span();

        // Store initial permissions and their corresponding range.
        let mut perms_map: RangeMap<LocationState> = RangeMap::new(
            ptr_size,
            LocationState::new_accessed(Permission::new_disabled(), IdempotentForeignAccess::None), // this will be overwritten
        );
        // Keep track of whether the node has any part that allows for interior mutability.
        // FIXME: This misses `PhantomData<UnsafeCell<T>>` which could be considered a marker
        // for requesting interior mutability.
        let mut has_unsafe_cell = false;

        // When adding a new node, the SIFA of its parents needs to be updated, potentially across
        // the entire memory range. For the parts that are being accessed below, the access itself
        // trivially takes care of that. However, we have to do some more work to also deal with
        // the parts that are not being accessed. Specifically, we
        // call `update_last_accessed_after_retag` on the SIFA of the permission set for the part of
        // memory outside `perms_map` -- so that part is definitely taken care of. The remaining concern
        // is the part of memory that is in the range of `perms_map`, but not accessed below.
        // There we have two cases:
        // * If we do have an `UnsafeCell` (`has_unsafe_cell` becomes true), then the non-accessed part
        //   uses `nonfreeze_perm`, so the `nonfreeze_perm` initialized parts are also fine. We enforce
        //   the `freeze_perm` parts to be accessed, and thus everything is taken care of.
        // * If there is no `UnsafeCell`, then `freeze_perm` is used everywhere (both inside and outside
        //   the initial range), and we update everything to have the `freeze_perm`'s SIFA, so there are
        //   no issues. (And then the assert below is not actually needed in this case.)
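        // (For instance, `Frozen` is unaffected by foreign reads, so its SIFA
        // is a foreign read: once such an access has been recorded, subsequent
        // foreign reads can skip this subtree entirely.)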
        assert!(new_perm.freeze_access);

        let protected = new_perm.protector.is_some();
        let precise_interior_mut = this
            .machine
            .borrow_tracker
            .as_mut()
            .unwrap()
            .get_mut()
            .borrow_tracker_method
            .get_tree_borrows_params()
            .precise_interior_mut;

        let default_perm = if !precise_interior_mut {
            // NOTE: Using `ty_is_freeze` doesn't give the same result as going through the range
            // and computing `has_unsafe_cell`. This is because of zero-sized `UnsafeCell`, for which
            // `has_unsafe_cell` is false, but `!ty_is_freeze` is true.
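            // (For illustration, a hypothetical type: in
            //     struct S(u32, [UnsafeCell<u8>; 0]);
            // no byte of `S` lies inside an `UnsafeCell`, so a freeze-sensitive
            // visit would leave `has_unsafe_cell` false, and yet `S` is not
            // `Freeze`.)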
            let ty_is_freeze = place.layout.ty.is_freeze(*this.tcx, this.typing_env());
            let (perm, access) = if ty_is_freeze {
                (new_perm.freeze_perm, new_perm.freeze_access)
            } else {
                (new_perm.nonfreeze_perm, new_perm.nonfreeze_access)
            };
            let sifa = perm.strongest_idempotent_foreign_access(protected);
            let new_loc = if access {
                LocationState::new_accessed(perm, sifa)
            } else {
                LocationState::new_non_accessed(perm, sifa)
            };

            for (_loc_range, loc) in perms_map.iter_mut_all() {
                *loc = new_loc;
            }

            perm
        } else {
            this.visit_freeze_sensitive(place, ptr_size, |range, frozen| {
                has_unsafe_cell = has_unsafe_cell || !frozen;

                // We are only ever `Frozen` inside the frozen bits.
                let (perm, access) = if frozen {
                    (new_perm.freeze_perm, new_perm.freeze_access)
                } else {
                    (new_perm.nonfreeze_perm, new_perm.nonfreeze_access)
                };
                let sifa = perm.strongest_idempotent_foreign_access(protected);
                // NOTE: Currently, `access` is false if and only if `perm` is `Cell`, so this `if`
                // does not change whether any code is UB or not. We could just always use
                // `new_accessed` and everything would stay the same. But that seems conceptually
                // odd, so we keep the initial "accessed" bit of the `LocationState` in sync with
                // whether a read access is performed below.
                let new_loc = if access {
                    LocationState::new_accessed(perm, sifa)
                } else {
                    LocationState::new_non_accessed(perm, sifa)
                };

                // Store initial permissions.
                for (_loc_range, loc) in perms_map.iter_mut(range.start, range.size) {
                    *loc = new_loc;
                }

                interp_ok(())
            })?;

            // Allow lazily writing to surrounding data if we found an `UnsafeCell`.
            if has_unsafe_cell { new_perm.nonfreeze_perm } else { new_perm.freeze_perm }
        };
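
        // Illustration (hypothetical pointee, for exposition): retagging a
        // `&(u8, Cell<u8>)` with `precise_interior_mut` enabled marks byte 0
        // (the frozen part) as `Frozen` with an initial read access, and byte 1
        // (inside the `Cell`) as `Cell` with no initial access; `default_perm`
        // for memory outside this range is then `nonfreeze_perm`, because an
        // `UnsafeCell` was found.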

        let alloc_extra = this.get_alloc_extra(alloc_id)?;
        let mut tree_borrows = alloc_extra.borrow_tracker_tb().borrow_mut();

        for (perm_range, perm) in perms_map.iter_mut_all() {
            if perm.is_accessed() {
                // Some reborrows incur a read access to the parent.
                // Adjust range to be relative to allocation start (rather than to `place`).
                let range_in_alloc = AllocRange {
                    start: Size::from_bytes(perm_range.start) + base_offset,
                    size: Size::from_bytes(perm_range.end - perm_range.start),
                };

                tree_borrows.perform_access(
                    orig_tag,
                    Some((range_in_alloc, AccessKind::Read, diagnostics::AccessCause::Reborrow)),
                    this.machine.borrow_tracker.as_ref().unwrap(),
                    alloc_id,
                    this.machine.current_span(),
                )?;

                // Also inform the data race model (but only if any bytes are actually affected).
                if range_in_alloc.size.bytes() > 0 {
                    if let Some(data_race) = alloc_extra.data_race.as_vclocks_ref() {
                        data_race.read(
                            alloc_id,
                            range_in_alloc,
                            NaReadType::Retag,
                            Some(place.layout.ty),
                            &this.machine,
                        )?
                    }
                }
            }
        }

        // Record the parent-child pair in the tree.
        tree_borrows.new_child(
            base_offset,
            orig_tag,
            new_tag,
            perms_map,
            default_perm,
            protected,
            span,
        )?;
        drop(tree_borrows);

        interp_ok(Some(Provenance::Concrete { alloc_id, tag: new_tag }))
    }

    fn tb_retag_place(
        &mut self,
        place: &MPlaceTy<'tcx>,
        new_perm: NewPermission,
    ) -> InterpResult<'tcx, MPlaceTy<'tcx>> {
        let this = self.eval_context_mut();

        // Determine the size of the reborrow.
        // For most types this is the entire size of the place, however
        // - when `extern type` is involved we use the size of the known prefix,
        // - if the pointer is not reborrowed (raw pointer) then we override the size
        //   to do a zero-length reborrow.
        let reborrow_size = this
            .size_and_align_of_mplace(place)?
            .map(|(size, _)| size)
            .unwrap_or(place.layout.size);
        trace!("Creating new permission: {:?} with size {:?}", new_perm, reborrow_size);

        // This new tag is not guaranteed to actually be used.
        //
        // If you run out of tags, consider the following optimization: adjust `tb_reborrow`
        // so that rather than taking as input a fresh tag and deciding whether it uses this
        // one or the parent, it instead just returns whether a new tag should be created.
        // This will avoid creating tags that end up never being used.
        let new_tag = this.machine.borrow_tracker.as_mut().unwrap().get_mut().new_ptr();

        // Compute the actual reborrow.
        let new_prov = this.tb_reborrow(place, reborrow_size, new_perm, new_tag)?;

        // Adjust place.
        // (If the closure gets called, that means the old provenance was `Some`, and hence the new
        // one must also be `Some`.)
        interp_ok(place.clone().map_provenance(|_| new_prov.unwrap()))
    }

    /// Retags an individual pointer, returning the retagged version.
    fn tb_retag_reference(
        &mut self,
        val: &ImmTy<'tcx>,
        new_perm: NewPermission,
    ) -> InterpResult<'tcx, ImmTy<'tcx>> {
        let this = self.eval_context_mut();
        let place = this.ref_to_mplace(val)?;
        let new_place = this.tb_retag_place(&place, new_perm)?;
        interp_ok(ImmTy::from_immediate(new_place.to_ref(this), val.layout))
    }
}

impl<'tcx> EvalContextExt<'tcx> for crate::MiriInterpCx<'tcx> {}
pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
    /// Retag a pointer. References are passed to `from_ref_ty` and
    /// raw pointers are never reborrowed.
    fn tb_retag_ptr_value(
        &mut self,
        kind: RetagKind,
        val: &ImmTy<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx>> {
        let this = self.eval_context_mut();
        let new_perm = match val.layout.ty.kind() {
            &ty::Ref(_, pointee, mutability) =>
                NewPermission::from_ref_ty(pointee, mutability, kind, this),
            _ => None,
        };
        if let Some(new_perm) = new_perm {
            this.tb_retag_reference(val, new_perm)
        } else {
            interp_ok(val.clone())
        }
    }
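
    // Illustration: a value of type `&mut i32` or `&i32` gets a fresh tag here,
    // while a `*mut i32` (or any other non-reference type) falls into the `None`
    // arm and is returned unchanged, i.e., raw pointers keep the tag of the
    // pointer they were derived from.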

    /// Retag all pointers that are stored in this place.
    fn tb_retag_place_contents(
        &mut self,
        kind: RetagKind,
        place: &PlaceTy<'tcx>,
    ) -> InterpResult<'tcx> {
        let this = self.eval_context_mut();
        let options = this.machine.borrow_tracker.as_mut().unwrap().get_mut();
        let retag_fields = options.retag_fields;
        let mut visitor = RetagVisitor { ecx: this, kind, retag_fields };
        return visitor.visit_value(place);

        // The actual visitor.
        struct RetagVisitor<'ecx, 'tcx> {
            ecx: &'ecx mut MiriInterpCx<'tcx>,
            kind: RetagKind,
            retag_fields: RetagFields,
        }
        impl<'ecx, 'tcx> RetagVisitor<'ecx, 'tcx> {
            #[inline(always)] // yes this helps in our benchmarks
            fn retag_ptr_inplace(
                &mut self,
                place: &PlaceTy<'tcx>,
                new_perm: Option<NewPermission>,
            ) -> InterpResult<'tcx> {
                if let Some(new_perm) = new_perm {
                    let val = self.ecx.read_immediate(&self.ecx.place_to_op(place)?)?;
                    let val = self.ecx.tb_retag_reference(&val, new_perm)?;
                    self.ecx.write_immediate(*val, place)?;
                }
                interp_ok(())
            }
        }
        impl<'ecx, 'tcx> ValueVisitor<'tcx, MiriMachine<'tcx>> for RetagVisitor<'ecx, 'tcx> {
            type V = PlaceTy<'tcx>;

            #[inline(always)]
            fn ecx(&self) -> &MiriInterpCx<'tcx> {
                self.ecx
            }

            /// Regardless of how `Unique` is handled, Boxes are always reborrowed.
            /// When `Unique` is also reborrowed, then it behaves exactly like `Box`
            /// except for the fact that `Box` has a non-zero-sized reborrow.
            fn visit_box(&mut self, box_ty: Ty<'tcx>, place: &PlaceTy<'tcx>) -> InterpResult<'tcx> {
                // Only boxes for the global allocator get any special treatment.
                if box_ty.is_box_global(*self.ecx.tcx) {
                    let new_perm =
                        NewPermission::from_unique_ty(place.layout.ty, self.kind, self.ecx);
                    self.retag_ptr_inplace(place, new_perm)?;
                }
                interp_ok(())
            }

            fn visit_value(&mut self, place: &PlaceTy<'tcx>) -> InterpResult<'tcx> {
                // If this place is smaller than a pointer, we know that it can't contain any
                // pointers we need to retag, so we can stop recursion early.
                // This optimization is crucial for ZSTs, because they can contain way more fields
                // than we can ever visit.
                if place.layout.is_sized() && place.layout.size < self.ecx.pointer_size() {
                    return interp_ok(());
                }

                // Check the type of this value to see what to do with it (retag, or recurse).
                match place.layout.ty.kind() {
                    &ty::Ref(_, pointee, mutability) => {
                        let new_perm =
                            NewPermission::from_ref_ty(pointee, mutability, self.kind, self.ecx);
                        self.retag_ptr_inplace(place, new_perm)?;
                    }
                    ty::RawPtr(_, _) => {
                        // We definitely do *not* want to recurse into raw pointers -- wide raw
                        // pointers have fields, and for dyn Trait pointees those can have reference
                        // type!
                        // We also do not want to reborrow them.
                    }
                    ty::Adt(adt, _) if adt.is_box() => {
                        // Recurse for boxes, they require some tricky handling and will end up in
                        // `visit_box` above. (Yes this means we technically also recursively retag
                        // the allocator itself even if field retagging is not enabled. *shrug*)
                        self.walk_value(place)?;
                    }
                    _ => {
                        // Not a reference/pointer/box. Only recurse if configured appropriately.
                        let recurse = match self.retag_fields {
                            RetagFields::No => false,
                            RetagFields::Yes => true,
                            RetagFields::OnlyScalar => {
                                // Matching `ArgAbi::new` at the time of writing, only fields of
                                // `Scalar` and `ScalarPair` ABI are considered.
                                matches!(
                                    place.layout.backend_repr,
                                    BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)
                                )
                            }
                        };
                        if recurse {
                            self.walk_value(place)?;
                        }
                    }
                }
                interp_ok(())
            }
        }
    }

    /// Protect a place so that it cannot be used any more for the duration of the current function
    /// call.
    ///
    /// This is used to ensure soundness of in-place function argument/return passing.
    fn tb_protect_place(&mut self, place: &MPlaceTy<'tcx>) -> InterpResult<'tcx, MPlaceTy<'tcx>> {
        let this = self.eval_context_mut();

        // Retag it. With protection! That is the entire point.
        let new_perm = NewPermission {
            // Note: If we are creating a protected Reserved, which can
            // never be ReservedIM, the value of the `ty_is_freeze`
            // argument doesn't matter
            // (`ty_is_freeze || true` in `new_reserved` will always be `true`).
            freeze_perm: Permission::new_reserved(
                /* ty_is_freeze */ true, /* protected */ true,
            ),
            freeze_access: true,
            nonfreeze_perm: Permission::new_reserved(
                /* ty_is_freeze */ false, /* protected */ true,
            ),
            nonfreeze_access: true,
            protector: Some(ProtectorKind::StrongProtector),
        };
        this.tb_retag_place(place, new_perm)
    }

    /// Mark the given tag as exposed. It was found on a pointer with the given AllocId.
    fn tb_expose_tag(&self, alloc_id: AllocId, tag: BorTag) -> InterpResult<'tcx> {
        let this = self.eval_context_ref();

        // Function pointers and dead objects don't have an alloc_extra so we ignore them.
        // This is okay because accessing them is UB anyway, no need for any Tree Borrows checks.
        // NOT using `get_alloc_extra_mut` since this might be a read-only allocation!
        let kind = this.get_alloc_info(alloc_id).kind;
        match kind {
            AllocKind::LiveData => {
                // This should have alloc_extra data, but `get_alloc_extra` can still fail
                // if converting this alloc_id from a global to a local one
                // uncovers a non-supported `extern static`.
                let alloc_extra = this.get_alloc_extra(alloc_id)?;
                trace!("Tree Borrows tag {tag:?} exposed in {alloc_id:?}");
                alloc_extra.borrow_tracker_tb().borrow_mut().expose_tag(tag);
            }
            AllocKind::Function | AllocKind::VTable | AllocKind::Dead => {
                // No tree borrows on these allocations.
            }
        }
        interp_ok(())
    }

    /// Display the tree.
    fn print_tree(&mut self, alloc_id: AllocId, show_unnamed: bool) -> InterpResult<'tcx> {
        let this = self.eval_context_mut();
        let alloc_extra = this.get_alloc_extra(alloc_id)?;
        let tree_borrows = alloc_extra.borrow_tracker_tb().borrow();
        let borrow_tracker = &this.machine.borrow_tracker.as_ref().unwrap().borrow();
        tree_borrows.print_tree(&borrow_tracker.protected_tags, show_unnamed)
    }

    /// Give a name to the pointer, usually the name it has in the source code (for debugging).
    /// The name given is `name` and the pointer that receives it is the `nth_parent`
    /// of `ptr` (with 0 representing `ptr` itself).
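    ///
    /// A sketch of how this is reached from interpreted code, assuming the
    /// `miri_pointer_name` helper that Miri's test suite declares (the exact
    /// declaration lives with the tests, not in this module):
    ///
    ///     // extern "Rust" { fn miri_pointer_name(ptr: *const (), nth_parent: u8, name: &[u8]); }
    ///     // let x = 0u8;
    ///     // miri_pointer_name(&x as *const u8 as *const (), 0, b"x");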
    fn tb_give_pointer_debug_name(
        &mut self,
        ptr: Pointer,
        nth_parent: u8,
        name: &str,
    ) -> InterpResult<'tcx> {
        let this = self.eval_context_mut();
        let (tag, alloc_id) = match ptr.provenance {
            Some(Provenance::Concrete { tag, alloc_id }) => (tag, alloc_id),
            _ => {
                eprintln!("Can't give the name {name} to Wildcard pointer");
                return interp_ok(());
            }
        };
        let alloc_extra = this.get_alloc_extra(alloc_id)?;
        let mut tree_borrows = alloc_extra.borrow_tracker_tb().borrow_mut();
        tree_borrows.give_pointer_debug_name(tag, nth_parent, name)
    }
}
715}