miri/borrow_tracker/tree_borrows/tree.rs

1//! In this file we handle the "Tree" part of Tree Borrows, i.e. all tree
2//! traversal functions, optimizations to trim branches, and keeping track of
3//! the relative position of the access to each node being updated. This of course
4//! also includes the definition of the tree structure.
5//!
6//! Functions here manipulate permissions but are oblivious to them: as
7//! the internals of `Permission` are private, the update process is a black
8//! box. All we need to know here are
9//! - the fact that updates depend only on the old state, the status of protectors,
10//!   and the relative position of the access;
11//! - idempotency properties asserted in `perms.rs` (for optimizations)
12
13use std::ops::Range;
14use std::{cmp, fmt, mem};
15
16use rustc_abi::Size;
17use rustc_data_structures::fx::FxHashSet;
18use rustc_span::Span;
19use smallvec::SmallVec;
20
21use super::diagnostics::{
22    AccessCause, DiagnosticInfo, NodeDebugInfo, TbError, TransitionError,
23    no_valid_exposed_references_error,
24};
25use super::foreign_access_skipping::IdempotentForeignAccess;
26use super::perms::{PermTransition, Permission};
27use super::tree_visitor::{ChildrenVisitMode, ContinueTraversal, NodeAppArgs, TreeVisitor};
28use super::unimap::{UniIndex, UniKeyMap, UniValMap};
29use super::wildcard::ExposedCache;
30use crate::borrow_tracker::{AccessKind, GlobalState, ProtectorKind};
31use crate::*;
32
33mod tests;
34
35/// Data for a reference at a single *location*.
36#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
37pub(super) struct LocationState {
38    /// A location is "accessed" when it is child-accessed for the first time (and the initial
39    /// retag initializes the location for the range covered by the type), and it then stays
40    /// accessed forever.
41    /// For accessed locations, "permission" is the current permission. However, for
42    /// non-accessed locations, we still need to track the "future initial permission": this will
43    /// start out to be `default_initial_perm`, but foreign accesses need to be taken into account.
44    /// Crucially however, while transitions to `Disabled` would usually be UB if this location is
45    /// protected, that is *not* the case for non-accessed locations. Instead we just have a latent
46    /// "future initial permission" of `Disabled`, causing UB only if an access is ever actually
47    /// performed.
48    /// Note that the tree root is also always accessed, as if the allocation was a write access.
49    accessed: bool,
50    /// This pointer's current permission / future initial permission.
51    permission: Permission,
52    /// See `foreign_access_skipping.rs`.
53    /// Stores an idempotent foreign access for this location and its children.
54    /// For correctness, this must not be too strong, and the recorded idempotent foreign access
55    /// of all children must be at least as strong as this. For performance, it should be as strong as possible.
56    idempotent_foreign_access: IdempotentForeignAccess,
57}
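// Informal illustration of the "future initial permission" above (hypothetical offsets, not
// normative): suppose a protected tag `x` was retagged only for offset 0, so its state at
// offset 3 is non-accessed. A foreign write at offset 3 moves that latent permission towards
// `Disabled` *without* raising protector UB; UB only arises if `x` is later actually
// child-accessed at offset 3.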
58
59impl LocationState {
60    /// Constructs a new initial state. It has neither been accessed, nor been subjected
61    /// to any foreign access yet.
62    /// The permission is not allowed to be `Unique`.
63    /// `sifa` is the (strongest) idempotent foreign access, see `foreign_access_skipping.rs`
64    pub fn new_non_accessed(permission: Permission, sifa: IdempotentForeignAccess) -> Self {
65        assert!(permission.is_initial() || permission.is_disabled());
66        Self { permission, accessed: false, idempotent_foreign_access: sifa }
67    }
68
69    /// Constructs a new initial state. It has not yet been subjected
70    /// to any foreign access. However, it is already marked as having been accessed.
71    /// `sifa` is the (strongest) idempotent foreign access, see `foreign_access_skipping.rs`
72    pub fn new_accessed(permission: Permission, sifa: IdempotentForeignAccess) -> Self {
73        Self { permission, accessed: true, idempotent_foreign_access: sifa }
74    }
75
76    /// Check if the location has been accessed, i.e. if it has
77    /// ever been accessed through a child pointer.
78    pub fn accessed(&self) -> bool {
79        self.accessed
80    }
81
82    pub fn permission(&self) -> Permission {
83        self.permission
84    }
85
86    /// Performs an access on this index and updates the node, this permission,
87    /// and the exposed-pointer cache (`exposed_cache`) to reflect the transition.
88    fn perform_transition(
89        &mut self,
90        idx: UniIndex,
91        nodes: &mut UniValMap<Node>,
92        exposed_cache: &mut ExposedCache,
93        access_kind: AccessKind,
94        relatedness: AccessRelatedness,
95        protected: bool,
96        diagnostics: &DiagnosticInfo,
97    ) -> Result<(), TransitionError> {
98        // Call this function now (i.e. only once we know `relatedness`); the contract of
99        // `traverse_this_parents_children_other` ensures this is only reached when
100        // `skip_if_known_noop` returned `Recurse`.
101        self.record_new_access(access_kind, relatedness);
102        let old_access_level = self.permission.strongest_allowed_local_access(protected);
103        let transition = self.perform_access(access_kind, relatedness, protected)?;
104        if !transition.is_noop() {
105            let node = nodes.get_mut(idx).unwrap();
106            // Record the event as part of the history.
107            node.debug_info
108                .history
109                .push(diagnostics.create_event(transition, relatedness.is_foreign()));
110
111            // We need to update the wildcard state, if the permission
112            // of an exposed pointer changes.
113            if node.is_exposed {
114                let access_level = self.permission.strongest_allowed_local_access(protected);
115                exposed_cache.update_exposure(nodes, idx, old_access_level, access_level);
116            }
117        }
118        Ok(())
119    }
120
121    /// Apply the effect of an access to one location, including
122    /// - applying `Permission::perform_access` to the inner `Permission`,
123    /// - emitting protector UB if a protected, accessed location would become `Disabled`,
124    /// - updating the accessed status (child accesses produce accessed locations).
125    fn perform_access(
126        &mut self,
127        access_kind: AccessKind,
128        rel_pos: AccessRelatedness,
129        protected: bool,
130    ) -> Result<PermTransition, TransitionError> {
131        let old_perm = self.permission;
132        let transition = Permission::perform_access(access_kind, rel_pos, old_perm, protected)
133            .ok_or(TransitionError::ChildAccessForbidden(old_perm))?;
134        self.accessed |= !rel_pos.is_foreign();
135        self.permission = transition.applied(old_perm).unwrap();
136        // Why do only accessed locations cause protector errors?
137        // Consider two mutable references `x`, `y` into disjoint parts of
138        // the same allocation. A priori, these may actually both be used to
139        // access the entire allocation, as long as only reads occur. However,
140        // a write to `y` needs to somehow record that `x` can no longer be used
141        // on that location at all. For these non-accessed locations (i.e., locations
142        // that haven't been accessed with `x` yet), we track the "future initial state":
143        // it defaults to whatever the initial state of the tag is,
144        // but the access to `y` moves that "future initial state" of `x` to `Disabled`.
145        // However, usually a `Reserved -> Disabled` transition would be UB due to the protector!
146        // So clearly protectors shouldn't fire for such "future initial state" transitions.
147        //
148        // See the test `two_mut_protected_same_alloc` in `tests/pass/tree_borrows/tree-borrows.rs`
149        // for an example of safe code that would be UB if we forgot to check `self.accessed`.
150        if protected && self.accessed && transition.produces_disabled() {
151            return Err(TransitionError::ProtectedDisabled(old_perm));
152        }
153        Ok(transition)
154    }
155
156    /// Like `perform_access`, but ignores the concrete error cause and also uses state-passing
157    /// rather than a mutable reference. As such, it returns `Some(x)` if the transition succeeded,
158    /// or `None` if there was an error.
159    #[cfg(test)]
160    fn perform_access_no_fluff(
161        mut self,
162        access_kind: AccessKind,
163        rel_pos: AccessRelatedness,
164        protected: bool,
165    ) -> Option<Self> {
166        match self.perform_access(access_kind, rel_pos, protected) {
167            Ok(_) => Some(self),
168            Err(_) => None,
169        }
170    }
171
172    /// Tree traversal optimizations. See `foreign_access_skipping.rs`.
173    /// This checks if such a foreign access can be skipped.
174    fn skip_if_known_noop(
175        &self,
176        access_kind: AccessKind,
177        rel_pos: AccessRelatedness,
178    ) -> ContinueTraversal {
179        if rel_pos.is_foreign() {
180            let happening_now = IdempotentForeignAccess::from_foreign(access_kind);
181            let mut new_access_noop =
182                self.idempotent_foreign_access.can_skip_foreign_access(happening_now);
183            if self.permission.is_disabled() {
184                // A foreign access to a `Disabled` tag will have almost no observable effect.
185                // It's a theorem that `Disabled` nodes have no protected accessed children,
186                // and so this foreign access will never trigger any protector.
187                // (Intuition: a node that is protected and accessed can never become Disabled,
188                // and a node that is Disabled and protected but not accessed can never become
189                // accessed, since that requires a child access, which Disabled blocks.)
190                // Further, the children will never be able to read or write again, since they
191                // have a `Disabled` parent. So this only affects diagnostics, such that the
192                // blocking write will still be identified directly, just at a different tag.
193                new_access_noop = true;
194            }
195            if self.permission.is_frozen() && access_kind == AccessKind::Read {
196                // A foreign read to a `Frozen` tag will have almost no observable effect.
197                // It's a theorem that `Frozen` nodes have no `Unique` children, so all children
198                // already survive foreign reads. Foreign reads in general have almost no
199                // effect, the only further thing they could do is make protected `Reserved`
200                // nodes become conflicted, i.e. make them reject child writes for the further
201                // duration of their protector. But such a child write is already rejected
202                // because this node is frozen. So this only affects diagnostics, but the
203                // blocking read will still be identified directly, just at a different tag.
204                new_access_noop = true;
205            }
206            if new_access_noop {
207                // Abort traversal if the new access is indeed guaranteed
208                // to be noop.
209                // No need to update `self.idempotent_foreign_access`:
210                // the kind of the current streak of foreign accesses
211                // (read-only vs. containing at least one write) has not changed.
212                ContinueTraversal::SkipSelfAndChildren
213            } else {
214                // Otherwise propagate this time, and also record the
215                // access that just occurred so that we can skip the propagation
216                // next time.
217                ContinueTraversal::Recurse
218            }
219        } else {
220            // A child access occurred, this breaks the streak of foreign
221            // accesses in a row and the sequence since the previous child access
222            // is now empty.
223            ContinueTraversal::Recurse
224        }
225    }
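    // Informal example of the skipping above: if this location has already recorded an
    // idempotent foreign *write* (see `foreign_access_skipping.rs`), then a further foreign
    // read or write is known to be a no-op for this node and, by the SIFA invariant, for its
    // entire subtree, so the traversal returns `SkipSelfAndChildren` instead of recursing.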
226
227    /// Records a new access, so that future accesses can potentially be skipped
228    /// by `skip_if_known_noop`. This must be called on child accesses; calling it on
229    /// foreign accesses is optional but improves performance. It should not be called
230    /// when `skip_if_known_noop` indicated skipping, since it then is a no-op.
231    /// See `foreign_access_skipping.rs`
232    fn record_new_access(&mut self, access_kind: AccessKind, rel_pos: AccessRelatedness) {
233        debug_assert!(matches!(
234            self.skip_if_known_noop(access_kind, rel_pos),
235            ContinueTraversal::Recurse
236        ));
237        self.idempotent_foreign_access
238            .record_new(IdempotentForeignAccess::from_acc_and_rel(access_kind, rel_pos));
239    }
240}
241
242impl fmt::Display for LocationState {
243    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
244        write!(f, "{}", self.permission)?;
245        if !self.accessed {
246            write!(f, "?")?;
247        }
248        Ok(())
249    }
250}
251/// The state of the full tree for a particular location: the permission of each node
252/// at that location, and the tracking for wildcard accesses.
253#[derive(Clone, Debug, PartialEq, Eq)]
254pub struct LocationTree {
255    /// Maps a tag to a perm, with possible lazy initialization.
256    ///
257    /// NOTE: not all tags registered in `Tree::nodes` are necessarily in all
258    /// ranges of `perms`, because `perms` is in part lazily initialized.
259    /// Just because `nodes.get(key)` is `Some(_)` does not mean you can safely
260    /// `unwrap` any `perm.get(key)`.
261    ///
262    /// We do uphold the fact that `keys(perms)` is a subset of `keys(nodes)`
263    pub perms: UniValMap<LocationState>,
264    /// Caches information about the relatedness of nodes for a wildcard access.
265    pub exposed_cache: ExposedCache,
266}
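// Informal example of the laziness mentioned above (hypothetical sizes): if a tag is retagged
// for bytes 0..4 of a 16-byte allocation, only those locations get a `perms` entry for it; at
// bytes 4..16 the tag is represented implicitly by `Node::default_location_state()` until an
// access actually touches it there.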
267/// Tree structure with both parents and children since we want to be
268/// able to traverse the tree efficiently in both directions.
269#[derive(Clone, Debug)]
270pub struct Tree {
271    /// Mapping from tags to keys. The key obtained can then be used in
272    /// any of the `UniValMap` relative to this allocation, i.e.
273    /// `nodes`, `LocationTree::perms` and `LocationTree::exposed_cache`
274    /// of the same `Tree`.
275    /// The parent-child relationship in `Node` is encoded in terms of these same
276    /// keys, so traversing the entire tree needs exactly one access to
277    /// `tag_mapping`.
278    pub(super) tag_mapping: UniKeyMap<BorTag>,
279    /// All nodes of this tree.
280    pub(super) nodes: UniValMap<Node>,
281    /// Associates with each location its state and wildcard access tracking.
282    pub(super) locations: DedupRangeMap<LocationTree>,
283    /// Contains both the root of the main tree as well as the roots of the wildcard subtrees.
284    ///
285    /// If we reborrow a reference which has wildcard provenance, then we do not know where in
286    /// the tree to attach them. Instead we create a new additional tree for this allocation
287    /// with this new reference as a root. We call this additional tree a wildcard subtree.
288    ///
289    /// The true structure would be a single tree, but with wildcard provenance we approximate
290    /// it with this ordered set of trees. Each wildcard subtree is the direct child of *some* exposed
291    /// tag (one smaller than the subtree's root), but we do not know which. This also means that it can
292    /// only be the child of a tree that comes before it in the vec, ensuring we don't have any cycles
293    /// in our approximated tree.
294    ///
295    /// Sorted according to `BorTag` from low to high. This also means the main root is `root[0]`.
296    ///
297    /// Has inline array size 2 because that still keeps the `SmallVec` at its minimum size.
298    pub(super) roots: SmallVec<[UniIndex; 2]>,
299}
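// Informal example of the `roots` approximation (hypothetical tags): with roots = [r0, r5, r9]
// sorted ascending, r0 is the main root; the wildcard subtree rooted at r5 really hangs below
// *some* exposed tag smaller than 5 (so necessarily in the r0 tree), and the subtree rooted at
// r9 hangs below some exposed tag smaller than 9 (in either the r0 or the r5 tree).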
300
301/// A node in the borrow tree. Each node is uniquely identified by a tag via
302/// the `nodes` map of `Tree`.
303#[derive(Clone, Debug)]
304pub(super) struct Node {
305    /// The tag of this node.
306    pub tag: BorTag,
307    /// All tags except the root have a parent tag.
308    pub parent: Option<UniIndex>,
309    /// If the pointer was reborrowed, it has children.
310    // FIXME: bench to compare this to FxHashSet and to other SmallVec sizes
311    pub children: SmallVec<[UniIndex; 4]>,
312    /// Either `Reserved`, `Frozen`, or `Disabled`, it is the permission this tag will
313    /// lazily be initialized to on the first access.
314    /// It is only ever `Disabled` for a tree root, since the root is initialized to `Unique` by
315    /// its own separate mechanism.
316    default_initial_perm: Permission,
317    /// The default initial (strongest) idempotent foreign access.
318    /// This participates in the invariant for `LocationState::idempotent_foreign_access`
319    /// in cases where there is no location state yet. See `foreign_access_skipping.rs`
320    /// and `LocationState::idempotent_foreign_access` for more information.
321    default_initial_idempotent_foreign_access: IdempotentForeignAccess,
322    /// Whether a wildcard access could happen through this node.
323    pub is_exposed: bool,
324    /// Some extra information useful only for debugging purposes.
325    pub debug_info: NodeDebugInfo,
326}
327
328impl Tree {
329    /// Create a new tree, with only a root pointer.
330    pub fn new(root_tag: BorTag, size: Size, span: Span) -> Self {
331        // The root has `Disabled` as the default permission,
332        // so that any access out of bounds is invalid.
333        let root_default_perm = Permission::new_disabled();
334        let mut tag_mapping = UniKeyMap::default();
335        let root_idx = tag_mapping.insert(root_tag);
336        let nodes = {
337            let mut nodes = UniValMap::<Node>::default();
338            let mut debug_info = NodeDebugInfo::new(root_tag, root_default_perm, span);
339            // name the root so that all allocations contain one named pointer
340            debug_info.add_name("root of the allocation");
341            nodes.insert(
342                root_idx,
343                Node {
344                    tag: root_tag,
345                    parent: None,
346                    children: SmallVec::default(),
347                    default_initial_perm: root_default_perm,
348                    // The root may never be skipped, all accesses will be local.
349                    default_initial_idempotent_foreign_access: IdempotentForeignAccess::None,
350                    is_exposed: false,
351                    debug_info,
352                },
353            );
354            nodes
355        };
356        let locations = {
357            let mut perms = UniValMap::default();
358            // We manually set it to `Unique` on all in-bounds positions.
359            // We also ensure that it is accessed, so that no `Unique` but
360            // not yet accessed nodes exist. Essentially, we pretend there
361            // was a write that initialized these to `Unique`.
362            perms.insert(
363                root_idx,
364                LocationState::new_accessed(
365                    Permission::new_unique(),
366                    IdempotentForeignAccess::None,
367                ),
368            );
369            let exposed_cache = ExposedCache::default();
370            DedupRangeMap::new(size, LocationTree { perms, exposed_cache })
371        };
372        Self { roots: SmallVec::from_slice(&[root_idx]), nodes, locations, tag_mapping }
373    }
374}
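// Hypothetical usage sketch of the constructor above (the `root_tag`/`span` values are
// placeholders, not taken from real callers):
//
//     let tree = Tree::new(root_tag, Size::from_bytes(16), span);
//
// The fresh tree has exactly one root; every in-bounds location maps it to an accessed
// `Unique` state, while anything outside falls back to the root's `Disabled` default,
// so out-of-bounds accesses are rejected.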
375
376impl<'tcx> Tree {
377    /// Insert a new tag in the tree.
378    ///
379    /// `inside_perms` defines the initial permissions for a block of memory starting at
380    /// `base_offset`. These may or may not already be marked as "accessed".
381    /// `outside_perm` defines the initial permission for the rest of the allocation.
382    /// These are definitely not "accessed".
383    pub(super) fn new_child(
384        &mut self,
385        base_offset: Size,
386        parent_prov: ProvenanceExtra,
387        new_tag: BorTag,
388        inside_perms: DedupRangeMap<LocationState>,
389        outside_perm: Permission,
390        protected: bool,
391        span: Span,
392    ) -> InterpResult<'tcx> {
393        let idx = self.tag_mapping.insert(new_tag);
394        let parent_idx = match parent_prov {
395            ProvenanceExtra::Concrete(parent_tag) =>
396                Some(self.tag_mapping.get(&parent_tag).unwrap()),
397            ProvenanceExtra::Wildcard => None,
398        };
399        assert!(outside_perm.is_initial());
400
401        let default_strongest_idempotent =
402            outside_perm.strongest_idempotent_foreign_access(protected);
403        // Create the node
404        self.nodes.insert(
405            idx,
406            Node {
407                tag: new_tag,
408                parent: parent_idx,
409                children: SmallVec::default(),
410                default_initial_perm: outside_perm,
411                default_initial_idempotent_foreign_access: default_strongest_idempotent,
412                is_exposed: false,
413                debug_info: NodeDebugInfo::new(new_tag, outside_perm, span),
414            },
415        );
416        if let Some(parent_idx) = parent_idx {
417            let parent_node = self.nodes.get_mut(parent_idx).unwrap();
418            // Register new_tag as a child of parent_tag
419            parent_node.children.push(idx);
420        } else {
421            // If the parent had wildcard provenance, then register the idx
422            // as a new wildcard root.
423            // This preserves the orderedness of `roots` because a newly created
424            // tag is greater than all previous tags.
425            self.roots.push(idx);
426        }
427
428        // We need to know the weakest SIFA for `update_idempotent_foreign_access_after_retag`.
429        let mut min_sifa = default_strongest_idempotent;
430        for (Range { start, end }, &perm) in
431            inside_perms.iter(Size::from_bytes(0), inside_perms.size())
432        {
433            assert!(perm.permission.is_initial());
434            assert_eq!(
435                perm.idempotent_foreign_access,
436                perm.permission.strongest_idempotent_foreign_access(protected)
437            );
438
439            min_sifa = cmp::min(min_sifa, perm.idempotent_foreign_access);
440            for (_range, loc) in self
441                .locations
442                .iter_mut(Size::from_bytes(start) + base_offset, Size::from_bytes(end - start))
443            {
444                loc.perms.insert(idx, perm);
445            }
446        }
447
448        // We don't have to update `exposed_cache` as the new node is not exposed and
449        // has no children so the default counts of 0 are correct.
450
451        // If the parent has wildcard provenance, there is no parent node tracking a SIFA that would need updating.
452        if let Some(parent_idx) = parent_idx {
453            // Inserting the new perms might have broken the SIFA invariant (see
454            // `foreign_access_skipping.rs`) if the SIFA we inserted is weaker than that of some parent.
455            // We now weaken the recorded SIFA for our parents, until the invariant is restored. We
456            // could weaken them all to `None`, but it is more efficient to compute the SIFA for the new
457            // permission statically, and use that. For this we need the *minimum* SIFA (`None` needs
458            // more fixup than `Write`).
459            self.update_idempotent_foreign_access_after_retag(parent_idx, min_sifa);
460        }
461
462        interp_ok(())
463    }
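    // Informal example of the SIFA fixup above (hypothetical permissions): if the new child is
    // created unprotected `Frozen` while its parent previously recorded an idempotent foreign
    // *write*, the parent's recorded SIFA is now too strong (a foreign write is not a no-op for
    // a `Frozen` child), so the fixup weakens the parent chain to be no stronger than the new
    // child's SIFA, presumably a read in this case.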
464
465    /// Restores the SIFA "children are stronger"/"parents are weaker" invariant after a retag:
466    /// reduce the SIFA of `current` and its parents to be no stronger than `strongest_allowed`.
467    /// See `foreign_access_skipping.rs` and [`Tree::new_child`].
468    fn update_idempotent_foreign_access_after_retag(
469        &mut self,
470        mut current: UniIndex,
471        strongest_allowed: IdempotentForeignAccess,
472    ) {
473        if strongest_allowed == IdempotentForeignAccess::Write {
474            // Nothing is stronger than `Write`.
475            return;
476        }
477        // We walk the tree upwards, until the invariant is restored
478        loop {
479            let current_node = self.nodes.get_mut(current).unwrap();
480            // Call `ensure_no_stronger_than` on all SIFAs for this node: the per-location SIFA, as well
481            // as the default SIFA for not-yet-initialized locations.
482            // Record whether we did any change; if not, the invariant is restored and we can stop the traversal.
483            let mut any_change = false;
484            for (_range, loc) in self.locations.iter_mut_all() {
485                // Check if this node has a state for this location (or range of locations).
486                if let Some(perm) = loc.perms.get_mut(current) {
487                    // Update the per-location SIFA, recording if it changed.
488                    any_change |=
489                        perm.idempotent_foreign_access.ensure_no_stronger_than(strongest_allowed);
490                }
491            }
492            // Now update `default_initial_idempotent_foreign_access`, which stores the default SIFA for not-yet-initialized locations.
493            any_change |= current_node
494                .default_initial_idempotent_foreign_access
495                .ensure_no_stronger_than(strongest_allowed);
496
497            if any_change {
498                let Some(next) = self.nodes.get(current).unwrap().parent else {
499                    // We have arrived at the root.
500                    break;
501                };
502                current = next;
503                continue;
504            } else {
505                break;
506            }
507        }
508    }
509
510    /// Deallocation requires
511    /// - a pointer that permits write accesses
512    /// - the absence of Strong Protectors anywhere in the allocation
513    pub fn dealloc(
514        &mut self,
515        prov: ProvenanceExtra,
516        access_range: AllocRange,
517        global: &GlobalState,
518        alloc_id: AllocId, // diagnostics
519        span: Span,        // diagnostics
520    ) -> InterpResult<'tcx> {
521        self.perform_access(
522            prov,
523            access_range,
524            AccessKind::Write,
525            AccessCause::Dealloc,
526            global,
527            alloc_id,
528            span,
529        )?;
530
531        let start_idx = match prov {
532            ProvenanceExtra::Concrete(tag) => Some(self.tag_mapping.get(&tag).unwrap()),
533            ProvenanceExtra::Wildcard => None,
534        };
535
536        // Check if this breaks any strong protector.
537        // (Weak protectors are already handled by `perform_access`.)
538        for (loc_range, loc) in self.locations.iter_mut(access_range.start, access_range.size) {
539            let diagnostics = DiagnosticInfo {
540                alloc_id,
541                span,
542                transition_range: loc_range,
543                access_range: Some(access_range),
544                access_cause: AccessCause::Dealloc,
545            };
546            // Checks the tree containing `idx` for strong protector violations.
547            // It does this in traversal order.
548            let mut check_tree = |idx| {
549                TreeVisitor { nodes: &mut self.nodes, data: loc }
550                    .traverse_this_parents_children_other(
551                        idx,
552                        // Visit all children, skipping none.
553                        |_| ContinueTraversal::Recurse,
554                        |args: NodeAppArgs<'_, _>| {
555                            let node = args.nodes.get(args.idx).unwrap();
556
557                            let perm = args
558                                .data
559                                .perms
560                                .get(args.idx)
561                                .copied()
562                                .unwrap_or_else(|| node.default_location_state());
563                            if global.borrow().protected_tags.get(&node.tag)
564                                == Some(&ProtectorKind::StrongProtector)
565                                // Don't check for protector if it is a Cell (see `unsafe_cell_deallocate` in `interior_mutability.rs`).
566                                // Related to https://github.com/rust-lang/rust/issues/55005.
567                                && !perm.permission.is_cell()
568                                // Only trigger UB if the accessed bit is set, i.e. if the protector is actually protecting this offset. See #4579.
569                                && perm.accessed
570                            {
571                                Err(TbError {
572                                    error_kind: TransitionError::ProtectedDealloc,
573                                    access_info: &diagnostics,
574                                    conflicting_node_info: &node.debug_info,
575                                    accessed_node_info: start_idx
576                                        .map(|idx| &args.nodes.get(idx).unwrap().debug_info),
577                                }
578                                .build())
579                            } else {
580                                Ok(())
581                            }
582                        },
583                    )
584            };
585            // If we have a start index we first check its subtree in traversal order.
586            // This results in us showing the error of the closest node instead of an
587            // arbitrary one.
588            let accessed_root = start_idx.map(&mut check_tree).transpose()?;
589            // Afterwards we check all other trees.
590            // We iterate over the list in reverse order to ensure that we do not visit
591            // a parent before its child.
592            for &root in self.roots.iter().rev() {
593                if Some(root) == accessed_root {
594                    continue;
595                }
596                check_tree(root)?;
597            }
598        }
599        interp_ok(())
600    }
601
602    /// Map the per-node and per-location `LocationState::perform_access`
603    /// to each location of `access_range`,
604    /// on every tag of the allocation.
605    ///
606    /// `LocationState::perform_access` will take care of raising transition
607    /// errors and updating the `accessed` status of each location;
608    /// this traversal adds to that:
609    /// - inserting into the map locations that do not exist yet,
610    /// - trimming the traversal,
611    /// - recording the history.
612    pub fn perform_access(
613        &mut self,
614        prov: ProvenanceExtra,
615        access_range: AllocRange,
616        access_kind: AccessKind,
617        access_cause: AccessCause, // diagnostics
618        global: &GlobalState,
619        alloc_id: AllocId, // diagnostics
620        span: Span,        // diagnostics
621    ) -> InterpResult<'tcx> {
622        #[cfg(feature = "expensive-consistency-checks")]
623        if self.roots.len() > 1 || matches!(prov, ProvenanceExtra::Wildcard) {
624            self.verify_wildcard_consistency(global);
625        }
626
627        let source_idx = match prov {
628            ProvenanceExtra::Concrete(tag) => Some(self.tag_mapping.get(&tag).unwrap()),
629            ProvenanceExtra::Wildcard => None,
630        };
631        // We iterate over affected locations and traverse the tree for each of them.
632        for (loc_range, loc) in self.locations.iter_mut(access_range.start, access_range.size) {
633            let diagnostics = DiagnosticInfo {
634                access_cause,
635                access_range: Some(access_range),
636                alloc_id,
637                span,
638                transition_range: loc_range,
639            };
640            loc.perform_access(
641                self.roots.iter().copied(),
642                &mut self.nodes,
643                source_idx,
644                access_kind,
645                global,
646                ChildrenVisitMode::VisitChildrenOfAccessed,
647                &diagnostics,
648                /* min_exposed_child */ None, // only matters for protector end accesses
649            )?;
650        }
651        interp_ok(())
652    }
653    /// This is the special access that is applied on protector release:
654    /// - the access will be applied only to accessed locations of the allocation,
655    /// - it will not be visible to children,
656    /// - it will be recorded as a `FnExit` diagnostic access,
657    /// - it will be a read, except if the location is `Unique` (i.e. has been written to),
658    ///   in which case it will be a write,
659    /// - otherwise it is identical to `Tree::perform_access`.
660    pub fn perform_protector_end_access(
661        &mut self,
662        tag: BorTag,
663        global: &GlobalState,
664        alloc_id: AllocId, // diagnostics
665        span: Span,        // diagnostics
666    ) -> InterpResult<'tcx> {
667        #[cfg(feature = "expensive-consistency-checks")]
668        if self.roots.len() > 1 {
669            self.verify_wildcard_consistency(global);
670        }
671
672        let source_idx = self.tag_mapping.get(&tag).unwrap();
673
674        let min_exposed_child = if self.roots.len() > 1 {
675            LocationTree::get_min_exposed_child(source_idx, &self.nodes)
676        } else {
677            // There's no point in computing this when there is just one tree.
678            None
679        };
680
681        // This is a special access through the entire allocation.
682        // It actually only affects `accessed` locations, so we need
683        // to filter on those before initiating the traversal.
684        //
685        // In addition this implicit access should not be visible to children,
686        // thus the use of `traverse_nonchildren`.
687        // See the test case `returned_mut_is_usable` from
688        // `tests/pass/tree_borrows/tree-borrows.rs` for an example of
689        // why this is important.
690        for (loc_range, loc) in self.locations.iter_mut_all() {
691            // Only visit accessed permissions
692            if let Some(p) = loc.perms.get(source_idx)
693                && let Some(access_kind) = p.permission.protector_end_access()
694                && p.accessed
695            {
696                let diagnostics = DiagnosticInfo {
697                    access_cause: AccessCause::FnExit(access_kind),
698                    access_range: None,
699                    alloc_id,
700                    span,
701                    transition_range: loc_range,
702                };
703                loc.perform_access(
704                    self.roots.iter().copied(),
705                    &mut self.nodes,
706                    Some(source_idx),
707                    access_kind,
708                    global,
709                    ChildrenVisitMode::SkipChildrenOfAccessed,
710                    &diagnostics,
711                    min_exposed_child,
712                )?;
713            }
714        }
715        interp_ok(())
716    }
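    // Informal example (mirrors the doc comment above): when the protector of a mutable
    // reference ends, locations it actually wrote through (now `Unique`) receive an implicit
    // write, other accessed locations typically receive an implicit read, non-accessed
    // locations are skipped entirely, and the reference's own children do not see this
    // implicit access at all.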
717}
718
719/// Integration with the BorTag garbage collector
720impl Tree {
721    pub fn remove_unreachable_tags(&mut self, live_tags: &FxHashSet<BorTag>) {
722        for i in 0..(self.roots.len()) {
723            self.remove_useless_children(self.roots[i], live_tags);
724        }
725        // Right after the GC runs is a good moment to check if we can
726        // merge some adjacent ranges that were made equal by the removal of some
727        // tags (this does not necessarily mean that they have identical internal representations,
728        // see the `PartialEq` impl for `UniValMap`)
729        self.locations.merge_adjacent_thorough();
730    }
731
732    /// Checks if a node is useless and should be GC'ed.
733    /// A node is useless if it has no children and also the tag is no longer live.
734    fn is_useless(&self, idx: UniIndex, live: &FxHashSet<BorTag>) -> bool {
735        let node = self.nodes.get(idx).unwrap();
736        node.children.is_empty() && !live.contains(&node.tag)
737    }
738
739    /// Checks whether a node can be replaced by its only child.
740    /// If so, returns the index of said only child.
741    /// If not, returns `None`.
742    fn can_be_replaced_by_single_child(
743        &self,
744        idx: UniIndex,
745        live: &FxHashSet<BorTag>,
746    ) -> Option<UniIndex> {
747        let node = self.nodes.get(idx).unwrap();
748
749        let [child_idx] = node.children[..] else { return None };
750
751        // We never want to replace the root node, as it is also kept in `root_ptr_tags`.
752        if live.contains(&node.tag) || node.parent.is_none() {
753            return None;
754        }
755        // Since protected nodes are never GC'd (see `borrow_tracker::FrameExtra::visit_provenance`),
756        // we know that `node` is not protected because otherwise `live` would
757        // have contained `node.tag`.
758        let child = self.nodes.get(child_idx).unwrap();
759        // Check that for that one child, `can_be_replaced_by_child` holds for the permission
760        // on all locations.
761        for (_range, loc) in self.locations.iter_all() {
762            let parent_perm = loc
763                .perms
764                .get(idx)
765                .map(|x| x.permission)
766                .unwrap_or_else(|| node.default_initial_perm);
767            let child_perm = loc
768                .perms
769                .get(child_idx)
770                .map(|x| x.permission)
771                .unwrap_or_else(|| child.default_initial_perm);
772            if !parent_perm.can_be_replaced_by_child(child_perm) {
773                return None;
774            }
775        }
776
777        Some(child_idx)
778    }
779
780    /// Properly removes a node.
781    /// The node to be removed should not otherwise be usable. It also
782    /// should have no children, but this is not checked, so that nodes
783    /// whose children were rotated somewhere else can be deleted without
784    /// having to first modify them to clear that array.
785    fn remove_useless_node(&mut self, this: UniIndex) {
786        // Due to the API of UniMap we must make sure to call
787        // `UniValMap::remove` for the key of this node on *all* maps that used it
788        // (which are `self.nodes` and, for every location, `perms` and `exposed_cache`)
789        // before we can safely apply `UniKeyMap::remove` to truly remove
790        // this tag from the `tag_mapping`.
791        let node = self.nodes.remove(this).unwrap();
792        for (_range, loc) in self.locations.iter_mut_all() {
793            loc.perms.remove(this);
794            loc.exposed_cache.remove(this);
795        }
796        self.tag_mapping.remove(&node.tag);
797    }
798
799    /// Traverses the entire tree looking for useless tags.
800    /// Removes from the tree all useless child nodes of root.
801    /// It will not delete the root itself.
802    ///
803    /// NOTE: This leaves tags in the middle of the tree that are unreachable but have
804    /// reachable children. There is a potential for compacting the tree by reassigning
805    /// children of dead tags to the nearest live parent, but it must be done with care
806    /// not to remove UB.
807    ///
808    /// Example: Consider the tree `root - parent - child`, with `parent: Frozen` and
809    /// `child: Reserved`. This tree can exist. If we blindly delete `parent` and reassign
810    /// `child` to be a direct child of `root`, then writes to `child` are now permitted
811    /// whereas they were not when `parent` was still there.
812    fn remove_useless_children(&mut self, root: UniIndex, live: &FxHashSet<BorTag>) {
813        // To avoid stack overflows, we roll our own stack.
814        // Each element in the stack consists of the current tag, and the number of the
815        // next child to be processed.
816
817        // The other functions are written using the `TreeVisitorStack`, but that does not work here
818        // since we need to 1) do a post-traversal and 2) remove nodes from the tree.
819        // Since we do a post-traversal (by deleting nodes only after handling all children),
820        // we also need to be a bit smarter than "pop node, push all children."
821        let mut stack = vec![(root, 0)];
822        while let Some((tag, nth_child)) = stack.last_mut() {
823            let node = self.nodes.get(*tag).unwrap();
824            if *nth_child < node.children.len() {
825                // Visit the child by pushing it to the stack.
826                // Also increase `nth_child` so that when we come back to the `tag` node, we
827                // look at the next child.
828                let next_child = node.children[*nth_child];
829                *nth_child += 1;
830                stack.push((next_child, 0));
831                continue;
832            } else {
833                // We have processed all children of `node`, so now it is time to process `node` itself.
834                // First, get the current children of `node`. To appease the borrow checker,
835                // we have to temporarily move the list out of the node, and then put the
836                // list of remaining children back in.
837                let mut children_of_node =
838                    mem::take(&mut self.nodes.get_mut(*tag).unwrap().children);
839                // Remove all useless children.
840                children_of_node.retain_mut(|idx| {
841                    if self.is_useless(*idx, live) {
842                        // Delete `idx` node everywhere else.
843                        self.remove_useless_node(*idx);
844                        // And delete it from children_of_node.
845                        false
846                    } else {
847                        if let Some(nextchild) = self.can_be_replaced_by_single_child(*idx, live) {
848                            // `nextchild` is our grandchild, and will become our direct child.
849                            // Delete the in-between node, `idx`.
850                            self.remove_useless_node(*idx);
851                            // Set the new child's parent.
852                            self.nodes.get_mut(nextchild).unwrap().parent = Some(*tag);
853                            // Save the new child in children_of_node.
854                            *idx = nextchild;
855                        }
856                        // retain it
857                        true
858                    }
859                });
860                // Put back the now-filtered vector.
861                self.nodes.get_mut(*tag).unwrap().children = children_of_node;
862
863                // We are done, the parent can continue.
864                stack.pop();
865                continue;
866            }
867        }
868    }
869}
870
871impl<'tcx> LocationTree {
872    /// Returns the smallest exposed tag, if any, that is a transitive child of `root`.
873    fn get_min_exposed_child(root: UniIndex, nodes: &UniValMap<Node>) -> Option<BorTag> {
874        // We cannot use the wildcard data structure to improve this lookup. This is because
875        // it only tracks enabled nodes, and we also need to consider disabled ones.
876        let mut stack = vec![root];
877        let mut min_tag = None;
878        while let Some(idx) = stack.pop() {
879            let node = nodes.get(idx).unwrap();
880            if min_tag.is_some_and(|min| min < node.tag) {
881                // The minimum we found before is smaller than this tag, and therefore also
882                // smaller than all its children's tags, so this subtree cannot improve it; skip it.
883                continue;
884            }
885            stack.extend_from_slice(node.children.as_slice());
886            if node.is_exposed {
887                min_tag = match min_tag {
888                    Some(prev) if prev < node.tag => Some(prev),
889                    _ => Some(node.tag),
890                };
891            }
892        }
893        min_tag
894    }
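    // Informal example of the pruning above (hypothetical tags): once an exposed node with
    // tag 5 has been found (`min_tag == 5`), a subtree rooted at a node with tag 8 can be
    // skipped entirely, because all of its descendants have tags larger than 8, hence larger
    // than the minimum already found.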
895
896    /// Performs an access on this location.
897    /// * `access_source`: The index, if any, where the access came from.
898    /// * `visit_children`: Whether to visit or to skip the children of `access_source`.
899    /// * `min_exposed_child`: The tag of the smallest exposed (transitive) child of the accessed node.
900    ///   This is only used with `visit_children == SkipChildrenOfAccessed`, where we need to skip children
901    ///   of the accessed node.
902    fn perform_access(
903        &mut self,
904        roots: impl Iterator<Item = UniIndex>,
905        nodes: &mut UniValMap<Node>,
906        access_source: Option<UniIndex>,
907        access_kind: AccessKind,
908        global: &GlobalState,
909        visit_children: ChildrenVisitMode,
910        diagnostics: &DiagnosticInfo,
911        min_exposed_child: Option<BorTag>,
912    ) -> InterpResult<'tcx> {
913        let accessed_root = if let Some(idx) = access_source {
914            Some(self.perform_normal_access(
915                idx,
916                nodes,
917                access_kind,
918                global,
919                visit_children,
920                diagnostics,
921            )?)
922        } else {
923            // `SkipChildrenOfAccessed` only gets set on protector release, which only
924            // occurs on a known node.
925            assert!(matches!(visit_children, ChildrenVisitMode::VisitChildrenOfAccessed));
926            None
927        };
928
929        let accessed_root_tag = accessed_root.map(|idx| nodes.get(idx).unwrap().tag);
930        for (i, root) in roots.enumerate() {
931            let tag = nodes.get(root).unwrap().tag;
932            // On a protector release access we have to skip the children of the accessed tag.
933            // However, if the tag has exposed children then some of the wildcard subtrees could
934            // also be children of the accessed node and would also need to be skipped. We can
935            // narrow down which wildcard trees might be children by comparing their root tag to the
936            // minimum exposed child of the accessed node. As the parent tag is always smaller
937            // than the child tag this means we only need to skip subtrees with a root tag larger
938            // than `min_exposed_child`. Once we find such a root, we can leave the loop because roots
939            // are sorted by tag.
940            if matches!(visit_children, ChildrenVisitMode::SkipChildrenOfAccessed)
941                && let Some(min_exposed_child) = min_exposed_child
942                && tag > min_exposed_child
943            {
944                break;
945            }
946            // We don't perform a wildcard access on the tree we already performed a
947            // normal access on.
948            if Some(root) == accessed_root {
949                continue;
950            }
951            // The choice of `max_local_tag` requires some thought.
952            // This can only be a local access for nodes that are a parent of the accessed node
953            // and are therefore smaller, so the accessed node itself is a valid choice for `max_local_tag`.
954            // However, using `accessed_root` is better since that will be smaller. It is still a valid choice
955            // because for nodes *in other trees*, if they are a parent of the accessed node then they
956            // are a parent of `accessed_root`.
957            //
958            // As a consequence of this, since the root of the main tree is the smallest tag in the entire
959            // allocation, if the access occurred in the main tree then other subtrees will only see foreign accesses.
960            self.perform_wildcard_access(
961                root,
962                access_source,
963                /*max_local_tag*/ accessed_root_tag,
964                nodes,
965                access_kind,
966                global,
967                diagnostics,
968                /*is_wildcard_tree*/ i != 0,
969            )?;
970        }
971        interp_ok(())
972    }
973
974    /// Performs a normal access on the tree containing `access_source`.
975    ///
976    /// Returns the root index of this tree.
977    /// * `access_source`: The index of the tag being accessed.
978    /// * `visit_children`: Whether to skip the children of `access_source`
979    ///   during the access. Used for protector end access.
980    fn perform_normal_access(
981        &mut self,
982        access_source: UniIndex,
983        nodes: &mut UniValMap<Node>,
984        access_kind: AccessKind,
985        global: &GlobalState,
986        visit_children: ChildrenVisitMode,
987        diagnostics: &DiagnosticInfo,
988    ) -> InterpResult<'tcx, UniIndex> {
989        // Performs the per-node work:
990        // - insert the permission if it does not exist
991        // - perform the access
992        // - record the transition
993        // to which some optimizations are added:
994        // - skip the traversal of the children in some cases
995        // - do not record noop transitions
996        //
997        // The range of the `RangeMap` on which we are currently working is only
998        // needed for diagnostics; it is carried in `diagnostics.transition_range`.
999        let node_skipper = |args: &NodeAppArgs<'_, LocationTree>| -> ContinueTraversal {
1000            let node = args.nodes.get(args.idx).unwrap();
1001            let perm = args.data.perms.get(args.idx);
1002
1003            let old_state = perm.copied().unwrap_or_else(|| node.default_location_state());
1004            old_state.skip_if_known_noop(access_kind, args.rel_pos)
1005        };
1006        let node_app = |args: NodeAppArgs<'_, LocationTree>| {
1007            let node = args.nodes.get_mut(args.idx).unwrap();
1008            let mut perm = args.data.perms.entry(args.idx);
1009
1010            let state = perm.or_insert(node.default_location_state());
1011
1012            let protected = global.borrow().protected_tags.contains_key(&node.tag);
1013            state
1014                .perform_transition(
1015                    args.idx,
1016                    args.nodes,
1017                    &mut args.data.exposed_cache,
1018                    access_kind,
1019                    args.rel_pos,
1020                    protected,
1021                    diagnostics,
1022                )
1023                .map_err(|error_kind| {
1024                    TbError {
1025                        error_kind,
1026                        access_info: diagnostics,
1027                        conflicting_node_info: &args.nodes.get(args.idx).unwrap().debug_info,
1028                        accessed_node_info: Some(
1029                            &args.nodes.get(access_source).unwrap().debug_info,
1030                        ),
1031                    }
1032                    .build()
1033                })
1034        };
1035
1036        let visitor = TreeVisitor { nodes, data: self };
1037        match visit_children {
1038            ChildrenVisitMode::VisitChildrenOfAccessed =>
1039                visitor.traverse_this_parents_children_other(access_source, node_skipper, node_app),
1040            ChildrenVisitMode::SkipChildrenOfAccessed =>
1041                visitor.traverse_nonchildren(access_source, node_skipper, node_app),
1042        }
1043        .into()
1044    }
1045
1046    /// Performs a wildcard access on the tree with root `root`. Takes the access relatedness
1047    /// for each node from the wildcard-tracking `ExposedCache` data structure.
1048    /// * `root`: Root of the tree being accessed.
1049    /// * `access_source`: the index of the accessed tag, if any.
1050    ///   This is only used for printing the correct tag on errors.
1051    /// * `max_local_tag`: The access can only be local for nodes whose tag is
1052    ///   at most `max_local_tag`.
1053    fn perform_wildcard_access(
1054        &mut self,
1055        root: UniIndex,
1056        access_source: Option<UniIndex>,
1057        max_local_tag: Option<BorTag>,
1058        nodes: &mut UniValMap<Node>,
1059        access_kind: AccessKind,
1060        global: &GlobalState,
1061        diagnostics: &DiagnosticInfo,
1062        is_wildcard_tree: bool,
1063    ) -> InterpResult<'tcx> {
1064        let get_relatedness = |idx: UniIndex, node: &Node, loc: &LocationTree| {
1065            // If the tag is larger than `max_local_tag` then the access can only be foreign.
1066            let only_foreign = max_local_tag.is_some_and(|max_local_tag| max_local_tag < node.tag);
1067            loc.exposed_cache.access_relatedness(
1068                root,
1069                idx,
1070                access_kind,
1071                is_wildcard_tree,
1072                only_foreign,
1073            )
1074        };
1075
1076        // Whether there is an exposed node in this tree that allows this access.
1077        let mut has_valid_exposed = false;
1078
1079        // This does a traversal across the tree updating children before their parents. The
1080        // difference to `perform_normal_access` is that we take the access relatedness from
1081        // the wildcard tracking state of the node instead of from the visitor itself.
1082        //
1083        // Unlike for a normal access, the iteration order is important for improving the
1084        // accuracy of wildcard accesses if `max_local_tag` is `Some`: processing the effects of this
1085        // access further down the tree can cause exposed nodes to lose permissions, thus updating
1086        // the wildcard data structure, which will be taken into account when processing the parent
1087        // nodes. Also see the test `cross_tree_update_older_invalid_exposed2.rs`
1088        // (Doing accesses in the opposite order cannot help with precision but the reasons are complicated;
1089        // see <https://github.com/rust-lang/miri/pull/4707#discussion_r2581661123>.)
1090        //
1091        // Note, however, that this is an approximation: there can be situations where a node is
1092        // marked as having an exposed foreign node, but actually that foreign node cannot be
1093        // the source of the access due to `max_local_tag`. The wildcard tracking cannot know
1094        // about `max_local_tag` so we will incorrectly assume that this might be a foreign access.
1095        TreeVisitor { data: self, nodes }.traverse_children_this(
1096            root,
1097            |args| -> ContinueTraversal {
1098                let node = args.nodes.get(args.idx).unwrap();
1099                let perm = args.data.perms.get(args.idx);
1100
1101                let old_state = perm.copied().unwrap_or_else(|| node.default_location_state());
1102                // If we know where, relative to this node, the wildcard access occurs,
1103                // then check if we can skip the entire subtree.
1104                if let Some(relatedness) = get_relatedness(args.idx, node, args.data)
1105                    && let Some(relatedness) = relatedness.to_relatedness()
1106                {
1107                    // We can use the usual SIFA machinery to skip nodes.
1108                    old_state.skip_if_known_noop(access_kind, relatedness)
1109                } else {
1110                    ContinueTraversal::Recurse
1111                }
1112            },
1113            |args| {
1114                let node = args.nodes.get_mut(args.idx).unwrap();
1115
1116                let protected = global.borrow().protected_tags.contains_key(&node.tag);
1117
1118                let Some(wildcard_relatedness) = get_relatedness(args.idx, node, args.data) else {
1119                    // There doesn't exist a valid exposed reference for this access to
1120                    // happen through.
1121                    // This can only happen if `root` is the main root: We set
1122                    // `max_foreign_access==Write` on all wildcard roots, so at least a foreign access
1123                    // is always possible on all nodes in a wildcard subtree.
1124                    return Err(no_valid_exposed_references_error(diagnostics));
1125                };
1126
1127                let mut entry = args.data.perms.entry(args.idx);
1128                let perm = entry.or_insert(node.default_location_state());
1129
1130                // We only count exposed nodes through which an access could happen.
1131                if node.is_exposed
1132                    && perm.permission.strongest_allowed_local_access(protected).allows(access_kind)
1133                    && max_local_tag.is_none_or(|max_local_tag| max_local_tag >= node.tag)
1134                {
1135                    has_valid_exposed = true;
1136                }
1137
1138                let Some(relatedness) = wildcard_relatedness.to_relatedness() else {
1139                    // If the access type is Either, then we do not apply any transition
1140                    // to this node, but we still update each of its children.
1141                    // This is an imprecision! In the future, maybe we can still do some sort
1142                    // of best-effort update here.
1143                    return Ok(());
1144                };
1145
1146                // We know the exact relatedness, so we can actually do precise checks.
1147                perm.perform_transition(
1148                    args.idx,
1149                    args.nodes,
1150                    &mut args.data.exposed_cache,
1151                    access_kind,
1152                    relatedness,
1153                    protected,
1154                    diagnostics,
1155                )
1156                .map_err(|trans| {
1157                    let node = args.nodes.get(args.idx).unwrap();
1158                    TbError {
1159                        error_kind: trans,
1160                        access_info: diagnostics,
1161                        conflicting_node_info: &node.debug_info,
1162                        accessed_node_info: access_source
1163                            .map(|idx| &args.nodes.get(idx).unwrap().debug_info),
1164                    }
1165                    .build()
1166                })
1167            },
1168        )?;
1169        // If there is no exposed node in this tree that allows this access, then the access *must*
1170        // be foreign to the entire subtree. Foreign accesses are only possible on wildcard subtrees
1171        // as there are no ancestors to the main root. So if we do not find a valid exposed node in
1172        // the main tree then this access is UB.
1173        if !has_valid_exposed && !is_wildcard_tree {
1174            return Err(no_valid_exposed_references_error(diagnostics)).into();
1175        }
1176        interp_ok(())
1177    }
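    // Informal example of the `has_valid_exposed` check above (hypothetical setup): a write
    // through a wildcard pointer when every exposed tag in the main tree only permits reads
    // (e.g. all are frozen) finds no valid exposed source, so for the main tree this reports
    // `no_valid_exposed_references_error` rather than silently permitting the write.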
1178}
1179
1180impl Node {
1181    pub fn default_location_state(&self) -> LocationState {
1182        LocationState::new_non_accessed(
1183            self.default_initial_perm,
1184            self.default_initial_idempotent_foreign_access,
1185        )
1186    }
1187}
1188
1189impl VisitProvenance for Tree {
1190    fn visit_provenance(&self, visit: &mut VisitWith<'_>) {
1191        // To ensure that the roots never get removed, we visit them.
1192        // FIXME: it should be possible to GC wildcard tree roots.
1193        for id in self.roots.iter().copied() {
1194            visit(None, Some(self.nodes.get(id).unwrap().tag));
1195        }
1196        // We also need to keep around any exposed tags through which
1197        // an access could still happen.
1198        for (_id, node) in self.nodes.iter() {
1199            if node.is_exposed {
1200                visit(None, Some(node.tag))
1201            }
1202        }
1203    }
1204}
1205
1206/// Relative position of the access
1207#[derive(Clone, Copy, Debug, PartialEq, Eq)]
1208pub enum AccessRelatedness {
1209    /// The access happened either through the node itself or one of
1210    /// its transitive children.
1211    LocalAccess,
1212    /// The access happened through one of this node's ancestors or through
1213    /// a sibling/cousin/uncle/etc.
1214    ForeignAccess,
1215}
1216
1217impl AccessRelatedness {
1218    /// Check that the access is foreign, i.e. it did not happen through
1219    /// this node or one of its transitive children (the node itself included).
1220    pub fn is_foreign(self) -> bool {
1221        matches!(self, AccessRelatedness::ForeignAccess)
1222    }
1223}