miri/borrow_tracker/tree_borrows/tree.rs

//! In this file we handle the "Tree" part of Tree Borrows, i.e. all tree
//! traversal functions, optimizations to trim branches, and keeping track of
//! the relative position of the access to each node being updated. This of course
//! also includes the definition of the tree structure.
//!
//! Functions here manipulate permissions but are oblivious to them: as
//! the internals of `Permission` are private, the update process is a black
//! box. All we need to know here are
//! - the fact that updates depend only on the old state, the status of protectors,
//!   and the relative position of the access;
//! - idempotency properties asserted in `perms.rs` (for optimizations)

use std::ops::Range;
use std::{cmp, fmt, mem};

use rustc_abi::Size;
use rustc_data_structures::fx::FxHashSet;
use rustc_span::Span;
use smallvec::SmallVec;

use super::diagnostics::AccessCause;
use super::wildcard::WildcardState;
use crate::borrow_tracker::tree_borrows::Permission;
use crate::borrow_tracker::tree_borrows::diagnostics::{
    self, NodeDebugInfo, TbError, TransitionError, no_valid_exposed_references_error,
};
use crate::borrow_tracker::tree_borrows::foreign_access_skipping::IdempotentForeignAccess;
use crate::borrow_tracker::tree_borrows::perms::PermTransition;
use crate::borrow_tracker::tree_borrows::unimap::{UniIndex, UniKeyMap, UniValMap};
use crate::borrow_tracker::{AccessKind, GlobalState, ProtectorKind};
use crate::*;

mod tests;
34
/// Data for a reference at a single *location*.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub(super) struct LocationState {
    /// A location is "accessed" when it is child-accessed for the first time (and the initial
    /// retag initializes the location for the range covered by the type), and it then stays
    /// accessed forever.
    /// For accessed locations, "permission" is the current permission. However, for
    /// non-accessed locations, we still need to track the "future initial permission": this will
    /// start out to be `default_initial_perm`, but foreign accesses need to be taken into account.
    /// Crucially however, while transitions to `Disabled` would usually be UB if this location is
    /// protected, that is *not* the case for non-accessed locations. Instead we just have a latent
    /// "future initial permission" of `Disabled`, causing UB only if an access is ever actually
    /// performed.
    /// Note that the tree root is also always accessed, as if the allocation was a write access.
    accessed: bool,
    /// This pointer's current permission / future initial permission.
    permission: Permission,
    /// See `foreign_access_skipping.rs`.
    /// Stores an idempotent foreign access for this location and its children.
    /// For correctness, this must not be too strong, and the recorded idempotent foreign access
    /// of all children must be at least as strong as this. For performance, it should be as strong as possible.
    idempotent_foreign_access: IdempotentForeignAccess,
}

impl LocationState {
    /// Constructs a new initial state. It has neither been accessed, nor been subjected
    /// to any foreign access yet.
    /// The permission is not allowed to be `Unique`.
    /// `sifa` is the (strongest) idempotent foreign access, see `foreign_access_skipping.rs`
    pub fn new_non_accessed(permission: Permission, sifa: IdempotentForeignAccess) -> Self {
        assert!(permission.is_initial() || permission.is_disabled());
        Self { permission, accessed: false, idempotent_foreign_access: sifa }
    }

    /// Constructs a new initial state. It has not yet been subjected
    /// to any foreign access. However, it is already marked as having been accessed.
    /// `sifa` is the (strongest) idempotent foreign access, see `foreign_access_skipping.rs`
    pub fn new_accessed(permission: Permission, sifa: IdempotentForeignAccess) -> Self {
        Self { permission, accessed: true, idempotent_foreign_access: sifa }
    }

    /// Check if the location has been accessed, i.e. if it has
    /// ever been accessed through a child pointer.
    pub fn accessed(&self) -> bool {
        self.accessed
    }

    pub fn permission(&self) -> Permission {
        self.permission
    }

    /// Performs an access at this index and updates the node, the permission,
    /// and the wildcard state to reflect the transition.
    fn perform_transition(
        &mut self,
        idx: UniIndex,
        nodes: &mut UniValMap<Node>,
        wildcard_accesses: &mut UniValMap<WildcardState>,
        access_kind: AccessKind,
        access_cause: AccessCause,
        access_range: Option<AllocRange>,
        relatedness: AccessRelatedness,
        span: Span,
        location_range: Range<u64>,
        protected: bool,
    ) -> Result<(), TransitionError> {
        // Call this function now (i.e. only once we know `relatedness`); the contract of
        // `traverse_this_parents_children_other` ensures it is only called when
        // `skip_if_known_noop` returned `Recurse`.
        self.record_new_access(access_kind, relatedness);

        let transition = self.perform_access(access_kind, relatedness, protected)?;
        if !transition.is_noop() {
            let node = nodes.get_mut(idx).unwrap();
            // Record the event as part of the history.
            node.debug_info.history.push(diagnostics::Event {
                transition,
                is_foreign: relatedness.is_foreign(),
                access_cause,
                access_range,
                transition_range: location_range,
                span,
            });

            // We need to update the wildcard state if the permission
            // of an exposed pointer changes.
            if node.is_exposed {
                let access_type = self.permission.strongest_allowed_child_access(protected);
                WildcardState::update_exposure(idx, access_type, nodes, wildcard_accesses);
            }
        }
        Ok(())
    }

    /// Apply the effect of an access to one location, including
    /// - applying `Permission::perform_access` to the inner `Permission`,
    /// - emitting protector UB if the location is accessed,
    /// - updating the accessed status (child accesses produce accessed locations).
    fn perform_access(
        &mut self,
        access_kind: AccessKind,
        rel_pos: AccessRelatedness,
        protected: bool,
    ) -> Result<PermTransition, TransitionError> {
        let old_perm = self.permission;
        let transition = Permission::perform_access(access_kind, rel_pos, old_perm, protected)
            .ok_or(TransitionError::ChildAccessForbidden(old_perm))?;
        self.accessed |= !rel_pos.is_foreign();
        self.permission = transition.applied(old_perm).unwrap();
        // Why do only accessed locations cause protector errors?
        // Consider two mutable references `x`, `y` into disjoint parts of
        // the same allocation. A priori, these may actually both be used to
        // access the entire allocation, as long as only reads occur. However,
        // a write to `y` needs to somehow record that `x` can no longer be used
        // on that location at all. For these non-accessed locations (i.e., locations
        // that haven't been accessed with `x` yet), we track the "future initial state":
        // it defaults to whatever the initial state of the tag is,
        // but the access to `y` moves that "future initial state" of `x` to `Disabled`.
        // However, usually a `Reserved -> Disabled` transition would be UB due to the protector!
        // So clearly protectors shouldn't fire for such "future initial state" transitions.
        //
        // See the test `two_mut_protected_same_alloc` in `tests/pass/tree_borrows/tree-borrows.rs`
        // for an example of safe code that would be UB if we forgot to check `self.accessed`.
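        //
        // As a minimal sketch of that scenario (not the literal test; names are made
        // up for illustration):
        //
        //     fn write_y(x: &mut u8, y: &mut u8) {
        //         // Both `x` and `y` are protected for the duration of this call.
        //         // The write to `y` is a foreign access for `x` at `y`'s location,
        //         // so the *future initial permission* of `x` there becomes `Disabled`.
        //         // Since `x` was never actually accessed at that location, this must
        //         // not trigger protector UB.
        //         *y = 42;
        //     }
        //     let mut pair = (0u8, 0u8);
        //     write_y(&mut pair.0, &mut pair.1);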
        if protected && self.accessed && transition.produces_disabled() {
            return Err(TransitionError::ProtectedDisabled(old_perm));
        }
        Ok(transition)
    }

    /// Like `perform_access`, but ignores the concrete error cause and also uses state-passing
    /// rather than a mutable reference. As such, it returns `Some(x)` if the transition succeeded,
    /// or `None` if there was an error.
    #[cfg(test)]
    fn perform_access_no_fluff(
        mut self,
        access_kind: AccessKind,
        rel_pos: AccessRelatedness,
        protected: bool,
    ) -> Option<Self> {
        match self.perform_access(access_kind, rel_pos, protected) {
            Ok(_) => Some(self),
            Err(_) => None,
        }
    }

    /// Tree traversal optimizations. See `foreign_access_skipping.rs`.
    /// This checks if such a foreign access can be skipped.
    fn skip_if_known_noop(
        &self,
        access_kind: AccessKind,
        rel_pos: AccessRelatedness,
    ) -> ContinueTraversal {
        if rel_pos.is_foreign() {
            let happening_now = IdempotentForeignAccess::from_foreign(access_kind);
            let mut new_access_noop =
                self.idempotent_foreign_access.can_skip_foreign_access(happening_now);
            if self.permission.is_disabled() {
                // A foreign access to a `Disabled` tag will have almost no observable effect.
                // It's a theorem that `Disabled` nodes have no protected accessed children,
                // and so this foreign access will never trigger any protector.
                // (Intuition: you're either protected and accessed, and thus can't become Disabled,
                // or you're already Disabled and protected but not accessed, and then can't
                // become accessed since that requires a child access, which Disabled blocks.)
                // Further, the children will never be able to read or write again, since they
                // have a `Disabled` parent. So this only affects diagnostics, such that the
                // blocking write will still be identified directly, just at a different tag.
                new_access_noop = true;
            }
            if self.permission.is_frozen() && access_kind == AccessKind::Read {
                // A foreign read to a `Frozen` tag will have almost no observable effect.
                // It's a theorem that `Frozen` nodes have no `Unique` children, so all children
                // already survive foreign reads. Foreign reads in general have almost no
                // effect, the only further thing they could do is make protected `Reserved`
                // nodes become conflicted, i.e. make them reject child writes for the further
                // duration of their protector. But such a child write is already rejected
                // because this node is frozen. So this only affects diagnostics, but the
                // blocking read will still be identified directly, just at a different tag.
                new_access_noop = true;
            }
            if new_access_noop {
                // Abort traversal if the new access is indeed guaranteed
                // to be a noop.
                // No need to update `self.idempotent_foreign_access`,
                // the type of the current streak among nonempty read-only
                // or nonempty with at least one write has not changed.
                ContinueTraversal::SkipSelfAndChildren
            } else {
                // Otherwise propagate this time, and also record the
                // access that just occurred so that we can skip the propagation
                // next time.
                ContinueTraversal::Recurse
            }
        } else {
            // A child access occurred, this breaks the streak of foreign
            // accesses in a row and the sequence since the previous child access
            // is now empty.
            ContinueTraversal::Recurse
        }
    }

    /// Records a new access, so that a future access can potentially be skipped
    /// by `skip_if_known_noop`. This must be called on child accesses, and otherwise
    /// should be called on foreign accesses for increased performance. It should not be called
    /// when `skip_if_known_noop` indicated skipping, since it then is a no-op.
    /// See `foreign_access_skipping.rs`
    fn record_new_access(&mut self, access_kind: AccessKind, rel_pos: AccessRelatedness) {
        debug_assert!(matches!(
            self.skip_if_known_noop(access_kind, rel_pos),
            ContinueTraversal::Recurse
        ));
        self.idempotent_foreign_access
            .record_new(IdempotentForeignAccess::from_acc_and_rel(access_kind, rel_pos));
    }
}

impl fmt::Display for LocationState {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.permission)?;
        if !self.accessed {
            write!(f, "?")?;
        }
        Ok(())
    }
}

/// The state of the full tree for a particular location: for all nodes, the local permissions
/// of that node, and the tracking for wildcard accesses.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct LocationTree {
    /// Maps a tag to a perm, with possible lazy initialization.
    ///
    /// NOTE: not all tags registered in `Tree::nodes` are necessarily in all
    /// ranges of `perms`, because `perms` is in part lazily initialized.
    /// Just because `nodes.get(key)` is `Some(_)` does not mean you can safely
    /// `unwrap` any `perms.get(key)`.
    ///
    /// We do uphold the fact that `keys(perms)` is a subset of `keys(nodes)`.
    pub perms: UniValMap<LocationState>,
    /// Maps a tag and a location to its wildcard access tracking information,
    /// with possible lazy initialization.
    ///
    /// If this allocation doesn't have any exposed nodes, then this map doesn't get
    /// initialized. This way we only need to allocate the map if we need it.
    ///
    /// NOTE: same guarantees on entry initialization as for `perms`.
    pub wildcard_accesses: UniValMap<WildcardState>,
}

/// Tree structure with both parents and children since we want to be
/// able to traverse the tree efficiently in both directions.
#[derive(Clone, Debug)]
pub struct Tree {
    /// Mapping from tags to keys. The key obtained can then be used in
    /// any of the `UniValMap`s relative to this allocation, i.e.
    /// `nodes`, `LocationTree::perms` and `LocationTree::wildcard_accesses`
    /// of the same `Tree`.
    /// The parent-child relationship in `Node` is encoded in terms of these same
    /// keys, so traversing the entire tree needs exactly one access to
    /// `tag_mapping`.
    pub(super) tag_mapping: UniKeyMap<BorTag>,
    /// All nodes of this tree.
    pub(super) nodes: UniValMap<Node>,
    /// Associates with each location its state and wildcard access tracking.
    pub(super) locations: DedupRangeMap<LocationTree>,
    /// The index of the root node.
    pub(super) root: UniIndex,
}

/// A node in the borrow tree. Each node is uniquely identified by a tag via
/// the `nodes` map of `Tree`.
#[derive(Clone, Debug)]
pub(super) struct Node {
    /// The tag of this node.
    pub tag: BorTag,
    /// All tags except the root have a parent tag.
    pub parent: Option<UniIndex>,
    /// If the pointer was reborrowed, it has children.
    // FIXME: bench to compare this to FxHashSet and to other SmallVec sizes
    pub children: SmallVec<[UniIndex; 4]>,
    /// Either `Reserved`, `Frozen`, or `Disabled`, it is the permission this tag will
    /// lazily be initialized to on the first access.
    /// It is only ever `Disabled` for a tree root, since the root is initialized to `Unique` by
    /// its own separate mechanism.
    default_initial_perm: Permission,
    /// The default initial (strongest) idempotent foreign access.
    /// This participates in the invariant for `LocationState::idempotent_foreign_access`
    /// in cases where there is no location state yet. See `foreign_access_skipping.rs`
    /// and `LocationState::idempotent_foreign_access` for more information.
    default_initial_idempotent_foreign_access: IdempotentForeignAccess,
    /// Whether a wildcard access could happen through this node.
    pub is_exposed: bool,
    /// Some extra information useful only for debugging purposes.
    pub debug_info: NodeDebugInfo,
}

/// Data given to the transition function
struct NodeAppArgs<'visit> {
    /// The index of the current node.
    idx: UniIndex,
    /// Relative position of the access.
    rel_pos: AccessRelatedness,
    /// The node map of this tree.
    nodes: &'visit mut UniValMap<Node>,
    /// The per-location state of this tree (permissions and wildcard tracking).
    loc: &'visit mut LocationTree,
}

/// Internal contents of `Tree` with the minimum of mutable access needed for a
/// traversal. For soundness, do not modify the children or parent indexes of nodes
/// during traversal.
struct TreeVisitor<'tree> {
    nodes: &'tree mut UniValMap<Node>,
    loc: &'tree mut LocationTree,
}

/// Whether to continue exploring the children recursively or not.
enum ContinueTraversal {
    Recurse,
    SkipSelfAndChildren,
}

#[derive(Clone, Copy)]
pub enum ChildrenVisitMode {
    VisitChildrenOfAccessed,
    SkipChildrenOfAccessed,
}

enum RecursionState {
    BeforeChildren,
    AfterChildren,
}

/// Stack of nodes left to explore in a tree traversal.
/// See the docs of `traverse_this_parents_children_other` for details on the
/// traversal order.
struct TreeVisitorStack<NodeContinue, NodeApp> {
    /// Function describing whether to continue at a tag.
    /// This is only invoked for foreign accesses.
    f_continue: NodeContinue,
    /// Function to apply to each tag.
    f_propagate: NodeApp,
    /// Mutable state of the visit: the tags left to handle.
    /// Every tag pushed should eventually be handled,
    /// and the precise order is relevant for diagnostics.
    /// Since the traversal is piecewise bottom-up, we need to
    /// remember whether we're here initially, or after visiting all children.
    /// The last element indicates this; it is just an artifact of how you
    /// hand-roll recursion and does not have a deeper meaning otherwise.
    stack: Vec<(UniIndex, AccessRelatedness, RecursionState)>,
}

impl<NodeContinue, NodeApp, Err> TreeVisitorStack<NodeContinue, NodeApp>
where
    NodeContinue: Fn(&NodeAppArgs<'_>) -> ContinueTraversal,
    NodeApp: Fn(NodeAppArgs<'_>) -> Result<(), Err>,
{
    fn should_continue_at(
        &self,
        this: &mut TreeVisitor<'_>,
        idx: UniIndex,
        rel_pos: AccessRelatedness,
    ) -> ContinueTraversal {
        let args = NodeAppArgs { idx, rel_pos, nodes: this.nodes, loc: this.loc };
        (self.f_continue)(&args)
    }

    fn propagate_at(
        &mut self,
        this: &mut TreeVisitor<'_>,
        idx: UniIndex,
        rel_pos: AccessRelatedness,
    ) -> Result<(), Err> {
        (self.f_propagate)(NodeAppArgs { idx, rel_pos, nodes: this.nodes, loc: this.loc })
    }

    fn go_upwards_from_accessed(
        &mut self,
        this: &mut TreeVisitor<'_>,
        accessed_node: UniIndex,
        visit_children: ChildrenVisitMode,
    ) -> Result<(), Err> {
        // We want to visit the accessed node's children first.
        // However, we will below walk up our parents and push their children (our cousins)
        // onto the stack. To ensure correct iteration order, this method thus finishes
        // by reversing the stack. This only works if the stack is empty initially.
        assert!(self.stack.is_empty());
        // First, handle the accessed node. A bunch of things need to
        // be handled differently here compared to the further parents
        // of `accessed_node`.
        {
            self.propagate_at(this, accessed_node, AccessRelatedness::LocalAccess)?;
            if matches!(visit_children, ChildrenVisitMode::VisitChildrenOfAccessed) {
                let accessed_node = this.nodes.get(accessed_node).unwrap();
                // We `rev()` here because we reverse the entire stack later.
                for &child in accessed_node.children.iter().rev() {
                    self.stack.push((
                        child,
                        AccessRelatedness::ForeignAccess,
                        RecursionState::BeforeChildren,
                    ));
                }
            }
        }
        // Then, handle the accessed node's parents. Here, we need to
        // make sure we only mark the "cousin" subtrees for later visitation,
        // not the subtree that contains the accessed node.
        let mut last_node = accessed_node;
        while let Some(current) = this.nodes.get(last_node).unwrap().parent {
            self.propagate_at(this, current, AccessRelatedness::LocalAccess)?;
            let node = this.nodes.get(current).unwrap();
            // We `rev()` here because we reverse the entire stack later.
            for &child in node.children.iter().rev() {
                if last_node == child {
                    continue;
                }
                self.stack.push((
                    child,
                    AccessRelatedness::ForeignAccess,
                    RecursionState::BeforeChildren,
                ));
            }
            last_node = current;
        }
        // Reverse the stack, as discussed above.
        self.stack.reverse();
        Ok(())
    }

    fn finish_foreign_accesses(&mut self, this: &mut TreeVisitor<'_>) -> Result<(), Err> {
        while let Some((idx, rel_pos, step)) = self.stack.last_mut() {
            let idx = *idx;
            let rel_pos = *rel_pos;
            match *step {
                // How to do bottom-up traversal, 101: Before you handle a node, you handle all children.
                // For this, you must first find the children, which is what this code here does.
                RecursionState::BeforeChildren => {
                    // Next time we come back will be when all the children are handled.
                    *step = RecursionState::AfterChildren;
                    // Now push the children, except if we are told to skip this subtree.
                    let handle_children = self.should_continue_at(this, idx, rel_pos);
                    match handle_children {
                        ContinueTraversal::Recurse => {
                            let node = this.nodes.get(idx).unwrap();
                            for &child in node.children.iter() {
                                self.stack.push((child, rel_pos, RecursionState::BeforeChildren));
                            }
                        }
                        ContinueTraversal::SkipSelfAndChildren => {
                            // skip self
                            self.stack.pop();
                            continue;
                        }
                    }
                }
                // All the children are handled, let's actually visit this node
                RecursionState::AfterChildren => {
                    self.stack.pop();
                    self.propagate_at(this, idx, rel_pos)?;
                }
            }
        }
        Ok(())
    }

    fn new(f_continue: NodeContinue, f_propagate: NodeApp) -> Self {
        Self { f_continue, f_propagate, stack: Vec::new() }
    }
}

impl<'tree> TreeVisitor<'tree> {
    /// Applies `f_propagate` to every vertex of the tree in a piecewise bottom-up way: First, visit
    /// all ancestors of `start_idx` (starting with `start_idx` itself), then children of `start_idx`, then the rest,
    /// going bottom-up in each of these two "pieces" / sections.
    /// This ensures that errors are triggered in the following order:
    /// - first invalid accesses with insufficient permissions, closest to the accessed node first,
    /// - then protector violations, bottom-up, starting with the children of the accessed node, and then
    ///   going upwards and outwards.
    ///
    /// The following graphic visualizes it, with numbers indicating visitation order and `start_idx` being
    /// the node that is visited first ("1"):
    ///
    /// ```text
    ///         3
    ///        /|
    ///       / |
    ///      9  2
    ///      |  |\
    ///      |  | \
    ///      8  1  7
    ///        / \
    ///       4   6
    ///           |
    ///           5
    /// ```
    ///
    /// `f_propagate` should follow the following format: for a given `Node` it updates its
    /// `Permission` depending on the position relative to `start_idx` (given by an
    /// `AccessRelatedness`).
    /// `f_continue` is called earlier on foreign nodes, and describes whether to even start
    /// visiting the subtree at that node. If it e.g. returns `SkipSelfAndChildren` on node 6
    /// above, then nodes 5 _and_ 6 would not be visited by `f_propagate`. It is not used for
    /// nodes undergoing a child access (nodes 1, 2, 3).
    ///
    /// Finally, remember that the iteration order is not relevant for UB, it only affects
    /// diagnostics. It also affects tree traversal optimizations built on top of this, so
    /// those need to be reviewed carefully as well whenever this changes.
    fn traverse_this_parents_children_other<Err>(
        mut self,
        start_idx: UniIndex,
        f_continue: impl Fn(&NodeAppArgs<'_>) -> ContinueTraversal,
        f_propagate: impl Fn(NodeAppArgs<'_>) -> Result<(), Err>,
    ) -> Result<(), Err> {
        let mut stack = TreeVisitorStack::new(f_continue, f_propagate);
        // Visits the accessed node itself, and all its parents, i.e. all nodes
        // undergoing a child access. Also pushes the children and the other
        // cousin nodes (i.e. all nodes undergoing a foreign access) to the stack
        // to be processed later.
        stack.go_upwards_from_accessed(
            &mut self,
            start_idx,
            ChildrenVisitMode::VisitChildrenOfAccessed,
        )?;
        // Now visit all the foreign nodes we remembered earlier.
        // For this we go bottom-up, but also allow f_continue to skip entire
        // subtrees from being visited if it would be a NOP.
        stack.finish_foreign_accesses(&mut self)
    }

    /// Like `traverse_this_parents_children_other`, but skips the children of `start_idx`.
    fn traverse_nonchildren<Err>(
        mut self,
        start_idx: UniIndex,
        f_continue: impl Fn(&NodeAppArgs<'_>) -> ContinueTraversal,
        f_propagate: impl Fn(NodeAppArgs<'_>) -> Result<(), Err>,
    ) -> Result<(), Err> {
        let mut stack = TreeVisitorStack::new(f_continue, f_propagate);
        // Visits the accessed node itself, and all its parents, i.e. all nodes
        // undergoing a child access. Also pushes the other cousin nodes to the
        // stack, but not the children of the accessed node.
        stack.go_upwards_from_accessed(
            &mut self,
            start_idx,
            ChildrenVisitMode::SkipChildrenOfAccessed,
        )?;
        // Now visit all the foreign nodes we remembered earlier.
        // For this we go bottom-up, but also allow f_continue to skip entire
        // subtrees from being visited if it would be a NOP.
        stack.finish_foreign_accesses(&mut self)
    }
}

impl Tree {
    /// Create a new tree, with only a root pointer.
    pub fn new(root_tag: BorTag, size: Size, span: Span) -> Self {
        // The root has `Disabled` as the default permission,
        // so that any access out of bounds is invalid.
        let root_default_perm = Permission::new_disabled();
        let mut tag_mapping = UniKeyMap::default();
        let root_idx = tag_mapping.insert(root_tag);
        let nodes = {
            let mut nodes = UniValMap::<Node>::default();
            let mut debug_info = NodeDebugInfo::new(root_tag, root_default_perm, span);
            // name the root so that all allocations contain one named pointer
            debug_info.add_name("root of the allocation");
            nodes.insert(
                root_idx,
                Node {
                    tag: root_tag,
                    parent: None,
                    children: SmallVec::default(),
                    default_initial_perm: root_default_perm,
                    // The root may never be skipped, all accesses will be local.
                    default_initial_idempotent_foreign_access: IdempotentForeignAccess::None,
                    is_exposed: false,
                    debug_info,
                },
            );
            nodes
        };
        let locations = {
            let mut perms = UniValMap::default();
            // We manually set it to `Unique` on all in-bounds positions.
            // We also ensure that it is accessed, so that no `Unique` but
            // not yet accessed nodes exist. Essentially, we pretend there
            // was a write that initialized these to `Unique`.
            perms.insert(
                root_idx,
                LocationState::new_accessed(
                    Permission::new_unique(),
                    IdempotentForeignAccess::None,
                ),
            );
            let wildcard_accesses = UniValMap::default();
            DedupRangeMap::new(size, LocationTree { perms, wildcard_accesses })
        };
        Self { root: root_idx, nodes, locations, tag_mapping }
    }
}
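
// Illustrative sketch (not real Miri code; the tags, size, and permission maps are
// hypothetical): building the tree for a fresh allocation and then retagging a
// reference roughly amounts to
//
//     let mut tree = Tree::new(root_tag, Size::from_bytes(4), span);
//     // `inside_perms` covers the bytes of the retagged type, `outside_perm`
//     // the rest of the allocation (see `new_child` below).
//     tree.new_child(Size::ZERO, root_tag, child_tag, inside_perms, outside_perm,
//         /* protected */ false, span)?;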

impl<'tcx> Tree {
    /// Insert a new tag in the tree.
    ///
    /// `inside_perms` defines the initial permissions for a block of memory starting at
    /// `base_offset`. These may or may not already be marked as "accessed".
    /// `outside_perm` defines the initial permission for the rest of the allocation.
    /// These are definitely not "accessed".
    pub(super) fn new_child(
        &mut self,
        base_offset: Size,
        parent_tag: BorTag,
        new_tag: BorTag,
        inside_perms: DedupRangeMap<LocationState>,
        outside_perm: Permission,
        protected: bool,
        span: Span,
    ) -> InterpResult<'tcx> {
        let idx = self.tag_mapping.insert(new_tag);
        let parent_idx = self.tag_mapping.get(&parent_tag).unwrap();
        assert!(outside_perm.is_initial());

        let default_strongest_idempotent =
            outside_perm.strongest_idempotent_foreign_access(protected);
        // Create the node
        self.nodes.insert(
            idx,
            Node {
                tag: new_tag,
                parent: Some(parent_idx),
                children: SmallVec::default(),
                default_initial_perm: outside_perm,
                default_initial_idempotent_foreign_access: default_strongest_idempotent,
                is_exposed: false,
                debug_info: NodeDebugInfo::new(new_tag, outside_perm, span),
            },
        );
        let parent_node = self.nodes.get_mut(parent_idx).unwrap();
        // Register new_tag as a child of parent_tag
        parent_node.children.push(idx);

        // We need to know the weakest SIFA for `update_idempotent_foreign_access_after_retag`.
        let mut min_sifa = default_strongest_idempotent;
        for (Range { start, end }, &perm) in
            inside_perms.iter(Size::from_bytes(0), inside_perms.size())
        {
            assert!(perm.permission.is_initial());
            assert_eq!(
                perm.idempotent_foreign_access,
                perm.permission.strongest_idempotent_foreign_access(protected)
            );

            min_sifa = cmp::min(min_sifa, perm.idempotent_foreign_access);
            for (_range, loc) in self
                .locations
                .iter_mut(Size::from_bytes(start) + base_offset, Size::from_bytes(end - start))
            {
                loc.perms.insert(idx, perm);
            }
        }

        // We need to ensure the consistency of the wildcard access tracking data structure.
        // For this, we insert the correct entry for this tag based on its parent, if it exists.
        for (_range, loc) in self.locations.iter_mut_all() {
            if let Some(parent_access) = loc.wildcard_accesses.get(parent_idx) {
                loc.wildcard_accesses.insert(idx, parent_access.for_new_child());
            }
        }

        // Inserting the new perms might have broken the SIFA invariant (see
        // `foreign_access_skipping.rs`) if the SIFA we inserted is weaker than that of some parent.
        // We now weaken the recorded SIFA for our parents, until the invariant is restored. We
        // could weaken them all to `None`, but it is more efficient to compute the SIFA for the new
        // permission statically, and use that. For this we need the *minimum* SIFA (`None` needs
        // more fixup than `Write`).
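        //
        // As a worked example (illustrative, assuming the usual `None < Read < Write`
        // ordering from `foreign_access_skipping.rs`): if the parent currently records
        // `Write` but the new child's permissions only make a foreign read idempotent
        // (`min_sifa == Read`), then the parent, and transitively every ancestor still
        // recording `Write`, is weakened to `Read`; ancestors already at `Read` or `None`
        // are unchanged, at which point the walk below stops.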
        self.update_idempotent_foreign_access_after_retag(parent_idx, min_sifa);

        interp_ok(())
    }

    /// Restores the SIFA "children are stronger"/"parents are weaker" invariant after a retag:
    /// reduce the SIFA of `current` and its parents to be no stronger than `strongest_allowed`.
    /// See `foreign_access_skipping.rs` and [`Tree::new_child`].
    fn update_idempotent_foreign_access_after_retag(
        &mut self,
        mut current: UniIndex,
        strongest_allowed: IdempotentForeignAccess,
    ) {
        if strongest_allowed == IdempotentForeignAccess::Write {
            // Nothing is stronger than `Write`.
            return;
        }
        // We walk the tree upwards, until the invariant is restored
        loop {
            let current_node = self.nodes.get_mut(current).unwrap();
            // Call `ensure_no_stronger_than` on all SIFAs for this node: the per-location SIFA, as well
            // as the default SIFA for not-yet-initialized locations.
            // Record whether we did any change; if not, the invariant is restored and we can stop the traversal.
            let mut any_change = false;
            for (_range, loc) in self.locations.iter_mut_all() {
                // Check if this node has a state for this location (or range of locations).
                if let Some(perm) = loc.perms.get_mut(current) {
                    // Update the per-location SIFA, recording if it changed.
                    any_change |=
                        perm.idempotent_foreign_access.ensure_no_stronger_than(strongest_allowed);
                }
            }
            // Now update `default_initial_idempotent_foreign_access`, which stores the default SIFA for not-yet-initialized locations.
            any_change |= current_node
                .default_initial_idempotent_foreign_access
                .ensure_no_stronger_than(strongest_allowed);

            if any_change {
                let Some(next) = self.nodes.get(current).unwrap().parent else {
                    // We have arrived at the root.
                    break;
                };
                current = next;
                continue;
            } else {
                break;
            }
        }
    }

    /// Deallocation requires
    /// - a pointer that permits write accesses
    /// - the absence of Strong Protectors anywhere in the allocation
    pub fn dealloc(
        &mut self,
        prov: ProvenanceExtra,
        access_range: AllocRange,
        global: &GlobalState,
        alloc_id: AllocId, // diagnostics
        span: Span,        // diagnostics
    ) -> InterpResult<'tcx> {
        self.perform_access(
            prov,
            Some((access_range, AccessKind::Write, diagnostics::AccessCause::Dealloc)),
            global,
            alloc_id,
            span,
        )?;

        // The order in which we check if any nodes are invalidated only
        // matters to diagnostics, so we use the root as a default tag.
        let start_idx = match prov {
            ProvenanceExtra::Concrete(tag) => self.tag_mapping.get(&tag).unwrap(),
            ProvenanceExtra::Wildcard => self.root,
        };

        // Check if this breaks any strong protector.
        // (Weak protectors are already handled by `perform_access`.)
        for (loc_range, loc) in self.locations.iter_mut(access_range.start, access_range.size) {
            TreeVisitor { nodes: &mut self.nodes, loc }.traverse_this_parents_children_other(
                start_idx,
                // Visit all children, skipping none.
                |_| ContinueTraversal::Recurse,
                |args: NodeAppArgs<'_>| {
                    let node = args.nodes.get(args.idx).unwrap();
                    let perm = args.loc.perms.entry(args.idx);

                    let perm = perm.get().copied().unwrap_or_else(|| node.default_location_state());
                    if global.borrow().protected_tags.get(&node.tag)
                            == Some(&ProtectorKind::StrongProtector)
                            // Don't check for protector if it is a Cell (see `unsafe_cell_deallocate` in `interior_mutability.rs`).
                            // Related to https://github.com/rust-lang/rust/issues/55005.
                            && !perm.permission.is_cell()
                            // Only trigger UB if the accessed bit is set, i.e. if the protector is actually protecting this offset. See #4579.
                            && perm.accessed
                    {
                        Err(TbError {
                            conflicting_info: &node.debug_info,
                            access_cause: diagnostics::AccessCause::Dealloc,
                            alloc_id,
                            error_offset: loc_range.start,
                            error_kind: TransitionError::ProtectedDealloc,
                            accessed_info: match prov {
                                ProvenanceExtra::Concrete(_) =>
                                    Some(&args.nodes.get(start_idx).unwrap().debug_info),
                                // We don't know from where the access came during a wildcard access.
                                ProvenanceExtra::Wildcard => None,
                            },
                        }
                        .build())
                    } else {
                        Ok(())
                    }
                },
            )?;
        }
        interp_ok(())
    }

    /// Map the per-node and per-location `LocationState::perform_access`
    /// to each location of the first component of `access_range_and_kind`,
    /// on every tag of the allocation.
    ///
    /// If `access_range_and_kind` is `None`, this is interpreted as the special
    /// access that is applied on protector release:
    /// - the access will be applied only to accessed locations of the allocation,
    /// - it will not be visible to children,
    /// - it will be recorded as a `FnExit` diagnostic access,
    /// - and it will be a read except if the location is `Unique`, i.e. has been written to,
    ///   in which case it will be a write.
    ///
    /// `LocationState::perform_access` will take care of raising transition
    /// errors and updating the `accessed` status of each location;
    /// this traversal adds to that:
    /// - inserting into the map locations that do not exist yet,
    /// - trimming the traversal,
    /// - recording the history.
    pub fn perform_access(
        &mut self,
        prov: ProvenanceExtra,
        access_range_and_kind: Option<(AllocRange, AccessKind, diagnostics::AccessCause)>,
        global: &GlobalState,
        alloc_id: AllocId, // diagnostics
        span: Span,        // diagnostics
    ) -> InterpResult<'tcx> {
        #[cfg(feature = "expensive-consistency-checks")]
        if matches!(prov, ProvenanceExtra::Wildcard) {
            self.verify_wildcard_consistency(global);
        }
        let source_idx = match prov {
            ProvenanceExtra::Concrete(tag) => Some(self.tag_mapping.get(&tag).unwrap()),
            ProvenanceExtra::Wildcard => None,
        };

        if let Some((access_range, access_kind, access_cause)) = access_range_and_kind {
            // Default branch: this is a "normal" access through a known range.
            // We iterate over affected locations and traverse the tree for each of them.
            for (loc_range, loc) in self.locations.iter_mut(access_range.start, access_range.size) {
                loc.perform_access(
                    self.root,
                    &mut self.nodes,
                    source_idx,
                    loc_range,
                    Some(access_range),
                    access_kind,
                    access_cause,
                    global,
                    alloc_id,
                    span,
                    ChildrenVisitMode::VisitChildrenOfAccessed,
                )?;
            }
        } else {
            // This is a special access through the entire allocation.
            // It actually only affects `accessed` locations, so we need
            // to filter on those before initiating the traversal.
            //
            // In addition this implicit access should not be visible to children,
            // thus the use of `traverse_nonchildren`.
            // See the test case `returned_mut_is_usable` from
            // `tests/pass/tree_borrows/tree-borrows.rs` for an example of
            // why this is important.
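            //
            // A minimal sketch of that situation (not the literal test, just the shape
            // of the scenario; names are made up for illustration):
            //
            //     fn reborrow(x: &mut u8) -> &mut u8 {
            //         &mut *x // reborrow of the protected `x`, returned to the caller
            //     }
            //     let y = reborrow(&mut data);
            //     *y = 1; // must still be allowed after `x`'s protector ends
            //
            // The protector-end access on `x` is a foreign access from the perspective
            // of the returned reborrow `y`; if it were propagated to `x`'s children, it
            // would freeze or disable `y` and the later write through `y` would be UB.
            // Skipping the children of the accessed node keeps `y` usable.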

            // Wildcard references are never protected. So this can never be
            // called with a wildcard reference.
            let source_idx = source_idx.unwrap();

            for (loc_range, loc) in self.locations.iter_mut_all() {
                // Only visit accessed permissions
                if let Some(p) = loc.perms.get(source_idx)
                    && let Some(access_kind) = p.permission.protector_end_access()
                    && p.accessed
                {
                    let access_cause = diagnostics::AccessCause::FnExit(access_kind);
                    loc.perform_access(
                        self.root,
                        &mut self.nodes,
                        Some(source_idx),
                        loc_range,
                        None,
                        access_kind,
                        access_cause,
                        global,
                        alloc_id,
                        span,
                        ChildrenVisitMode::SkipChildrenOfAccessed,
                    )?;
                }
            }
        }
        interp_ok(())
    }
}

/// Integration with the BorTag garbage collector
impl Tree {
    pub fn remove_unreachable_tags(&mut self, live_tags: &FxHashSet<BorTag>) {
        self.remove_useless_children(self.root, live_tags);
        // Right after the GC runs is a good moment to check if we can
        // merge some adjacent ranges that were made equal by the removal of some
        // tags (this does not necessarily mean that they have identical internal representations,
        // see the `PartialEq` impl for `UniValMap`)
        self.locations.merge_adjacent_thorough();
    }

    /// Checks if a node is useless and should be GC'ed.
    /// A node is useless if it has no children and also the tag is no longer live.
    fn is_useless(&self, idx: UniIndex, live: &FxHashSet<BorTag>) -> bool {
        let node = self.nodes.get(idx).unwrap();
        node.children.is_empty() && !live.contains(&node.tag)
    }

    /// Checks whether a node can be replaced by its only child.
    /// If so, returns the index of said only child.
    /// If not, returns `None`.
    fn can_be_replaced_by_single_child(
        &self,
        idx: UniIndex,
        live: &FxHashSet<BorTag>,
    ) -> Option<UniIndex> {
        let node = self.nodes.get(idx).unwrap();

        let [child_idx] = node.children[..] else { return None };

        // We never want to replace the root node, as it is also kept in `root_ptr_tags`.
        if live.contains(&node.tag) || node.parent.is_none() {
            return None;
        }
        // Since protected nodes are never GC'd (see `borrow_tracker::FrameExtra::visit_provenance`),
        // we know that `node` is not protected because otherwise `live` would
        // have contained `node.tag`.
        let child = self.nodes.get(child_idx).unwrap();
        // Check that for that one child, `can_be_replaced_by_child` holds for the permission
        // on all locations.
        for (_range, loc) in self.locations.iter_all() {
            let parent_perm = loc
                .perms
                .get(idx)
                .map(|x| x.permission)
                .unwrap_or_else(|| node.default_initial_perm);
            let child_perm = loc
                .perms
                .get(child_idx)
                .map(|x| x.permission)
                .unwrap_or_else(|| child.default_initial_perm);
            if !parent_perm.can_be_replaced_by_child(child_perm) {
                return None;
            }
        }

        Some(child_idx)
    }

    /// Properly removes a node.
    /// The node to be removed should not otherwise be usable. It also
    /// should have no children, but this is not checked, so that nodes
    /// whose children were rotated somewhere else can be deleted without
    /// having to first modify them to clear that array.
    fn remove_useless_node(&mut self, this: UniIndex) {
        // Due to the API of UniMap we must make sure to call
        // `UniValMap::remove` for the key of this node on *all* maps that used it
        // (which are `self.nodes` and the `perms` and `wildcard_accesses` maps of
        // every range of `self.locations`)
        // before we can safely apply `UniKeyMap::remove` to truly remove
        // this tag from the `tag_mapping`.
        let node = self.nodes.remove(this).unwrap();
        for (_range, loc) in self.locations.iter_mut_all() {
            loc.perms.remove(this);
            loc.wildcard_accesses.remove(this);
        }
        self.tag_mapping.remove(&node.tag);
    }

    /// Traverses the entire tree looking for useless tags.
    /// Removes from the tree all useless child nodes of root.
    /// It will not delete the root itself.
    ///
    /// NOTE: This leaves in the middle of the tree tags that are unreachable but have
    /// reachable children. There is a potential for compacting the tree by reassigning
    /// children of dead tags to the nearest live parent, but it must be done with care
    /// not to remove UB.
    ///
    /// Example: Consider the tree `root - parent - child`, with `parent: Frozen` and
    /// `child: Reserved`. This tree can exist. If we blindly delete `parent` and reassign
    /// `child` to be a direct child of `root` then Writes to `child` are now permitted
    /// whereas they were not when `parent` was still there.
    fn remove_useless_children(&mut self, root: UniIndex, live: &FxHashSet<BorTag>) {
        // To avoid stack overflows, we roll our own stack.
        // Each element in the stack consists of the current tag, and the number of the
        // next child to be processed.

        // The other functions are written using the `TreeVisitorStack`, but that does not work here
        // since we need to 1) do a post-traversal and 2) remove nodes from the tree.
        // Since we do a post-traversal (by deleting nodes only after handling all children),
        // we also need to be a bit smarter than "pop node, push all children."
        let mut stack = vec![(root, 0)];
        while let Some((tag, nth_child)) = stack.last_mut() {
            let node = self.nodes.get(*tag).unwrap();
            if *nth_child < node.children.len() {
                // Visit the child by pushing it to the stack.
                // Also increase `nth_child` so that when we come back to the `tag` node, we
                // look at the next child.
                let next_child = node.children[*nth_child];
                *nth_child += 1;
                stack.push((next_child, 0));
                continue;
            } else {
                // We have processed all children of `node`, so now it is time to process `node` itself.
                // First, get the current children of `node`. To appease the borrow checker,
                // we have to temporarily move the list out of the node, and then put the
                // list of remaining children back in.
                let mut children_of_node =
                    mem::take(&mut self.nodes.get_mut(*tag).unwrap().children);
                // Remove all useless children.
                children_of_node.retain_mut(|idx| {
                    if self.is_useless(*idx, live) {
                        // Delete `idx` node everywhere else.
                        self.remove_useless_node(*idx);
                        // And delete it from children_of_node.
                        false
                    } else {
                        if let Some(nextchild) = self.can_be_replaced_by_single_child(*idx, live) {
                            // `nextchild` is our grandchild, and will become our direct child.
                            // Delete the in-between node, `idx`.
                            self.remove_useless_node(*idx);
                            // Set the new child's parent.
                            self.nodes.get_mut(nextchild).unwrap().parent = Some(*tag);
                            // Save the new child in children_of_node.
                            *idx = nextchild;
                        }
                        // retain it
                        true
                    }
                });
                // Put back the now-filtered vector.
                self.nodes.get_mut(*tag).unwrap().children = children_of_node;

                // We are done, the parent can continue.
                stack.pop();
                continue;
            }
        }
    }
}

impl<'tcx> LocationTree {
    /// Performs an access on this location.
    /// * `access_source`: The index, if any, where the access came from.
    /// * `visit_children`: Whether to skip updating the children of `access_source`.
    fn perform_access(
        &mut self,
        root: UniIndex,
        nodes: &mut UniValMap<Node>,
        access_source: Option<UniIndex>,
        loc_range: Range<u64>,
        access_range: Option<AllocRange>,
        access_kind: AccessKind,
        access_cause: diagnostics::AccessCause,
        global: &GlobalState,
        alloc_id: AllocId, // diagnostics
        span: Span,        // diagnostics
        visit_children: ChildrenVisitMode,
    ) -> InterpResult<'tcx> {
        if let Some(idx) = access_source {
            self.perform_normal_access(
                idx,
                nodes,
                loc_range.clone(),
                access_range,
                access_kind,
                access_cause,
                global,
                alloc_id,
                span,
                visit_children,
            )
        } else {
            // `SkipChildrenOfAccessed` only gets set on protector release.
            // Since wildcard references are never protected, this assert shouldn't fail.
            assert!(matches!(visit_children, ChildrenVisitMode::VisitChildrenOfAccessed));
            self.perform_wildcard_access(
                root,
                nodes,
                loc_range.clone(),
                access_range,
                access_kind,
                access_cause,
                global,
                alloc_id,
                span,
            )
        }
    }
1118
1119    /// Performs a normal access on the tree containing `access_source`.
1120    /// * `access_source`: The index of the tag being accessed.
1121    /// * `visit_children`: Whether to skip the children of `access_source`
1122    ///   during the access. Used for protector end access.
1123    fn perform_normal_access(
1124        &mut self,
1125        access_source: UniIndex,
1126        nodes: &mut UniValMap<Node>,
1127        loc_range: Range<u64>,
1128        access_range: Option<AllocRange>,
1129        access_kind: AccessKind,
1130        access_cause: diagnostics::AccessCause,
1131        global: &GlobalState,
1132        alloc_id: AllocId, // diagnostics
1133        span: Span,        // diagnostics
1134        visit_children: ChildrenVisitMode,
1135    ) -> InterpResult<'tcx> {
1136        // Performs the per-node work:
1137        // - insert the permission if it does not exist
1138        // - perform the access
1139        // - record the transition
1140        // to which some optimizations are added:
1141        // - skip the traversal of the children in some cases
1142        // - do not record noop transitions
1143        //
1144        // `perms_range` is only for diagnostics (it is the range of
1145        // the `RangeMap` on which we are currently working).
1146        let node_skipper = |args: &NodeAppArgs<'_>| -> ContinueTraversal {
1147            let node = args.nodes.get(args.idx).unwrap();
1148            let perm = args.loc.perms.get(args.idx);
1149
1150            let old_state = perm.copied().unwrap_or_else(|| node.default_location_state());
1151            old_state.skip_if_known_noop(access_kind, args.rel_pos)
1152        };
1153        let node_app = |args: NodeAppArgs<'_>| -> Result<(), _> {
1154            let node = args.nodes.get_mut(args.idx).unwrap();
1155            let mut perm = args.loc.perms.entry(args.idx);
1156
1157            let state = perm.or_insert(node.default_location_state());
1158
1159            let protected = global.borrow().protected_tags.contains_key(&node.tag);
1160            state
1161                .perform_transition(
1162                    args.idx,
1163                    args.nodes,
1164                    &mut args.loc.wildcard_accesses,
1165                    access_kind,
1166                    access_cause,
1167                    access_range,
1168                    args.rel_pos,
1169                    span,
1170                    loc_range.clone(),
1171                    protected,
1172                )
1173                .map_err(|error_kind| {
1174                    TbError {
1175                        conflicting_info: &args.nodes.get(args.idx).unwrap().debug_info,
1176                        access_cause,
1177                        alloc_id,
1178                        error_offset: loc_range.start,
1179                        error_kind,
1180                        accessed_info: Some(&args.nodes.get(access_source).unwrap().debug_info),
1181                    }
1182                    .build()
1183                })
1184        };
1185        let visitor = TreeVisitor { nodes, loc: self };
1186        match visit_children {
1187            ChildrenVisitMode::VisitChildrenOfAccessed =>
1188                visitor.traverse_this_parents_children_other(access_source, node_skipper, node_app),
1189            ChildrenVisitMode::SkipChildrenOfAccessed =>
1190                visitor.traverse_nonchildren(access_source, node_skipper, node_app),
1191        }
1192        .into()
1193    }
1194    /// Performs a wildcard access on the tree with root `root`. Takes the access
1195    /// relatedness for each node from that node's `WildcardState` data structure.
1196    /// * `root`: Root of the tree being accessed.
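    ///
    /// A sketch of the kind of user code that typically reaches this path
    /// (illustrative only, not taken from this file):
    ///
    /// ```rust,ignore
    /// let mut x = 0u8;
    /// let addr = &mut x as *mut u8 as usize; // ptr-to-int cast exposes the tag
    /// let p = addr as *mut u8;               // int-to-ptr cast: wildcard provenance
    /// unsafe { *p = 1 };                     // access with no known source tag
    /// ```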
1197    fn perform_wildcard_access(
1198        &mut self,
1199        root: UniIndex,
1200        nodes: &mut UniValMap<Node>,
1201        loc_range: Range<u64>,
1202        access_range: Option<AllocRange>,
1203        access_kind: AccessKind,
1204        access_cause: diagnostics::AccessCause,
1205        global: &GlobalState,
1206        alloc_id: AllocId, // diagnostics
1207        span: Span,        // diagnostics
1208    ) -> InterpResult<'tcx> {
1209        let f_continue =
1210            |idx: UniIndex, nodes: &UniValMap<Node>, loc: &LocationTree| -> ContinueTraversal {
1211                let node = nodes.get(idx).unwrap();
1212                let perm = loc.perms.get(idx);
1213                let wildcard_state = loc.wildcard_accesses.get(idx).cloned().unwrap_or_default();
1214
1215                let old_state = perm.copied().unwrap_or_else(|| node.default_location_state());
1216                // If we know where, relative to this node, the wildcard access occurs,
1217                // then check if we can skip the entire subtree.
1218                if let Some(relatedness) = wildcard_state.access_relatedness(access_kind)
1219                    && let Some(relatedness) = relatedness.to_relatedness()
1220                {
1221                    // We can use the usual SIFA machinery to skip nodes.
1222                    old_state.skip_if_known_noop(access_kind, relatedness)
1223                } else {
1224                    ContinueTraversal::Recurse
1225                }
1226            };
1227        // This traverses the tree starting from the root, updating the
1228        // permissions of each node.
1229        // The difference from a normal access (`perform_normal_access`) is that the
1230        // access relatedness is taken from the node's wildcard tracking state instead
1231        // of from the visitor itself.
1232        TreeVisitor { loc: self, nodes }
1233            .traverse_this_parents_children_other(
1234                root,
1235                |args| f_continue(args.idx, args.nodes, args.loc),
1236                |args| {
1237                    let node = args.nodes.get_mut(args.idx).unwrap();
1238                    let mut entry = args.loc.perms.entry(args.idx);
1239                    let perm = entry.or_insert(node.default_location_state());
1240
1241                    let protected = global.borrow().protected_tags.contains_key(&node.tag);
1242
1243                    let Some(wildcard_relatedness) = args
1244                        .loc
1245                        .wildcard_accesses
1246                        .get(args.idx)
1247                        .and_then(|s| s.access_relatedness(access_kind))
1248                    else {
1249                        // There doesn't exist a valid exposed reference for this access to
1250                        // happen through.
1251                        // If this fails for one node, then it fails for all nodes.
1252                        // Since we always check the root first, this means it should always
1253                        // fail on the root.
1254                        assert_eq!(root, args.idx);
1255                        return Err(no_valid_exposed_references_error(
1256                            alloc_id,
1257                            loc_range.start,
1258                            access_cause,
1259                        ));
1260                    };
1261
1262                    let Some(relatedness) = wildcard_relatedness.to_relatedness() else {
1263                        // If the access type is Either, then we do not apply any transition
1264                        // to this node, but we still update each of its children.
1265                        // This is an imprecision! In the future, maybe we can still do some sort
1266                        // of best-effort update here.
1267                        return Ok(());
1268                    };
1269                    // We know the exact relatedness, so we can actually do precise checks.
1270                    perm.perform_transition(
1271                        args.idx,
1272                        args.nodes,
1273                        &mut args.loc.wildcard_accesses,
1274                        access_kind,
1275                        access_cause,
1276                        access_range,
1277                        relatedness,
1278                        span,
1279                        loc_range.clone(),
1280                        protected,
1281                    )
1282                    .map_err(|trans| {
1283                        let node = args.nodes.get(args.idx).unwrap();
1284                        TbError {
1285                            conflicting_info: &node.debug_info,
1286                            access_cause,
1287                            alloc_id,
1288                            error_offset: loc_range.start,
1289                            error_kind: trans,
1290                            // We don't know from where the access came during a wildcard access.
1291                            accessed_info: None,
1292                        }
1293                        .build()
1294                    })
1295                },
1296            )
1297            .into()
1298    }
1299}
1300
1301impl Node {
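    /// The `LocationState` to use for locations where this node has no explicitly
    /// recorded state yet: not yet accessed, with the node's default initial
    /// permission and default idempotent foreign access (see `foreign_access_skipping.rs`).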
1302    pub fn default_location_state(&self) -> LocationState {
1303        LocationState::new_non_accessed(
1304            self.default_initial_perm,
1305            self.default_initial_idempotent_foreign_access,
1306        )
1307    }
1308}
1309
1310impl VisitProvenance for Tree {
1311    fn visit_provenance(&self, visit: &mut VisitWith<'_>) {
1312        // To ensure that the root never gets removed, we visit it
1313        // (the `root` node of `Tree` is not an `Option<_>`)
1314        visit(None, Some(self.nodes.get(self.root).unwrap().tag));
1315
1316        // We also need to keep around any exposed tags through which
1317        // an access could still happen.
1318        for (_id, node) in self.nodes.iter() {
1319            if node.is_exposed {
1320                visit(None, Some(node.tag))
1321            }
1322        }
1323    }
1324}
1325
1326/// Relative position of the access with respect to the node being updated.
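///
/// For example (a sketch): in the tree below, an access performed through `b`
/// is a `LocalAccess` for `b` itself and for its ancestor `a`, and a
/// `ForeignAccess` for `b`'s child `c` and for its sibling `d`.
///
/// ```text
///     a
///    / \
///   b   d
///   |
///   c
/// ```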
1327#[derive(Clone, Copy, Debug, PartialEq, Eq)]
1328pub enum AccessRelatedness {
1329    /// The access happened either through the node itself or one of
1330    /// its transitive children.
1331    LocalAccess,
1332    /// The access happened through one of this node's ancestors or through
1333    /// a sibling/cousin/uncle/etc.
1334    ForeignAccess,
1335}
1336
1337impl AccessRelatedness {
1338    /// Check whether the access is foreign, i.e. it happened neither through
1339    /// the node itself nor through one of its transitive children.
1340    pub fn is_foreign(self) -> bool {
1341        matches!(self, AccessRelatedness::ForeignAccess)
1342    }
1343}