alloc/collections/btree/fix.rs

use core::alloc::Allocator;

use super::map::MIN_LEN;
use super::node::ForceResult::*;
use super::node::LeftOrRight::*;
use super::node::{Handle, NodeRef, Root, marker};

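// Helpers that restore the tree invariant that every node other than the root
// holds at least MIN_LEN key-value pairs, after a mutation may have left some
// node underfull.
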
impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
    /// Stocks up a possibly underfull node by merging with or stealing from a
    /// sibling. If successful but at the cost of shrinking the parent node,
    /// returns that shrunk parent node. Returns an `Err` if the node is
    /// an empty root.
    fn fix_node_through_parent<A: Allocator + Clone>(
        self,
        alloc: A,
    ) -> Result<Option<NodeRef<marker::Mut<'a>, K, V, marker::Internal>>, Self> {
        let len = self.len();
        if len >= MIN_LEN {
            Ok(None)
        } else {
            match self.choose_parent_kv() {
                Ok(Left(mut left_parent_kv)) => {
                    if left_parent_kv.can_merge() {
                        let parent = left_parent_kv.merge_tracking_parent(alloc);
                        Ok(Some(parent))
                    } else {
                        left_parent_kv.bulk_steal_left(MIN_LEN - len);
                        Ok(None)
                    }
                }
                Ok(Right(mut right_parent_kv)) => {
                    if right_parent_kv.can_merge() {
                        let parent = right_parent_kv.merge_tracking_parent(alloc);
                        Ok(Some(parent))
                    } else {
                        right_parent_kv.bulk_steal_right(MIN_LEN - len);
                        Ok(None)
                    }
                }
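                // `choose_parent_kv` handed the node back, so it has no
                // parent: it is the root. An underfull root is acceptable as
                // long as it is not empty.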
                Err(root) => {
                    if len > 0 {
                        Ok(None)
                    } else {
                        Err(root)
                    }
                }
            }
        }
    }
}

impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
    /// Stocks up a possibly underfull node, and if that causes its parent node
    /// to shrink, stocks up the parent, recursively.
    /// Returns `true` if it fixed the tree, `false` if it couldn't because the
    /// root node became empty.
    ///
    /// This method does not expect ancestors to already be underfull upon entry
    /// and panics if it encounters an empty ancestor.
    pub(super) fn fix_node_and_affected_ancestors<A: Allocator + Clone>(
        mut self,
        alloc: A,
    ) -> bool {
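        // Fixing one node may shrink its parent below MIN_LEN, in which case
        // the parent becomes the next node to fix; keep walking up until a fix
        // leaves the parent intact or an empty root is reached.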
        loop {
            match self.fix_node_through_parent(alloc.clone()) {
                Ok(Some(parent)) => self = parent.forget_type(),
                Ok(None) => return true,
                Err(_) => return false,
            }
        }
    }
}
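
// Callers interpret the return value of `fix_node_and_affected_ancestors`
// roughly as follows (a sketch of the pattern, not the exact call sites):
//
//     if !node.forget_type().fix_node_and_affected_ancestors(alloc.clone()) {
//         // The root became empty: empty levels at the top must be removed,
//         // e.g. via `Root::fix_top` below.
//         root.fix_top(alloc);
//     }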

impl<K, V> Root<K, V> {
    /// Removes empty levels on the top, but keeps an empty leaf if the entire tree is empty.
    pub(super) fn fix_top<A: Allocator + Clone>(&mut self, alloc: A) {
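        // An empty node above leaf level has exactly one child;
        // `pop_internal_level` promotes that child to be the new root.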
        while self.height() > 0 && self.len() == 0 {
            self.pop_internal_level(alloc.clone());
        }
    }

    /// Stocks up or merges away any underfull nodes on the right border of the
    /// tree. The other nodes, those that are neither the root nor a rightmost
    /// edge, must already have at least MIN_LEN elements.
    pub(super) fn fix_right_border<A: Allocator + Clone>(&mut self, alloc: A) {
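        // Clean up the top first so that `last_kv` addresses the rightmost KV
        // of a non-empty root, then repair the whole rightmost edge; fixing it
        // may in turn empty the root, so clean up the top once more.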
        self.fix_top(alloc.clone());
        if self.len() > 0 {
            self.borrow_mut().last_kv().fix_right_border_of_right_edge(alloc.clone());
            self.fix_top(alloc);
        }
    }

    /// The symmetric clone of `fix_right_border`.
    pub(super) fn fix_left_border<A: Allocator + Clone>(&mut self, alloc: A) {
        self.fix_top(alloc.clone());
        if self.len() > 0 {
            self.borrow_mut().first_kv().fix_left_border_of_left_edge(alloc.clone());
            self.fix_top(alloc);
        }
    }

    /// Stocks up any underfull nodes on the right border of the tree.
    /// The other nodes, those that are neither the root nor a rightmost edge,
    /// must be prepared to have up to MIN_LEN elements stolen.
    pub(super) fn fix_right_border_of_plentiful(&mut self) {
        let mut cur_node = self.borrow_mut();
        while let Internal(internal) = cur_node.force() {
            // Check if rightmost child is underfull.
            let mut last_kv = internal.last_kv().consider_for_balancing();
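            // The left sibling is guaranteed to be plentiful: it can give up
            // as many as MIN_LEN elements and still hold at least MIN_LEN.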
            debug_assert!(last_kv.left_child_len() >= MIN_LEN * 2);
            let right_child_len = last_kv.right_child_len();
            if right_child_len < MIN_LEN {
                // We need to steal.
                last_kv.bulk_steal_left(MIN_LEN - right_child_len);
            }

            // Go further down.
            cur_node = last_kv.into_right_child();
        }
    }
}

impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV> {
    fn fix_left_border_of_left_edge<A: Allocator + Clone>(mut self, alloc: A) {
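        // Walk down the leftmost edge. `fix_left_child` leaves each visited
        // node with more than MIN_LEN elements (checked below), so a merge of
        // its own children on the next iteration cannot leave it underfull.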
        while let Internal(internal_kv) = self.force() {
            self = internal_kv.fix_left_child(alloc.clone()).first_kv();
            debug_assert!(self.reborrow().into_node().len() > MIN_LEN);
        }
    }

    fn fix_right_border_of_right_edge<A: Allocator + Clone>(mut self, alloc: A) {
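        // Mirror image of `fix_left_border_of_left_edge`, walking down the
        // rightmost edge.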
        while let Internal(internal_kv) = self.force() {
            self = internal_kv.fix_right_child(alloc.clone()).last_kv();
            debug_assert!(self.reborrow().into_node().len() > MIN_LEN);
        }
    }
}

impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
    /// Stocks up the left child, assuming the right child isn't underfull, and
    /// provisions an extra element to allow merging its children in turn
    /// without becoming underfull.
    /// Returns the left child.
    fn fix_left_child<A: Allocator + Clone>(
        self,
        alloc: A,
    ) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
        let mut internal_kv = self.consider_for_balancing();
        let left_len = internal_kv.left_child_len();
        debug_assert!(internal_kv.right_child_len() >= MIN_LEN);
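        // Merging absorbs this KV and the right child into the left child;
        // otherwise steal just enough from the right sibling to reach
        // `MIN_LEN + 1`.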
        if internal_kv.can_merge() {
            internal_kv.merge_tracking_child(alloc)
        } else {
            // `MIN_LEN + 1` to avoid readjust if merge happens on the next level.
            let count = (MIN_LEN + 1).saturating_sub(left_len);
            if count > 0 {
                internal_kv.bulk_steal_right(count);
            }
            internal_kv.into_left_child()
        }
    }

    /// Stocks up the right child, assuming the left child isn't underfull, and
    /// provisions an extra element to allow merging its children in turn
    /// without becoming underfull.
    /// Returns wherever the right child ended up.
    fn fix_right_child<A: Allocator + Clone>(
        self,
        alloc: A,
    ) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
        let mut internal_kv = self.consider_for_balancing();
        let right_len = internal_kv.right_child_len();
        debug_assert!(internal_kv.left_child_len() >= MIN_LEN);
        if internal_kv.can_merge() {
            internal_kv.merge_tracking_child(alloc)
        } else {
            // `MIN_LEN + 1` to avoid readjust if merge happens on the next level.
            let count = (MIN_LEN + 1).saturating_sub(right_len);
            if count > 0 {
                internal_kv.bulk_steal_left(count);
            }
            internal_kv.into_right_child()
        }
    }
}