use super::map::MIN_LEN;
use super::node::{marker, ForceResult::*, Handle, LeftOrRight::*, NodeRef, Root};
use core::alloc::Allocator;

impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
    /// Stocks up a possibly underfull node by merging with or stealing from a
    /// sibling. If successful but at the cost of shrinking the parent node,
    /// returns that shrunk parent node. Returns an `Err` if the node is
    /// an empty root.
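    ///
    /// Concretely: if this node and the sibling chosen by `choose_parent_kv`
    /// fit into a single node together with the key-value pair separating them
    /// in the parent, the two are merged and the shrunk parent is returned;
    /// otherwise just enough elements are bulk-stolen from that sibling to
    /// bring this node back up to `MIN_LEN`.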
    fn fix_node_through_parent<A: Allocator + Clone>(
        self,
        alloc: A,
    ) -> Result<Option<NodeRef<marker::Mut<'a>, K, V, marker::Internal>>, Self> {
        let len = self.len();
        if len >= MIN_LEN {
            Ok(None)
        } else {
            match self.choose_parent_kv() {
                Ok(Left(mut left_parent_kv)) => {
                    if left_parent_kv.can_merge() {
                        let parent = left_parent_kv.merge_tracking_parent(alloc);
                        Ok(Some(parent))
                    } else {
                        left_parent_kv.bulk_steal_left(MIN_LEN - len);
                        Ok(None)
                    }
                }
                Ok(Right(mut right_parent_kv)) => {
                    if right_parent_kv.can_merge() {
                        let parent = right_parent_kv.merge_tracking_parent(alloc);
                        Ok(Some(parent))
                    } else {
                        right_parent_kv.bulk_steal_right(MIN_LEN - len);
                        Ok(None)
                    }
                }
                Err(root) => {
                    if len > 0 {
                        Ok(None)
                    } else {
                        Err(root)
                    }
                }
            }
        }
    }
}

impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
    /// Stocks up a possibly underfull node, and if that causes its parent node
    /// to shrink, stocks up the parent, recursively.
    /// Returns `true` if it fixed the tree, `false` if it couldn't because the
    /// root node became empty.
    ///
    /// This method does not expect ancestors to already be underfull upon entry
    /// and panics if it encounters an empty ancestor.
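    ///
    /// Each loop iteration moves one level up: repairing a node either leaves
    /// its parent untouched, in which case the tree is fixed, or shrinks the
    /// parent by one element through a merge, in which case the parent becomes
    /// the next node to repair.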
    pub fn fix_node_and_affected_ancestors<A: Allocator + Clone>(mut self, alloc: A) -> bool {
        loop {
            match self.fix_node_through_parent(alloc.clone()) {
                Ok(Some(parent)) => self = parent.forget_type(),
                Ok(None) => return true,
                Err(_) => return false,
            }
        }
    }
}

impl<K, V> Root<K, V> {
    /// Removes empty levels at the top, but keeps an empty leaf if the entire tree is empty.
    pub fn fix_top<A: Allocator + Clone>(&mut self, alloc: A) {
        while self.height() > 0 && self.len() == 0 {
            self.pop_internal_level(alloc.clone());
        }
    }

    /// Stocks up or merges away any underfull nodes on the right border of the
    /// tree. The other nodes, those that are neither the root nor a rightmost
    /// edge, must already have at least MIN_LEN elements.
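    ///
    /// Repair proceeds top-down along the rightmost edge: each rightmost child
    /// is restocked, or merged into its left sibling, before descending into
    /// it, and a final `fix_top` removes any levels that those merges emptied.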
    pub fn fix_right_border<A: Allocator + Clone>(&mut self, alloc: A) {
        self.fix_top(alloc.clone());
        if self.len() > 0 {
            self.borrow_mut().last_kv().fix_right_border_of_right_edge(alloc.clone());
            self.fix_top(alloc);
        }
    }

    /// The symmetric counterpart of `fix_right_border`.
    pub fn fix_left_border<A: Allocator + Clone>(&mut self, alloc: A) {
        self.fix_top(alloc.clone());
        if self.len() > 0 {
            self.borrow_mut().first_kv().fix_left_border_of_left_edge(alloc.clone());
            self.fix_top(alloc);
        }
    }

    /// Stocks up any underfull nodes on the right border of the tree.
    /// The other nodes, those that are neither the root nor a rightmost edge,
    /// must be prepared to have up to MIN_LEN elements stolen.
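    ///
    /// "Plentiful" refers to the left siblings along that edge: as asserted
    /// below, each holds at least `MIN_LEN * 2` elements, so it can donate up
    /// to `MIN_LEN` of them without itself becoming underfull, and no merging
    /// is ever needed.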
    pub fn fix_right_border_of_plentiful(&mut self) {
        let mut cur_node = self.borrow_mut();
        while let Internal(internal) = cur_node.force() {
            // Check whether the rightmost child is underfull.
            let mut last_kv = internal.last_kv().consider_for_balancing();
            debug_assert!(last_kv.left_child_len() >= MIN_LEN * 2);
            let right_child_len = last_kv.right_child_len();
            if right_child_len < MIN_LEN {
                // We need to steal.
                last_kv.bulk_steal_left(MIN_LEN - right_child_len);
            }

            // Go further down.
            cur_node = last_kv.into_right_child();
        }
    }
}

impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV> {
    fn fix_left_border_of_left_edge<A: Allocator + Clone>(mut self, alloc: A) {
        while let Internal(internal_kv) = self.force() {
            self = internal_kv.fix_left_child(alloc.clone()).first_kv();
            debug_assert!(self.reborrow().into_node().len() > MIN_LEN);
        }
    }

    fn fix_right_border_of_right_edge<A: Allocator + Clone>(mut self, alloc: A) {
        while let Internal(internal_kv) = self.force() {
            self = internal_kv.fix_right_child(alloc.clone()).last_kv();
            debug_assert!(self.reborrow().into_node().len() > MIN_LEN);
        }
    }
}

impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
    /// Stocks up the left child, assuming the right child isn't underfull, and
    /// provisions one extra element so that a later merge of the left child's
    /// own children does not leave it underfull.
    /// Returns the left child.
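    ///
    /// In either branch the returned child ends up with more than `MIN_LEN`
    /// elements: a merge combines both children plus the separating parent
    /// key-value pair while the right child alone already holds `MIN_LEN`
    /// elements, and a bulk steal tops the left child up to `MIN_LEN + 1`.
    /// This is the slack that `fix_left_border_of_left_edge` asserts on.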
    fn fix_left_child<A: Allocator + Clone>(
        self,
        alloc: A,
    ) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
        let mut internal_kv = self.consider_for_balancing();
        let left_len = internal_kv.left_child_len();
        debug_assert!(internal_kv.right_child_len() >= MIN_LEN);
        if internal_kv.can_merge() {
            internal_kv.merge_tracking_child(alloc)
        } else {
            // `MIN_LEN + 1` to avoid readjust if merge happens on the next level.
            let count = (MIN_LEN + 1).saturating_sub(left_len);
            if count > 0 {
                internal_kv.bulk_steal_right(count);
            }
            internal_kv.into_left_child()
        }
    }

    /// Stocks up the right child, assuming the left child isn't underfull, and
    /// provisions one extra element so that a later merge of the right child's
    /// own children does not leave it underfull.
    /// Returns the node where the right child's contents ended up: the right
    /// child itself, or the merged node that absorbed it.
    fn fix_right_child<A: Allocator + Clone>(
        self,
        alloc: A,
    ) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
        let mut internal_kv = self.consider_for_balancing();
        let right_len = internal_kv.right_child_len();
        debug_assert!(internal_kv.left_child_len() >= MIN_LEN);
        if internal_kv.can_merge() {
            internal_kv.merge_tracking_child(alloc)
        } else {
            // `MIN_LEN + 1` to avoid readjust if merge happens on the next level.
            let count = (MIN_LEN + 1).saturating_sub(right_len);
            if count > 0 {
                internal_kv.bulk_steal_left(count);
            }
            internal_kv.into_right_child()
        }
    }
}
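
// Editorial sketch, not part of the upstream balancing code (module and test
// names are illustrative): it only checks the steal-count arithmetic shared by
// `fix_left_child` and `fix_right_child`, namely that a border child is topped
// up to `MIN_LEN + 1` elements and that a child already holding more than
// `MIN_LEN` elements borrows nothing.
#[cfg(test)]
mod fix_sketch_tests {
    use super::MIN_LEN;

    #[test]
    fn border_child_is_topped_up_past_min_len() {
        // Same expression as in `fix_left_child` / `fix_right_child`.
        let steal_count = |len: usize| (MIN_LEN + 1).saturating_sub(len);

        // Any underfull (or exactly minimal) child is raised to MIN_LEN + 1.
        for len in 0..=MIN_LEN {
            assert_eq!(len + steal_count(len), MIN_LEN + 1);
        }
        // A child that already exceeds MIN_LEN is left untouched.
        assert_eq!(steal_count(MIN_LEN + 1), 0);
        assert_eq!(steal_count(MIN_LEN + 7), 0);
    }
}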