/rust/registry/src/github.com-1ecc6299db9ec823/regalloc-0.0.31/src/avl_tree.rs
Line | Count | Source (jump to first uncovered line) |
1 | | //! AVL trees with a private allocation pool. |
2 | | //! |
3 | | //! AVL tree internals are public, so that backtracking.rs can do custom |
4 | | //! traversals of the tree as it wishes. |
5 | | |
6 | | use smallvec::SmallVec; |
7 | | use std::cmp::Ordering; |
8 | | |
9 | | //============================================================================= |
10 | | // Data structures for AVLTree |
11 | | |
/// Balance/occupancy tag for a pool entry.
///
/// `Free` marks entries sitting on the freelist; the other three variants
/// mark in-use tree nodes and record which subtree (if either) is taller.
/// The enum is fieldless, so `Copy` is free; `Eq` and `Debug` are derived
/// for convenience in assertions and debugging.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum AVLTag {
    Free,  // This pool entry is not in use
    None,  // This pool entry is in use.  Neither subtree is higher.
    Left,  // This pool entry is in use.  The left subtree is higher.
    Right, // This pool entry is in use.  The right subtree is higher.
}
19 | | |
/// One entry of the tree's storage pool: a node's balance tag, child links
/// and payload.  Links are indices into `AVLTree::pool`, with `AVL_NULL`
/// meaning "no child".
#[derive(Clone)]
pub struct AVLNode<T> {
    // Balance state; also distinguishes in-use nodes (`None`/`Left`/`Right`)
    // from freelist entries (`Free`).
    pub tag: AVLTag,
    // Pool index of the left child, or AVL_NULL.  For entries on the
    // freelist this field is reused as the next-free link (see `AVLTree`).
    pub left: u32,
    // Pool index of the right child, or AVL_NULL.
    pub right: u32,
    // The client's stored value.
    pub item: T,
}
27 | | impl<T> AVLNode<T> { |
28 | | fn new(tag: AVLTag, left: u32, right: u32, item: T) -> Self { |
29 | | Self { |
30 | | tag, |
31 | | left, |
32 | | right, |
33 | | item, |
34 | | } |
35 | | } |
36 | | } |
37 | | |
// The "null pointer" value for pool indices.  No valid entry can ever have
// this index, since `alloc` caps the pool well below 2^32-1 entries.
pub const AVL_NULL: u32 = 0xFFFF_FFFF;
39 | | |
/// An AVL tree whose nodes live in a private allocation pool.
pub struct AVLTree<T> {
    // The storage area.  There can be at most 2^32-2 entries, since AVL_NULL
    // (== 2^32-1) is used to mean "the null pointer".  (In practice `alloc`
    // caps the pool at 0x7000_0000 entries.)
    pub pool: Vec<AVLNode<T>>,
    // A default value for the stored item.  We don't care what this is;
    // unfortunately Rust forces us to have one so that additions to the free
    // list will be fully initialised.
    default: T,
    // The freelist head.  This is a list of available entries.  Each item on
    // the freelist must have its .tag be AVLTag::Free, and will use its .left
    // field as the link to the next freelist item.  A freelist link value of
    // AVL_NULL denotes the end of the list.  If `freelist` itself is AVL_NULL
    // then the list is empty.
    freelist: u32,
    // Last but not least, the root node.
    pub root: u32,
}
57 | | |
58 | | //============================================================================= |
59 | | // Storage management functions for AVLTree |
60 | | |
61 | | impl<T: Clone> AVLTree<T> { |
62 | | // Create a new tree and its associated storage pool. This requires knowing |
63 | | // the default item value. |
64 | | pub fn new(default: T) -> Self { |
65 | | // Pre-allocate a few entries so as to save a few reallocs later, on the |
66 | | // assumption that most trees will get quite large. |
67 | | let pool = Vec::with_capacity(16); |
68 | | let freelist = AVL_NULL; |
69 | | let root = AVL_NULL; |
70 | | Self { |
71 | | pool, |
72 | | default, |
73 | | freelist, |
74 | | root, |
75 | | } |
76 | | } |
77 | | |
78 | | // Private function: free a tree node and put it back on the storage pool's |
79 | | // freelist. |
80 | 0 | fn free(&mut self, index: u32) { |
81 | 0 | assert!(index != AVL_NULL); |
82 | 0 | assert!(self.pool[index as usize].tag != AVLTag::Free); |
83 | 0 | self.pool[index as usize] = |
84 | 0 | AVLNode::new(AVLTag::Free, self.freelist, AVL_NULL, self.default.clone()); |
85 | 0 | self.freelist = index; |
86 | 0 | } |
87 | | |
88 | | // Private function: allocate a tree node from the storage pool, resizing |
89 | | // the pool if necessary. This will decline to expand the tree past about |
90 | | // 1.75 billion items. |
91 | 0 | fn alloc(&mut self) -> u32 { |
92 | | // Check to see if the freelist is empty, and if so allocate a bunch more |
93 | | // slots. |
94 | 0 | if self.freelist == AVL_NULL { |
95 | 0 | let start = self.pool.len(); |
96 | 0 | let fill_item = AVLNode::new(AVLTag::Free, AVL_NULL, AVL_NULL, self.default.clone()); |
97 | 0 | // What happens if this OOMs? At least guard against u32 overflow by |
98 | 0 | // doing this: |
99 | 0 | if start >= 0x7000_0000 { |
100 | | // 1.75 billion elements in the tree should be enough for any |
101 | | // reasonable use of this register allocator. |
102 | 0 | panic!("AVLTree<T>::alloc: too many items"); |
103 | 0 | } |
104 | 0 | self.pool.resize(2 * start + 2, fill_item); |
105 | 0 | let end_plus_1 = self.pool.len(); |
106 | | debug_assert!(end_plus_1 >= 2); |
107 | 0 | self.pool[end_plus_1 - 1].left = self.freelist; |
108 | 0 | let mut i = end_plus_1 - 2; |
109 | 0 | while i >= start { |
110 | | // The entry is already marked as free, but we must set the link. |
111 | 0 | self.pool[i].left = i as u32 + 1; |
112 | 0 | if i == 0 { |
113 | 0 | break; |
114 | 0 | } |
115 | 0 | i -= 1; |
116 | | } |
117 | 0 | self.freelist = start as u32; |
118 | | debug_assert!(self.freelist != AVL_NULL); |
119 | 0 | } |
120 | | // And now allocate. |
121 | 0 | let new = self.freelist; |
122 | 0 | assert!(self.pool[new as usize].tag == AVLTag::Free); |
123 | | // The caller is responsible for filling in the entry. But at least set |
124 | | // the tag to non-Free, for sanity. |
125 | 0 | self.pool[new as usize].tag = AVLTag::None; |
126 | 0 | self.freelist = self.pool[new as usize].left; |
127 | 0 | new |
128 | 0 | } Unexecuted instantiation: <regalloc::avl_tree::AVLTree<regalloc::bt_commitment_map::RangeFragAndRangeId>>::alloc Unexecuted instantiation: <regalloc::avl_tree::AVLTree<regalloc::bt_spillslot_allocator::RangeFragAndRefness>>::alloc |
129 | | } |
130 | | |
131 | | //============================================================================= |
132 | | // Tree-wrangling machinery for AVLTree (private) |
133 | | |
134 | | // For the public interface, see below. |
135 | | |
136 | | // The functions 'insert' and 'delete', and all supporting functions reachable |
137 | | // from them, are derived from a public domain implementation by Georg Kraml. |
138 | | // Unfortunately the relevant web site is long gone, and can only be found on |
139 | | // the Wayback Machine. |
140 | | // |
141 | | // https://web.archive.org/web/20010419134337/ |
142 | | // http://www.kraml.at/georg/avltree/index.html |
143 | | // |
144 | | // https://web.archive.org/web/20030926063347/ |
145 | | // http://www.kraml.at/georg/avltree/avlmonolithic.c |
146 | | // |
147 | | // https://web.archive.org/web/20030401124003/http://www.kraml.at/src/howto/ |
148 | | // |
149 | | // For relicensing clearance, see Mozilla bug 1620332, at |
150 | | // https://bugzilla.mozilla.org/show_bug.cgi?id=1620332. |
151 | | |
// Did a given insertion/deletion succeed, and what do we do next?
#[derive(Clone, Copy, PartialEq)]
enum AVLRes {
    // The operation failed (in `delete_wrk`: the item was not in the tree).
    Error,
    // The local subtree is valid and its height is settled; ancestors need
    // no further rebalancing.
    OK,
    // The local subtree is valid but its height has changed; the caller
    // must keep checking balance on the way back towards the root.
    Balance,
}
159 | | |
160 | | impl<T: Clone + PartialOrd> AVLTree<T> { |
161 | | // Private function: rotleft: perform counterclockwise rotation |
162 | | // Takes the root of the tree to rotate, returns the new root |
163 | | fn rotleft(&mut self, old_root: u32) -> u32 { |
164 | | let new_root = self.pool[old_root as usize].right; |
165 | | self.pool[old_root as usize].right = self.pool[new_root as usize].left; |
166 | | self.pool[new_root as usize].left = old_root; |
167 | | new_root |
168 | | } |
169 | | |
170 | | // Private function: rotright: perform clockwise rotation |
171 | | // Takes the root of the tree to rotate, returns the new root |
172 | | fn rotright(&mut self, old_root: u32) -> u32 { |
173 | | let new_root = self.pool[old_root as usize].left; |
174 | | self.pool[old_root as usize].left = self.pool[new_root as usize].right; |
175 | | self.pool[new_root as usize].right = old_root; |
176 | | new_root |
177 | | } |
178 | | |
179 | | // Private function: leftgrown: helper function for `insert` |
180 | | // |
181 | | // Parameters: |
182 | | // |
183 | | // root Root of a tree. This node's left |
184 | | // subtree has just grown due to item insertion; its |
185 | | // "tag" flag needs adjustment, and the local tree |
186 | | // (the subtree of which this node is the root node) may |
187 | | // have become unbalanced. |
188 | | // |
189 | | // Return values: |
190 | | // |
191 | | // The new root of the subtree, plus either: |
192 | | // |
193 | | // OK The local tree could be rebalanced or was balanced |
194 | | // from the start. The parent activations of the avlinsert |
195 | | // activation that called this function may assume the |
196 | | // entire tree is valid. |
197 | | // or |
198 | | // BALANCE The local tree was balanced, but has grown in height. |
199 | | // Do not assume the entire tree is valid. |
200 | | // |
201 | | // This function has been split into two pieces: `leftgrown`, which is small and hot, and is |
202 | | // marked always-inline, and `leftgrown_left`, which handles a more complex and less |
203 | | // frequent case, and is marked never-inline. The intent is to have the common case always |
204 | | // inlined without having to deal with the extra register pressure from inlining the less |
205 | | // frequent code. The dual function `rightgrown` is split similarly. |
    // Slow path of `leftgrown`: the left subtree was already the taller one
    // and has now grown again, so a rotation is required.  Kept out of line
    // (never-inline) so the hot fast path in `leftgrown` stays small.
    // Returns the new subtree root; the result code is always `OK`.
    #[inline(never)]
    fn leftgrown_left(&mut self, mut root: u32) -> (u32, AVLRes) {
        if self.pool[self.pool[root as usize].left as usize].tag == AVLTag::Left {
            // Left-left case: a single clockwise rotation rebalances; both
            // affected nodes end up balanced.
            self.pool[root as usize].tag = AVLTag::None;
            let t = self.pool[root as usize].left;
            self.pool[t as usize].tag = AVLTag::None;
            root = self.rotright(root);
        } else {
            // Left-right case: set the post-rotation tags of `root` and its
            // left child according to the tag of the left child's right
            // subtree (the node that will become the new root), then
            // double-rotate: left child counterclockwise, root clockwise.
            match self.pool[self.pool[self.pool[root as usize].left as usize].right as usize].tag {
                AVLTag::Left => {
                    self.pool[root as usize].tag = AVLTag::Right;
                    let t = self.pool[root as usize].left;
                    self.pool[t as usize].tag = AVLTag::None;
                }
                AVLTag::Right => {
                    self.pool[root as usize].tag = AVLTag::None;
                    let t = self.pool[root as usize].left;
                    self.pool[t as usize].tag = AVLTag::Left;
                }
                AVLTag::None => {
                    self.pool[root as usize].tag = AVLTag::None;
                    let t = self.pool[root as usize].left;
                    self.pool[t as usize].tag = AVLTag::None;
                }
                AVLTag::Free => panic!("AVLTree::leftgrown_left: unallocated node in tree"),
            }
            // The node being hoisted to the top becomes balanced.
            let t = self.pool[self.pool[root as usize].left as usize].right;
            self.pool[t as usize].tag = AVLTag::None;
            self.pool[root as usize].left = self.rotleft(self.pool[root as usize].left);
            root = self.rotright(root);
        }
        return (root, AVLRes::OK);
    }
239 | | |
240 | | #[inline(always)] |
241 | 0 | fn leftgrown(&mut self, root: u32) -> (u32, AVLRes) { |
242 | 0 | let root_node = &mut self.pool[root as usize]; |
243 | 0 | match root_node.tag { |
244 | 0 | AVLTag::Left => self.leftgrown_left(root), |
245 | | AVLTag::Right => { |
246 | 0 | root_node.tag = AVLTag::None; |
247 | 0 | return (root, AVLRes::OK); |
248 | | } |
249 | | AVLTag::None => { |
250 | 0 | root_node.tag = AVLTag::Left; |
251 | 0 | return (root, AVLRes::Balance); |
252 | | } |
253 | 0 | AVLTag::Free => panic!("AVLTree::leftgrown: unallocated node in tree"), |
254 | | } |
255 | 0 | } Unexecuted instantiation: <regalloc::avl_tree::AVLTree<regalloc::bt_commitment_map::RangeFragAndRangeId>>::leftgrown Unexecuted instantiation: <regalloc::avl_tree::AVLTree<regalloc::bt_spillslot_allocator::RangeFragAndRefness>>::leftgrown |
256 | | |
257 | | // Private function: rightgrown: helper function for `insert` |
258 | | // |
259 | | // See leftgrown for details. |
    // Slow path of `rightgrown` (mirror image of `leftgrown_left`): the
    // right subtree was already the taller one and has now grown again, so a
    // rotation is required.  Returns the new subtree root; the result code
    // is always `OK`.
    #[inline(never)]
    fn rightgrown_right(&mut self, mut root: u32) -> (u32, AVLRes) {
        if self.pool[self.pool[root as usize].right as usize].tag == AVLTag::Right {
            // Right-right case: a single counterclockwise rotation
            // rebalances; both affected nodes end up balanced.
            self.pool[root as usize].tag = AVLTag::None;
            let t = self.pool[root as usize].right as usize;
            self.pool[t].tag = AVLTag::None;
            root = self.rotleft(root);
        } else {
            // Right-left case: set the post-rotation tags of `root` and its
            // right child from the tag of the right child's left subtree
            // (the node about to become the new root), then double-rotate:
            // right child clockwise, root counterclockwise.
            match self.pool[self.pool[self.pool[root as usize].right as usize].left as usize].tag {
                AVLTag::Right => {
                    self.pool[root as usize].tag = AVLTag::Left;
                    let t = self.pool[root as usize].right;
                    self.pool[t as usize].tag = AVLTag::None;
                }
                AVLTag::Left => {
                    self.pool[root as usize].tag = AVLTag::None;
                    let t = self.pool[root as usize].right;
                    self.pool[t as usize].tag = AVLTag::Right;
                }
                AVLTag::None => {
                    self.pool[root as usize].tag = AVLTag::None;
                    let t = self.pool[root as usize].right;
                    self.pool[t as usize].tag = AVLTag::None;
                }
                AVLTag::Free => panic!("AVLTree::rightgrown_right: unallocated node in tree"),
            }
            // The node being hoisted to the top becomes balanced.
            let t = self.pool[self.pool[root as usize].right as usize].left;
            self.pool[t as usize].tag = AVLTag::None;
            self.pool[root as usize].right = self.rotright(self.pool[root as usize].right);
            root = self.rotleft(root);
        }
        return (root, AVLRes::OK);
    }
293 | | |
294 | | #[inline(always)] |
295 | 0 | fn rightgrown(&mut self, root: u32) -> (u32, AVLRes) { |
296 | 0 | match self.pool[root as usize].tag { |
297 | 0 | AVLTag::Left => { |
298 | 0 | self.pool[root as usize].tag = AVLTag::None; |
299 | 0 | return (root, AVLRes::OK); |
300 | | } |
301 | 0 | AVLTag::Right => self.rightgrown_right(root), |
302 | | AVLTag::None => { |
303 | 0 | self.pool[root as usize].tag = AVLTag::Right; |
304 | 0 | return (root, AVLRes::Balance); |
305 | | } |
306 | 0 | AVLTag::Free => panic!("AVLTree::rightgrown: unallocated node in tree"), |
307 | | } |
308 | 0 | } Unexecuted instantiation: <regalloc::avl_tree::AVLTree<regalloc::bt_commitment_map::RangeFragAndRangeId>>::rightgrown Unexecuted instantiation: <regalloc::avl_tree::AVLTree<regalloc::bt_spillslot_allocator::RangeFragAndRefness>>::rightgrown |
309 | | |
310 | | // Private function: insert_wrk: insert a node into the AVL tree |
311 | | // (worker function) |
312 | | // |
313 | | // Parameters: |
314 | | // |
315 | |     //    root         Root of the tree in which to insert `d`.
316 | | // |
317 | | // item Item to be inserted. |
318 | | // |
319 | | // Returns AVL_NULL if the value is already in the tree. Otherwise returns the index of the |
320 | | // new root (which is obviously always non-AVL_NULL). This is infallible in the sense that, |
321 | | // if allocation of a new node fails, it won't return -- `self.alloc()` will panic. |
322 | | // |
323 | | // This function relies on the fact that any non-AVL_NULL value will have its top bit (bit |
324 | | // 31) clear, since that bit is used as a boolean in the `stack`. That property is |
325 | | // guaranteed us by `fn alloc`, which ensures that the max number of nodes in the tree is |
326 | | // 0x70000000. |
    // Private worker for insertion: insert `item` into the subtree rooted at
    // `root`, ordering by `T::partial_cmp` when `mb_cmp` is `None`, else by
    // the supplied comparator.  Returns AVL_NULL if an equal item is already
    // present, otherwise the index of the new overall root.  See the comment
    // block above for the full contract, in particular the reliance on all
    // valid node indices having bit 31 clear (guaranteed by `alloc`'s
    // 0x7000_0000 cap), since that bit tags stack entries below.
    fn insert_wrk<F>(&mut self, mut root: u32, item: T, mb_cmp: Option<&F>) -> u32
    where
        F: Fn(T, T) -> Option<Ordering>,
    {
        // Tag a node index as "we descended to the left child here".
        #[inline(always)]
        fn stack_entry_set_is_left(node: u32) -> u32 {
            node | 0x8000_0000
        }
        // Nonzero iff the entry carries the "went left" tag.
        #[inline(always)]
        fn stack_entry_get_is_left(ent: u32) -> u32 {
            ent & 0x8000_0000
        }
        // Strip the tag bit, recovering the plain node index.
        #[inline(always)]
        fn stack_entry_get_node(ent: u32) -> u32 {
            ent & 0x7FFF_FFFF
        }

        // The stack will hold a root-leaf path.  Give that the max number of elements allowed
        // in the tree is 0x70000000, which is (7/8) * 2^31, and that the max depth is 1.44 *
        // log2(elems), a 64 entry stack should always be sufficient.  Hence there should never
        // be any dynamic allocation here.  In fact a 48 entry stack would also suffice, but
        // SmallVec doesn't provide that size.
        let mut stack = SmallVec::<[u32; 64]>::new();

        // In the first phase, walk down the tree to find the place where the new node should be
        // inserted.  This loop is cloned so as to allow the test on `mb_cmp` to be done just
        // once.
        match mb_cmp {
            None => {
                while root != AVL_NULL {
                    let cmp_loc_right = &self.pool[root as usize];
                    let cmp_arg_right: T = cmp_loc_right.item.clone();
                    let cmp_arg_left: T = item.clone();
                    // Bit 31 must be free for use as the stack tag.
                    debug_assert!(stack_entry_get_is_left(root) == 0);
                    match cmp_arg_left.partial_cmp(&cmp_arg_right) {
                        None => panic!("AVLTree::insert_wrk: unordered elements(1)"),
                        Some(Ordering::Less) => {
                            stack.push(stack_entry_set_is_left(root));
                            root = cmp_loc_right.left;
                        }
                        Some(Ordering::Greater) => {
                            stack.push(root);
                            root = cmp_loc_right.right;
                        }
                        Some(Ordering::Equal) => {
                            // Item is already in the tree.
                            return AVL_NULL;
                        }
                    }
                }
            }
            Some(cmp) => {
                while root != AVL_NULL {
                    let cmp_loc_right = &self.pool[root as usize];
                    let cmp_arg_right: T = cmp_loc_right.item.clone();
                    let cmp_arg_left: T = item.clone();
                    // Bit 31 must be free for use as the stack tag.
                    debug_assert!(stack_entry_get_is_left(root) == 0);
                    match cmp(cmp_arg_left, cmp_arg_right) {
                        None => panic!("AVLTree::insert_wrk: unordered elements(2)"),
                        Some(Ordering::Less) => {
                            stack.push(stack_entry_set_is_left(root));
                            root = cmp_loc_right.left;
                        }
                        Some(Ordering::Greater) => {
                            stack.push(root);
                            root = cmp_loc_right.right;
                        }
                        Some(Ordering::Equal) => {
                            // Item is already in the tree.
                            return AVL_NULL;
                        }
                    }
                }
            }
        }

        // Now allocate the new node.
        debug_assert!(root == AVL_NULL);
        let new_node = self.alloc();
        self.pool[new_node as usize] = AVLNode::new(AVLTag::None, AVL_NULL, AVL_NULL, item.clone());

        // And unwind the stack, back to the root, rebalancing as we go.  Once get to a place
        // where the new subtree doesn't need to be rebalanced, we can stop this upward scan,
        // because no nodes above it will need to be rebalanced either.
        let mut curr_node = new_node;
        let mut curr_node_action = AVLRes::Balance;

        while let Some(parent_node_tagged) = stack.pop() {
            let parent_node = stack_entry_get_node(parent_node_tagged);
            if stack_entry_get_is_left(parent_node_tagged) != 0 {
                // We descended left at this parent: reattach the (possibly
                // rotated) child, then rebalance if still required.
                self.pool[parent_node as usize].left = curr_node;
                if curr_node_action == AVLRes::Balance {
                    let pair = self.leftgrown(parent_node);
                    curr_node = pair.0;
                    curr_node_action = pair.1;
                } else {
                    curr_node = parent_node;
                    break;
                }
            } else {
                // Mirror image: we descended right at this parent.
                self.pool[parent_node as usize].right = curr_node;
                if curr_node_action == AVLRes::Balance {
                    let pair = self.rightgrown(parent_node);
                    curr_node = pair.0;
                    curr_node_action = pair.1;
                } else {
                    curr_node = parent_node;
                    break;
                }
            }
        }

        // If we stopped early, the overall root is unchanged: recover it
        // from the bottom stack entry (the first node pushed).
        if !stack.is_empty() {
            curr_node = stack_entry_get_node(stack[0]);
        }

        debug_assert!(curr_node != AVL_NULL);
        return curr_node;
    }
446 | | |
447 | | // Private function: leftshrunk: helper function for delete and |
448 | | // findlowest |
449 | | // |
450 | | // Parameters: |
451 | | // |
452 | | // n Address of a pointer to a node. The node's left |
453 | | // subtree has just shrunk due to item removal; its |
454 | | // "skew" flag needs adjustment, and the local tree |
455 | | // (the subtree of which this node is the root node) may |
456 | | // have become unbalanced. |
457 | | // |
458 | | // Return values: |
459 | | // |
460 | | // OK The parent activation of the delete activation |
461 | | // that called this function may assume the entire |
462 | | // tree is valid. |
463 | | // |
464 | | // BALANCE Do not assume the entire tree is valid. |
    // Helper for `delete` and `findlowest`: the left subtree of `n` has just
    // shrunk by one level.  Adjust `n`'s balance tag and rebalance if
    // necessary.  Returns the (possibly new) subtree root, plus `Balance` if
    // this subtree got shorter (ancestors must keep checking) or `OK` if the
    // tree above can be assumed valid.  See the comment block above.
    fn leftshrunk(&mut self, mut n: u32) -> (u32, AVLRes) {
        match self.pool[n as usize].tag {
            // Left was taller: shrinkage evens things out, but the overall
            // height dropped by one.
            AVLTag::Left => {
                self.pool[n as usize].tag = AVLTag::None;
                return (n, AVLRes::Balance);
            }
            // Right was already taller: the subtree is now doubly
            // right-heavy and must be rotated; how depends on the right
            // child's own tag.
            AVLTag::Right => {
                if self.pool[self.pool[n as usize].right as usize].tag == AVLTag::Right {
                    // Right-right: single counterclockwise rotation; height
                    // drops, so keep propagating.
                    self.pool[n as usize].tag = AVLTag::None;
                    let t = self.pool[n as usize].right;
                    self.pool[t as usize].tag = AVLTag::None;
                    n = self.rotleft(n);
                    return (n, AVLRes::Balance);
                } else if self.pool[self.pool[n as usize].right as usize].tag == AVLTag::None {
                    // Right child balanced: single rotation, but the overall
                    // height is unchanged — propagation can stop.
                    self.pool[n as usize].tag = AVLTag::Right;
                    let t = self.pool[n as usize].right;
                    self.pool[t as usize].tag = AVLTag::Left;
                    n = self.rotleft(n);
                    return (n, AVLRes::OK);
                } else {
                    // Right-left: fix tags from the right child's left
                    // subtree (the node about to be hoisted), then
                    // double-rotate.  Height drops, so keep propagating.
                    match self.pool[self.pool[self.pool[n as usize].right as usize].left as usize]
                        .tag
                    {
                        AVLTag::Left => {
                            self.pool[n as usize].tag = AVLTag::None;
                            let t = self.pool[n as usize].right;
                            self.pool[t as usize].tag = AVLTag::Right;
                        }
                        AVLTag::Right => {
                            self.pool[n as usize].tag = AVLTag::Left;
                            let t = self.pool[n as usize].right;
                            self.pool[t as usize].tag = AVLTag::None;
                        }
                        AVLTag::None => {
                            self.pool[n as usize].tag = AVLTag::None;
                            let t = self.pool[n as usize].right;
                            self.pool[t as usize].tag = AVLTag::None;
                        }
                        AVLTag::Free => {
                            panic!("AVLTree::leftshrunk(1): unallocated node in tree");
                        }
                    }
                    {
                        // The hoisted node becomes balanced.
                        let t = self.pool[self.pool[n as usize].right as usize].left;
                        self.pool[t as usize].tag = AVLTag::None;
                    }
                    {
                        let t = self.rotright(self.pool[n as usize].right);
                        self.pool[n as usize].right = t;
                    }
                    n = self.rotleft(n);
                    return (n, AVLRes::Balance);
                }
            }
            // Was balanced: now leans right; overall height unchanged.
            AVLTag::None => {
                self.pool[n as usize].tag = AVLTag::Right;
                return (n, AVLRes::OK);
            }
            AVLTag::Free => {
                panic!("AVLTree::leftshrunk(2): unallocated node in tree");
            }
        }
    }
528 | | |
529 | | // Private function: rightshrunk: helper function for delete and |
530 | | // findhighest |
531 | | // |
532 | | // See leftshrunk for details. |
    // Helper for `delete` and `findhighest` (mirror image of `leftshrunk`):
    // the right subtree of `n` has just shrunk by one level.  Adjust `n`'s
    // balance tag and rebalance if necessary.  Returns the (possibly new)
    // subtree root, plus `Balance` if this subtree got shorter or `OK` if
    // the tree above can be assumed valid.
    fn rightshrunk(&mut self, mut n: u32) -> (u32, AVLRes) {
        match self.pool[n as usize].tag {
            // Right was taller: shrinkage evens things out, but the overall
            // height dropped by one.
            AVLTag::Right => {
                self.pool[n as usize].tag = AVLTag::None;
                return (n, AVLRes::Balance);
            }
            // Left was already taller: the subtree is now doubly left-heavy
            // and must be rotated; how depends on the left child's own tag.
            AVLTag::Left => {
                if self.pool[self.pool[n as usize].left as usize].tag == AVLTag::Left {
                    // Left-left: single clockwise rotation; height drops, so
                    // keep propagating.
                    self.pool[n as usize].tag = AVLTag::None;
                    let t = self.pool[n as usize].left;
                    self.pool[t as usize].tag = AVLTag::None;
                    n = self.rotright(n);
                    return (n, AVLRes::Balance);
                } else if self.pool[self.pool[n as usize].left as usize].tag == AVLTag::None {
                    // Left child balanced: single rotation, but the overall
                    // height is unchanged — propagation can stop.
                    self.pool[n as usize].tag = AVLTag::Left;
                    let t = self.pool[n as usize].left;
                    self.pool[t as usize].tag = AVLTag::Right;
                    n = self.rotright(n);
                    return (n, AVLRes::OK);
                } else {
                    // Left-right: fix tags from the left child's right
                    // subtree (the node about to be hoisted), then
                    // double-rotate.  Height drops, so keep propagating.
                    match self.pool[self.pool[self.pool[n as usize].left as usize].right as usize]
                        .tag
                    {
                        AVLTag::Left => {
                            self.pool[n as usize].tag = AVLTag::Right;
                            let t = self.pool[n as usize].left;
                            self.pool[t as usize].tag = AVLTag::None;
                        }
                        AVLTag::Right => {
                            self.pool[n as usize].tag = AVLTag::None;
                            let t = self.pool[n as usize].left;
                            self.pool[t as usize].tag = AVLTag::Left;
                        }
                        AVLTag::None => {
                            self.pool[n as usize].tag = AVLTag::None;
                            let t = self.pool[n as usize].left;
                            self.pool[t as usize].tag = AVLTag::None;
                        }
                        AVLTag::Free => {
                            panic!("AVLTree::rightshrunk(1): unallocated node in tree");
                        }
                    }
                    {
                        // The hoisted node becomes balanced.
                        let t = self.pool[self.pool[n as usize].left as usize].right;
                        self.pool[t as usize].tag = AVLTag::None;
                    }
                    {
                        let t = self.rotleft(self.pool[n as usize].left);
                        self.pool[n as usize].left = t;
                    }
                    n = self.rotright(n);
                    return (n, AVLRes::Balance);
                }
            }
            // Was balanced: now leans left; overall height unchanged.
            AVLTag::None => {
                self.pool[n as usize].tag = AVLTag::Left;
                return (n, AVLRes::OK);
            }
            AVLTag::Free => {
                panic!("AVLTree::rightshrunk(2): unallocated node in tree");
            }
        }
    }
596 | | |
597 | | // Private function: findhighest: replace a node with a subtree's |
598 | | // highest-ranking item. |
599 | | // |
600 | | // Parameters: |
601 | | // |
602 | | // target Pointer to node to be replaced. |
603 | | // |
604 | | // n Address of pointer to subtree. |
605 | | // |
606 | | // res Pointer to variable used to tell the caller whether |
607 | | // further checks are necessary; analog to the return |
608 | | // values of leftgrown and leftshrunk (see there). |
609 | | // |
610 | | // Return values: |
611 | | // |
612 | | // True A node was found; the target node has been replaced. |
613 | | // |
614 | | // False The target node could not be replaced because |
615 | | // the subtree provided was empty. |
616 | | // |
    // Helper for `delete`: copy the highest-ranking item of the subtree `n`
    // into `target`'s slot, free the node it came from, and rebalance on the
    // way back up.  Returns None if `n` is empty (target not replaced),
    // otherwise the new root of the subtree plus the OK/Balance result code.
    // See the comment block above for details.
    fn findhighest(&mut self, target: u32, mut n: u32) -> Option<(u32, AVLRes)> {
        if n == AVL_NULL {
            return None;
        }
        let mut res = AVLRes::Balance;
        if self.pool[n as usize].right != AVL_NULL {
            // Not yet at the rightmost (highest) node: recurse right, then
            // reattach and rebalance if the right subtree shrank.
            let rec = self.findhighest(target, self.pool[n as usize].right);
            if let Some((new_n_right, new_res)) = rec {
                self.pool[n as usize].right = new_n_right;
                res = new_res;
                if res == AVLRes::Balance {
                    let (new_n, new_res) = self.rightshrunk(n);
                    n = new_n;
                    res = new_res;
                }
                return Some((n, res));
            } else {
                return None;
            }
        }
        // `n` is the highest node: move its item into `target`, splice in
        // its left subtree, and recycle the node.
        self.pool[target as usize].item = self.pool[n as usize].item.clone();
        let tmp = n;
        n = self.pool[n as usize].left;
        self.free(tmp);
        Some((n, res))
    }
643 | | |
644 | | // Private function: findlowest: replace node with a subtree's |
645 | | // lowest-ranking item. |
646 | | // |
647 | | // See findhighest for the details. |
    // Helper for `delete` (mirror image of `findhighest`): copy the
    // lowest-ranking item of the subtree `n` into `target`'s slot, free the
    // node it came from, and rebalance on the way back up.  Returns None if
    // `n` is empty, otherwise the new subtree root plus the result code.
    fn findlowest(&mut self, target: u32, mut n: u32) -> Option<(u32, AVLRes)> {
        if n == AVL_NULL {
            return None;
        }
        let mut res = AVLRes::Balance;
        if self.pool[n as usize].left != AVL_NULL {
            // Not yet at the leftmost (lowest) node: recurse left, then
            // reattach and rebalance if the left subtree shrank.
            let rec = self.findlowest(target, self.pool[n as usize].left);
            if let Some((new_n_left, new_res)) = rec {
                self.pool[n as usize].left = new_n_left;
                res = new_res;
                if res == AVLRes::Balance {
                    let (new_n, new_res) = self.leftshrunk(n);
                    n = new_n;
                    res = new_res;
                }
                return Some((n, res));
            } else {
                return None;
            }
        }
        // `n` is the lowest node: move its item into `target`, splice in its
        // right subtree, and recycle the node.
        self.pool[target as usize].item = self.pool[n as usize].item.clone();
        let tmp = n;
        n = self.pool[n as usize].right;
        self.free(tmp);
        Some((n, res))
    }
674 | | |
675 | | // Private function: delete_wrk: delete an item from the tree. |
676 | | // (worker function) |
677 | | // |
678 | | // Parameters: |
679 | | // |
680 | | // n Address of a pointer to a node. |
681 | | // |
682 | | // key AVLKEY of item to be removed. |
683 | | // |
684 | | // Return values: |
685 | | // |
686 | | // nonzero The item has been removed. The exact value of |
687 | |     //                  nonzero yields is of no concern to user code; when
688 | | // delete recursively calls itself, the number |
689 | | // returned tells the parent activation if the AVL tree |
690 | | // may have become unbalanced; specifically: |
691 | | // |
692 | | // OK None of the subtrees of the node that n points to |
693 | | // has shrunk, the AVL tree is valid. |
694 | | // |
695 | | // BALANCE One of the subtrees of the node that n points to |
696 | | // has shrunk, the node's "skew" flag needs adjustment, |
697 | | // and the AVL tree may have become unbalanced. |
698 | | // |
699 | | // zero The tree does not contain an item yielding the |
700 | | // AVLKEY value provided by the caller. |
    // Private worker for deletion: remove `item` from the subtree rooted at
    // `root`, ordering by `T::partial_cmp` when `mb_cmp` is `None`, else by
    // the supplied comparator.  Returns the new subtree root plus `Error`
    // (item not found), `OK` or `Balance`, per the comment block above.
    fn delete_wrk<F>(&mut self, mut root: u32, item: T, mb_cmp: Option<&F>) -> (u32, AVLRes)
    where
        F: Fn(T, T) -> Option<Ordering>,
    {
        let mut tmp = AVLRes::Balance;
        if root == AVL_NULL {
            // Empty subtree: the item is not in the tree.
            return (root, AVLRes::Error);
        }

        let cmp_arg_left: T = item.clone();
        let cmp_arg_right: T = self.pool[root as usize].item.clone();
        let cmp_res = match mb_cmp {
            None => cmp_arg_left.partial_cmp(&cmp_arg_right),
            Some(cmp) => cmp(cmp_arg_left, cmp_arg_right),
        };
        match cmp_res {
            None => panic!("AVLTree::delete_wrk: unordered elements"),
            Some(Ordering::Less) => {
                // Item lies in the left subtree: recurse, reattach, and
                // rebalance this node if the left subtree shrank.
                let root_left = self.pool[root as usize].left;
                let (new_root_left, new_tmp) = self.delete_wrk(root_left, item, mb_cmp);
                self.pool[root as usize].left = new_root_left;
                tmp = new_tmp;
                if tmp == AVLRes::Balance {
                    let (new_root, new_res) = self.leftshrunk(root);
                    root = new_root;
                    tmp = new_res;
                }
                return (root, tmp);
            }
            Some(Ordering::Greater) => {
                // Item lies in the right subtree: mirror of the Less case.
                let root_right = self.pool[root as usize].right;
                let (new_root_right, new_tmp) = self.delete_wrk(root_right, item, mb_cmp);
                self.pool[root as usize].right = new_root_right;
                tmp = new_tmp;
                if tmp == AVLRes::Balance {
                    let (new_root, new_res) = self.rightshrunk(root);
                    root = new_root;
                    tmp = new_res;
                }
                return (root, tmp);
            }
            Some(Ordering::Equal) => {
                // This is the node to delete.  If it has a left subtree,
                // overwrite it in place with that subtree's highest item
                // (freeing the donor node), then rebalance.
                if self.pool[root as usize].left != AVL_NULL {
                    let root_left = self.pool[root as usize].left;
                    if let Some((new_root_left, new_tmp)) = self.findhighest(root, root_left) {
                        self.pool[root as usize].left = new_root_left;
                        tmp = new_tmp;
                        if new_tmp == AVLRes::Balance {
                            let (new_root, new_res) = self.leftshrunk(root);
                            root = new_root;
                            tmp = new_res;
                        }
                    }
                    return (root, tmp);
                }
                // Otherwise, if it has a right subtree, use that subtree's
                // lowest item instead.
                if self.pool[root as usize].right != AVL_NULL {
                    let root_right = self.pool[root as usize].right;
                    if let Some((new_root_right, new_tmp)) = self.findlowest(root, root_right) {
                        self.pool[root as usize].right = new_root_right;
                        tmp = new_tmp;
                        if new_tmp == AVLRes::Balance {
                            let (new_root, new_res) = self.rightshrunk(root);
                            root = new_root;
                            tmp = new_res;
                        }
                    }
                    return (root, tmp);
                }
                // Leaf node: just recycle it; the subtree becomes empty and
                // definitely shrank.
                self.free(root);
                root = AVL_NULL;
                return (root, AVLRes::Balance);
            }
        }
    }
775 | | |
776 | | // Private fn: count the number of items in the tree. Warning: costs O(N) ! |
777 | | #[cfg(test)] |
778 | | fn count_wrk(&self, n: u32) -> usize { |
779 | | if n == AVL_NULL { |
780 | | return 0; |
781 | | } |
782 | | 1 + self.count_wrk(self.pool[n as usize].left) + self.count_wrk(self.pool[n as usize].right) |
783 | | } |
784 | | |
785 | | // Private fn: find the max depth of the tree. Warning: costs O(N) ! |
786 | | #[cfg(test)] |
787 | | fn depth_wrk(&self, n: u32) -> usize { |
788 | | if n == AVL_NULL { |
789 | | return 0; |
790 | | } |
791 | | let d_left = self.depth_wrk(self.pool[n as usize].left); |
792 | | let d_right = self.depth_wrk(self.pool[n as usize].right); |
793 | | 1 + if d_left > d_right { d_left } else { d_right } |
794 | | } |
795 | | } |
796 | | |
// Machinery for iterating over the tree, enumerating nodes in ascending order.
// Unfortunately AVLTreeIter has to be public.
pub struct AVLTreeIter<'t, 's, T> {
    // The tree being iterated over, borrowed for the iterator's lifetime.
    tree: &'t AVLTree<T>,
    // Explicit work stack of pool indices still to be visited.  It is supplied
    // by the caller (see `AVLTree::iter`) so its allocation can be reused
    // across separate iterations.
    stack: &'s mut Vec<u32>,
}
803 | | |
804 | | impl<'t, 's, T> AVLTreeIter<'t, 's, T> { |
805 | | #[allow(dead_code)] |
806 | 0 | fn new(tree: &'t AVLTree<T>, stack: &'s mut Vec<u32>) -> Self { |
807 | 0 | let mut iter = AVLTreeIter { tree, stack }; |
808 | 0 | if tree.root != AVL_NULL { |
809 | 0 | iter.stack.push(tree.root); |
810 | 0 | iter.visit_left_children(tree.root); |
811 | 0 | } |
812 | 0 | iter |
813 | 0 | } |
814 | | |
815 | 0 | fn visit_left_children(&mut self, root: u32) { |
816 | 0 | let mut cur = root; |
817 | | loop { |
818 | 0 | let left = self.tree.pool[cur as usize].left; |
819 | 0 | if left == AVL_NULL { |
820 | 0 | break; |
821 | 0 | } |
822 | 0 | self.stack.push(left); |
823 | 0 | cur = left; |
824 | | } |
825 | 0 | } |
826 | | } |
827 | | |
828 | | impl<'s, 't, T: Copy> Iterator for AVLTreeIter<'s, 't, T> { |
829 | | type Item = T; |
830 | 0 | fn next(&mut self) -> Option<Self::Item> { |
831 | 0 | let ret = match self.stack.pop() { |
832 | 0 | Some(ret) => ret, |
833 | 0 | None => return None, |
834 | | }; |
835 | 0 | let right = self.tree.pool[ret as usize].right; |
836 | 0 | if right != AVL_NULL { |
837 | 0 | self.stack.push(right); |
838 | 0 | self.visit_left_children(right); |
839 | 0 | } |
840 | 0 | Some(self.tree.pool[ret as usize].item) |
841 | 0 | } |
842 | | } |
843 | | |
844 | | //============================================================================= |
845 | | // Public interface for AVLTree |
846 | | |
847 | | impl<T: Clone + PartialOrd> AVLTree<T> { |
848 | | // The core functions (insert, delete, contains) take a comparator argument |
849 | | // |
850 | | // mb_cmp: Option<&F> |
851 | | // where |
852 | | // F: Fn(T, T) -> Option<Ordering> |
853 | | // |
854 | | // which allows control over how node comparison is done. If this is None, |
855 | | // then comparison is done directly using PartialOrd for the T values. |
856 | | // |
857 | | // If this is Some(cmp), then comparison is done by passing the two T values |
858 | | // to `cmp`. In this case, the routines will complain (panic) if `cmp` |
859 | | // indicates that its arguments are unordered. |
860 | | |
861 | | // Insert a value in the tree. Returns true if an insert happened, false if |
862 | | // the item was already present. |
863 | 0 | pub fn insert<F>(&mut self, item: T, mb_cmp: Option<&F>) -> bool |
864 | 0 | where |
865 | 0 | F: Fn(T, T) -> Option<Ordering>, |
866 | 0 | { |
867 | 0 | let new_root = self.insert_wrk(self.root, item, mb_cmp); |
868 | 0 | if new_root == AVL_NULL { |
869 | 0 | return false; // already in tree |
870 | | } else { |
871 | 0 | self.root = new_root; |
872 | 0 | return true; |
873 | | } |
874 | 0 | } Unexecuted instantiation: <regalloc::avl_tree::AVLTree<regalloc::bt_commitment_map::RangeFragAndRangeId>>::insert::<<regalloc::bt_commitment_map::CommitmentMap>::add_indirect::{closure#0}>Unexecuted instantiation: <regalloc::avl_tree::AVLTree<regalloc::bt_spillslot_allocator::RangeFragAndRefness>>::insert::<regalloc::bt_spillslot_allocator::ssal_add_if_possible::{closure#0}>Unexecuted instantiation: <regalloc::avl_tree::AVLTree<regalloc::bt_spillslot_allocator::RangeFragAndRefness>>::insert::<<regalloc::bt_spillslot_allocator::SpillSlotAllocator>::alloc_reftyped_spillslot_for_frag::{closure#0}>Unexecuted instantiation: <regalloc::avl_tree::AVLTree<regalloc::bt_commitment_map::RangeFragAndRangeId>>::insert::<<regalloc::bt_commitment_map::CommitmentMap>::add::{closure#0}> |
875 | | |
876 | | // Remove an item from the tree. Returns a bool which indicates whether the |
877 | | // value was in there in the first place. (meaning, true == a removal |
878 | | // actually occurred). |
879 | 0 | pub fn delete<F>(&mut self, item: T, mb_cmp: Option<&F>) -> bool |
880 | 0 | where |
881 | 0 | F: Fn(T, T) -> Option<Ordering>, |
882 | 0 | { |
883 | 0 | let (new_root, res) = self.delete_wrk(self.root, item, mb_cmp); |
884 | 0 | if res == AVLRes::Error { |
885 | 0 | return false; |
886 | | } else { |
887 | 0 | self.root = new_root; |
888 | 0 | return true; |
889 | | } |
890 | 0 | } |
891 | | |
892 | | // Find `item` in the tree, and replace it with `replacement`. `item` and `replacement` |
893 | | // must compare equal per the comparison function `cmp`. Returns a bool indicating whether |
894 | | // `item` was found (and hence, replaced). There's no comparison fast-path here |
895 | | // (meaning, `cmp` is `&F` and not `Option<&F>`) only because so far there is no use case |
896 | | // for it. |
897 | 0 | pub fn find_and_replace<F>(&mut self, item: T, replacement: T, cmp: &F) -> bool |
898 | 0 | where |
899 | 0 | F: Fn(T, T) -> Option<Ordering>, |
900 | 0 | { |
901 | 0 | let mut n = self.root; |
902 | 0 | loop { |
903 | 0 | if n == AVL_NULL { |
904 | 0 | return false; |
905 | 0 | } |
906 | 0 | let cmp_arg_left: T = item.clone(); |
907 | 0 | let cmp_arg_right: T = self.pool[n as usize].item.clone(); |
908 | 0 | match cmp(cmp_arg_left, cmp_arg_right) { |
909 | 0 | Some(Ordering::Less) => { |
910 | 0 | n = self.pool[n as usize].left; |
911 | 0 | } |
912 | 0 | Some(Ordering::Greater) => { |
913 | 0 | n = self.pool[n as usize].right; |
914 | 0 | } |
915 | | Some(Ordering::Equal) => { |
916 | | // Do what we can to ensure the caller can't mess up the total ordering in |
917 | | // the tree. This is more restrictive than it needs to be, but loosening |
918 | | // it requires finding the largest item below `item` and the smallest one |
919 | | // above it, which is expensive. |
920 | 0 | assert!(cmp(item, replacement.clone()) == Some(Ordering::Equal)); |
921 | 0 | self.pool[n as usize].item = replacement.clone(); |
922 | 0 | return true; |
923 | | } |
924 | | None => { |
925 | 0 | panic!("AVLTree::find_and_replace: unordered elements in search!"); |
926 | | } |
927 | | } |
928 | | } |
929 | 0 | } |
930 | | |
931 | | // Determine whether an item is in the tree. |
932 | | // sewardj 2020Mar31: this is not used; I assume all users of the trees |
933 | | // do their own custom traversals. Remove #[cfg(test)] if any real uses |
934 | | // appear. |
935 | | #[cfg(test)] |
936 | | pub fn contains<F>(&self, item: T, mb_cmp: Option<&F>) -> bool |
937 | | where |
938 | | F: Fn(T, T) -> Option<Ordering>, |
939 | | { |
940 | | let mut n = self.root; |
941 | | // Lookup needs to be really fast, so have two versions of the loop, one |
942 | | // for direct comparison, one for indirect. |
943 | | match mb_cmp { |
944 | | None => { |
945 | | // Do comparisons directly on the items. |
946 | | loop { |
947 | | if n == AVL_NULL { |
948 | | return false; |
949 | | } |
950 | | let cmp_arg_left: T = item.clone(); |
951 | | let cmp_arg_right: T = self.pool[n as usize].item.clone(); |
952 | | match cmp_arg_left.partial_cmp(&cmp_arg_right) { |
953 | | Some(Ordering::Less) => { |
954 | | n = self.pool[n as usize].left; |
955 | | } |
956 | | Some(Ordering::Greater) => { |
957 | | n = self.pool[n as usize].right; |
958 | | } |
959 | | Some(Ordering::Equal) => { |
960 | | return true; |
961 | | } |
962 | | None => { |
963 | | panic!("AVLTree::contains(1): unordered elements in search!"); |
964 | | } |
965 | | } |
966 | | } |
967 | | } |
968 | | Some(cmp) => { |
969 | | // Do comparisons by handing off to the supplied function. |
970 | | loop { |
971 | | if n == AVL_NULL { |
972 | | return false; |
973 | | } |
974 | | let cmp_arg_left: T = item.clone(); |
975 | | let cmp_arg_right: T = self.pool[n as usize].item.clone(); |
976 | | match cmp(cmp_arg_left, cmp_arg_right) { |
977 | | Some(Ordering::Less) => { |
978 | | n = self.pool[n as usize].left; |
979 | | } |
980 | | Some(Ordering::Greater) => { |
981 | | n = self.pool[n as usize].right; |
982 | | } |
983 | | Some(Ordering::Equal) => { |
984 | | return true; |
985 | | } |
986 | | None => { |
987 | | panic!("AVLTree::contains(2): unordered elements in search!"); |
988 | | } |
989 | | } |
990 | | } |
991 | | } |
992 | | } |
993 | | } |
994 | | |
995 | | // Count the number of items in the tree. Warning: costs O(N) ! |
996 | | #[cfg(test)] |
997 | | fn count(&self) -> usize { |
998 | | self.count_wrk(self.root) |
999 | | } |
1000 | | |
1001 | | // Private fn: find the max depth of the tree. Warning: costs O(N) ! |
1002 | | #[cfg(test)] |
1003 | | fn depth(&self) -> usize { |
1004 | | self.depth_wrk(self.root) |
1005 | | } |
1006 | | |
1007 | 0 | pub fn to_vec(&self) -> Vec<T> { |
1008 | 0 | // BEGIN helper fn |
1009 | 0 | fn walk<U: Clone>(res: &mut Vec<U>, root: u32, pool: &Vec<AVLNode<U>>) { |
1010 | 0 | let root_left = pool[root as usize].left; |
1011 | 0 | if root_left != AVL_NULL { |
1012 | 0 | walk(res, root_left, pool); |
1013 | 0 | } |
1014 | 0 | res.push(pool[root as usize].item.clone()); |
1015 | 0 | let root_right = pool[root as usize].right; |
1016 | 0 | if root_right != AVL_NULL { |
1017 | 0 | walk(res, root_right, pool); |
1018 | 0 | } |
1019 | 0 | } |
1020 | 0 | // END helper fn |
1021 | 0 |
|
1022 | 0 | let mut res = Vec::<T>::new(); |
1023 | 0 | if self.root != AVL_NULL { |
1024 | 0 | walk(&mut res, self.root, &self.pool); |
1025 | 0 | } |
1026 | 0 | res |
1027 | 0 | } |
1028 | | |
1029 | | #[allow(dead_code)] |
1030 | 0 | pub fn iter<'t, 's>(&'t self, storage: &'s mut Vec<u32>) -> AVLTreeIter<'t, 's, T> { |
1031 | 0 | storage.clear(); |
1032 | 0 | AVLTreeIter::new(self, storage) |
1033 | 0 | } |
1034 | | |
1035 | | // Show the tree. (For debugging only.) |
1036 | | //pub fn show(&self, depth: i32, node: u32) { |
1037 | | // if node != AVL_NULL { |
1038 | | // self.show(depth + 1, self.pool[node as usize].left); |
1039 | | // for _ in 0..depth { |
1040 | | // print!(" "); |
1041 | | // } |
1042 | | // println!("{}", ToFromU32::to_u32(self.pool[node as usize].item)); |
1043 | | // self.show(depth + 1, self.pool[node as usize].right); |
1044 | | // } |
1045 | | //} |
1046 | | } |
1047 | | |
1048 | | //============================================================================= |
1049 | | // Testing machinery for AVLTree |
1050 | | |
#[cfg(test)]
mod avl_tree_test_utils {
    use crate::data_structures::Set;
    use std::cmp::Ordering;

    // Perform various checks on the tree, and assert if it is not OK.
    // `should_be_in_tree` is the reference model; `univ_min..univ_max` is the
    // universe of values probed for membership.
    pub fn check_tree(
        tree: &super::AVLTree<u32>,
        should_be_in_tree: &Set<u32>,
        univ_min: u32,
        univ_max: u32,
    ) {
        // Same cardinality
        let n_in_set = should_be_in_tree.card();
        let n_in_tree = tree.count();
        assert!(n_in_set == n_in_tree);

        // Tree is not wildly out of balance.  Depth should not exceed 1.44 *
        // log2(size).
        let tree_depth = tree.depth();
        let mut log2_size = 0;
        {
            let mut n: usize = n_in_tree;
            while n > 0 {
                n = n >> 1;
                log2_size += 1;
            }
        }
        // Actually a tighter limit than stated above.  For these test cases,
        // the tree is either perfectly balanced or within one level of being
        // so (hence the +1).
        assert!(tree_depth <= log2_size + 1);

        // Check that everything that should be in the tree is in it, and vice
        // versa.
        for i in univ_min..univ_max {
            let should_be_in = should_be_in_tree.contains(i);

            // Look it up with a null comparator (so `contains` compares
            // directly)
            let is_in = tree.contains::<fn(u32, u32) -> Option<Ordering>>(i, None);
            assert!(is_in == should_be_in);

            // We should get the same result with a custom comparator that does
            // the same as the null comparator.
            let is_in_w_cmp = tree.contains(
                i,
                Some(&(|x_left: u32, x_right: u32| x_left.partial_cmp(&x_right))),
            );
            assert!(is_in_w_cmp == is_in);

            // And even when the comparator is actually a closure capturing a
            // value from its environment.  (Previously this constant was
            // misleadingly named `forty_two` while holding 52; the value is
            // arbitrary — adding the same offset to both sides preserves the
            // ordering.)
            let offset: u32 = 52;
            let is_in_w_cmp_closure = tree.contains(
                i,
                Some(
                    &(|x_left: u32, x_right: u32| {
                        (x_left + offset).partial_cmp(&(x_right + offset))
                    }),
                ),
            );
            assert!(is_in_w_cmp_closure == is_in);
        }

        // We could even test that the tree items are in-order, but it hardly
        // seems worth the hassle, since the previous test would surely have
        // failed if that wasn't the case.
    }
}
1120 | | |
#[test]
fn test_avl_tree1() {
    use crate::data_structures::Set;

    // Perform tests on an AVL tree.  Use as values, every third number between
    // 5000 and 5999 inclusive.  This is to ensure that there's no confusion
    // between element values and internal tree indices (although I think the
    // typechecker guarantees that anyway).
    //
    // Also carry along a Set<u32>, which keeps track of which values should be
    // in the tree at the current point.
    const UNIV_MIN: u32 = 5000;
    const UNIV_MAX: u32 = 5999;
    const UNIV_SIZE: u32 = UNIV_MAX - UNIV_MIN + 1;

    let mut tree = AVLTree::<u32>::new(0);
    let mut should_be_in_tree = Set::<u32>::empty();

    // Add numbers to the tree, checking as we go.
    for i in UNIV_MIN..UNIV_MAX {
        // Idiotic but simple
        if i % 3 != 0 {
            continue;
        }
        // Every value is fresh, so every insert must report success.
        let was_added = tree.insert::<fn(u32, u32) -> Option<Ordering>>(i, None);
        should_be_in_tree.insert(i);
        assert!(was_added == true);
        avl_tree_test_utils::check_tree(&tree, &should_be_in_tree, UNIV_MIN, UNIV_MAX);
    }

    // Then remove the middle half of the tree, also checking.
    for i in UNIV_MIN + UNIV_SIZE / 4..UNIV_MIN + 3 * (UNIV_SIZE / 4) {
        // Note that here, we're asking to delete a bunch of numbers that aren't
        // in the tree.  It should remain valid throughout.
        let was_removed = tree.delete::<fn(u32, u32) -> Option<Ordering>>(i, None);
        let should_have_been_removed = should_be_in_tree.contains(i);
        assert!(was_removed == should_have_been_removed);
        should_be_in_tree.delete(i);
        avl_tree_test_utils::check_tree(&tree, &should_be_in_tree, UNIV_MIN, UNIV_MAX);
    }

    // Now add some numbers which are already in the tree.
    for i in UNIV_MIN..UNIV_MIN + UNIV_SIZE / 4 {
        if i % 3 != 0 {
            continue;
        }
        // Inserting an existing value must report failure; a fresh one, success.
        let was_added = tree.insert::<fn(u32, u32) -> Option<Ordering>>(i, None);
        let should_have_been_added = !should_be_in_tree.contains(i);
        assert!(was_added == should_have_been_added);
        should_be_in_tree.insert(i);
        avl_tree_test_utils::check_tree(&tree, &should_be_in_tree, UNIV_MIN, UNIV_MAX);
    }

    // Then remove all numbers from the tree, in reverse order.
    for ir in UNIV_MIN..UNIV_MAX {
        let i = UNIV_MIN + (UNIV_MAX - ir);
        let was_removed = tree.delete::<fn(u32, u32) -> Option<Ordering>>(i, None);
        let should_have_been_removed = should_be_in_tree.contains(i);
        assert!(was_removed == should_have_been_removed);
        should_be_in_tree.delete(i);
        avl_tree_test_utils::check_tree(&tree, &should_be_in_tree, UNIV_MIN, UNIV_MAX);
    }

    // Now the tree should be empty.
    assert!(should_be_in_tree.is_empty());
    assert!(tree.count() == 0);

    // Now delete some more stuff.  Tree should still be empty :-)
    for i in UNIV_MIN + 10..UNIV_MIN + 100 {
        assert!(should_be_in_tree.is_empty());
        assert!(tree.count() == 0);
        tree.delete::<fn(u32, u32) -> Option<Ordering>>(i, None);
        avl_tree_test_utils::check_tree(&tree, &should_be_in_tree, UNIV_MIN, UNIV_MAX);
    }

    // The tree root should be NULL.
    assert!(tree.root == AVL_NULL);
    assert!(tree.freelist != AVL_NULL);

    // Check the freelist: all entries are of the expected form.  (Per the
    // freelist description near the top of the file, a free entry uses `left`
    // as the link to the next free entry and leaves the other fields inert.)
    for e in &tree.pool {
        assert!(e.tag == AVLTag::Free);
        assert!(e.left == AVL_NULL || (e.left as usize) < tree.pool.len());
        assert!(e.right == AVL_NULL);
        assert!(e.item == 0);
    }

    // Check the freelist: it's non-circular, and contains the expected number
    // of elements.
    let mut n_in_freelist = 0;
    let mut cursor: u32 = tree.freelist;
    while cursor != AVL_NULL {
        assert!((cursor as usize) < tree.pool.len());
        n_in_freelist += 1;
        assert!(n_in_freelist < 100000 /*arbitrary*/); // else it has a cycle
        cursor = tree.pool[cursor as usize].left;
    }
    // If we get here, the freelist at least doesn't have a cycle.

    // All elements in the pool are on the freelist.
    assert!(n_in_freelist == tree.pool.len());
}
1223 | | |
#[test]
fn test_avl_tree2() {
    use std::cmp::Ordering;

    // Do some simple testing using a custom comparator, which inverts the
    // order of items in the tree, so as to check custom comparators work
    // right.
    fn reverse_cmp(x: u32, y: u32) -> Option<Ordering> {
        y.partial_cmp(&x)
    }

    let nums = [31, 41, 59, 27, 14, 35, 62, 25, 18, 28, 45, 90, 61];
    let mut tree = AVLTree::<u32>::new(0);

    // Insert: every value is distinct, so each insert must succeed.
    for &n in &nums {
        assert!(tree.insert(n, Some(&reverse_cmp)));
    }

    // Check membership against the reference array.
    for n in 0..100 {
        let should_be_in = nums.contains(&n);
        assert!(tree.contains(n, Some(&reverse_cmp)) == should_be_in);
    }

    // Delete: removal succeeds exactly for the values present.
    for n in 0..100 {
        let remove_should_have_happened = nums.contains(&n);
        assert!(tree.delete(n, Some(&reverse_cmp)) == remove_should_have_happened);
    }

    // Final check
    assert!(tree.root == AVL_NULL);
    assert!(tree.count() == 0);
}
1262 | | |
#[test]
fn test_avl_tree_iter() {
    let mut storage = Vec::new();

    // An empty tree must yield an empty iteration.
    let empty_tree = AVLTree::<u32>::new(0);
    assert!(empty_tree.iter(&mut storage).next().is_none());

    const FROM: u32 = 0;
    const TO: u32 = 10000;

    // Populate a tree with FROM..TO in natural order.
    let mut tree = AVLTree::<u32>::new(0);
    for i in FROM..TO {
        tree.insert(i, Some(&|a: u32, b: u32| a.partial_cmp(&b)));
    }

    // The iterator must visit exactly the same sequence as to_vec().
    let expected = tree.to_vec();
    for (i, val) in tree.iter(&mut storage).enumerate() {
        assert_eq!(expected[i], val, "not equal for i={}", i);
    }
}