/src/open62541/deps/ziptree.c
Line | Count | Source (jump to first uncovered line) |
1 | | /* This Source Code Form is subject to the terms of the Mozilla Public |
2 | | * License, v. 2.0. If a copy of the MPL was not distributed with this |
3 | | * file, You can obtain one at http://mozilla.org/MPL/2.0/. |
4 | | * |
5 | | * Copyright 2021-2022 (c) Julius Pfrommer |
6 | | */ |
7 | | |
8 | | #include "ziptree.h" |
9 | | |
/* Dummy types. The concrete element/head types are declared by the users of
 * ziptree.h via the ZIP_ENTRY/ZIP_HEAD macros. The generic __ZIP_* functions
 * below never access struct members of zip_elem directly; they reach the
 * entry and the key through the byte offsets (fieldoffset/keyoffset) that
 * every function receives as parameters. */
struct zip_elem;
typedef struct zip_elem zip_elem;
typedef ZIP_ENTRY(zip_elem) zip_entry;
typedef ZIP_HEAD(, zip_elem) zip_head;
15 | | |
/* Access macros. NOTE: these expand the identifiers "fieldoffset" and
 * "keyoffset", which must be in scope (as function parameters) at every
 * expansion site. ZIP_ENTRY_PTR yields the embedded zip_entry of an element,
 * ZIP_KEY_PTR the address of its key. */
#define ZIP_ENTRY_PTR(x) ((zip_entry*)((char*)x + fieldoffset))
#define ZIP_KEY_PTR(x) (const void*)((const char*)x + keyoffset)
19 | | |
/* Hash pointers to keep the tie-breaking of equal keys (mostly) uncorrelated
 * from the rank (pointer order). Hashing code taken from sdbm-hash
 * (http://www.cse.yorku.ca/~oz/hash.html). */
static unsigned int
__ZIP_PTR_HASH(const void *p) {
    /* Hash the bytes of the pointer value itself (not the pointee) */
    const unsigned char *bytes = (const unsigned char*)&p;
    unsigned int hash = 0;
    size_t i = 0;
    while(i < sizeof(void*)) {
        hash = bytes[i] + (hash << 6) + (hash << 16) - hash;
        i++;
    }
    return hash;
}
31 | | |
32 | | static ZIP_INLINE enum ZIP_CMP |
33 | 6.13M | __ZIP_RANK_CMP(const void *p1, const void *p2) { |
34 | | /* assert(p1 != p2); */ |
35 | 6.13M | unsigned int h1 = __ZIP_PTR_HASH(p1); |
36 | 6.13M | unsigned int h2 = __ZIP_PTR_HASH(p2); |
37 | 6.13M | if(h1 == h2) |
38 | 0 | return (p1 < p2) ? ZIP_CMP_LESS : ZIP_CMP_MORE; |
39 | 6.13M | return (h1 < h2) ? ZIP_CMP_LESS : ZIP_CMP_MORE; |
40 | 6.13M | } |
41 | | |
42 | | static ZIP_INLINE enum ZIP_CMP |
43 | 10.2M | __ZIP_UNIQUE_CMP(zip_cmp_cb cmp, const void *p1, const void *p2) { |
44 | 10.2M | if(p1 == p2) |
45 | 1.87M | return ZIP_CMP_EQ; |
46 | 8.40M | enum ZIP_CMP order = cmp(p1, p2); |
47 | 8.40M | if(order == ZIP_CMP_EQ) |
48 | 237k | return (p1 < p2) ? ZIP_CMP_LESS : ZIP_CMP_MORE; |
49 | 8.16M | return order; |
50 | 8.40M | } |
51 | | |
#if 0
/* Debug-only recursive consistency check, disabled by default. Enable this
 * block to assert the two zip-tree invariants after modifications:
 *   - search order: every element's key lies between min_elm and max_elm
 *   - heap order: a parent has a higher __ZIP_RANK_CMP rank than its children */
#include <assert.h>
ZIP_UNUSED static ZIP_INLINE void
__ZIP_VALIDATE(zip_cmp_cb cmp, unsigned short fieldoffset,
               unsigned short keyoffset, void *elm,
               void *min_elm, void *max_elm) {
    if(!elm)
        return;
    /* elm must not be smaller than the subtree's minimum */
    enum ZIP_CMP c1 = __ZIP_UNIQUE_CMP(cmp, ZIP_KEY_PTR(min_elm), ZIP_KEY_PTR(elm));
    assert((elm == min_elm && c1 == ZIP_CMP_EQ) || c1 == ZIP_CMP_LESS);

    /* elm must not be larger than the subtree's maximum */
    enum ZIP_CMP c2 = __ZIP_UNIQUE_CMP(cmp, ZIP_KEY_PTR(max_elm), ZIP_KEY_PTR(elm));
    assert((elm == max_elm && c2 == ZIP_CMP_EQ) || c2 == ZIP_CMP_MORE);

    /* Heap property on the rank w.r.t. both children */
    assert(!ZIP_ENTRY_PTR(elm)->right ||
           __ZIP_RANK_CMP(elm, ZIP_ENTRY_PTR(elm)->right) == ZIP_CMP_MORE);
    assert(!ZIP_ENTRY_PTR(elm)->left ||
           __ZIP_RANK_CMP(elm, ZIP_ENTRY_PTR(elm)->left) == ZIP_CMP_MORE);

    /* Recurse with tightened bounds */
    __ZIP_VALIDATE(cmp, fieldoffset, keyoffset, ZIP_ENTRY_PTR(elm)->right, elm, max_elm);
    __ZIP_VALIDATE(cmp, fieldoffset, keyoffset, ZIP_ENTRY_PTR(elm)->left, min_elm, elm);
}
#endif
75 | | |
76 | | /* Walk down the right-side spine of cur. Elements that are larger than x_key |
77 | | * are moved under x->right. */ |
78 | | static void |
79 | | __ZIP_INSERT_MOVE_RIGHT(zip_cmp_cb cmp, unsigned short fieldoffset, |
80 | | unsigned short keyoffset, const void *x_key, |
81 | 368k | zip_elem **fix_edge, zip_elem *cur) { |
82 | 851k | while(ZIP_ENTRY_PTR(cur)->right) { |
83 | 482k | zip_elem *move_candidate = ZIP_ENTRY_PTR(cur)->right; |
84 | 482k | if(__ZIP_UNIQUE_CMP(cmp, x_key, ZIP_KEY_PTR(move_candidate)) == ZIP_CMP_MORE) { |
85 | 165k | cur = ZIP_ENTRY_PTR(cur)->right; |
86 | 165k | continue; |
87 | 165k | } |
88 | 316k | ZIP_ENTRY_PTR(cur)->right = ZIP_ENTRY_PTR(move_candidate)->left; |
89 | 316k | ZIP_ENTRY_PTR(move_candidate)->left = NULL; |
90 | 316k | *fix_edge = move_candidate; |
91 | 316k | fix_edge = &ZIP_ENTRY_PTR(move_candidate)->left; |
92 | 316k | } |
93 | 368k | } |
94 | | |
95 | | static void |
96 | | __ZIP_INSERT_MOVE_LEFT(zip_cmp_cb cmp, unsigned short fieldoffset, |
97 | | unsigned short keyoffset, const void *x_key, |
98 | 874k | zip_elem **fix_edge, zip_elem *cur) { |
99 | 1.61M | while(ZIP_ENTRY_PTR(cur)->left) { |
100 | 743k | zip_elem *move_candidate = ZIP_ENTRY_PTR(cur)->left; |
101 | 743k | if(__ZIP_UNIQUE_CMP(cmp, x_key, ZIP_KEY_PTR(move_candidate)) == ZIP_CMP_LESS) { |
102 | 480k | cur = ZIP_ENTRY_PTR(cur)->left; |
103 | 480k | continue; |
104 | 480k | } |
105 | 263k | ZIP_ENTRY_PTR(cur)->left = ZIP_ENTRY_PTR(move_candidate)->right; |
106 | 263k | ZIP_ENTRY_PTR(move_candidate)->right = NULL; |
107 | 263k | *fix_edge = move_candidate; |
108 | 263k | fix_edge = &ZIP_ENTRY_PTR(move_candidate)->right; |
109 | 263k | } |
110 | 874k | } |
111 | | |
/* Insert elm into the tree at h. Zip-tree insertion: descend along the search
 * path until the first element with a lower rank than x is found, splice x in
 * at that position, and hang the displaced subtree below x, repairing the
 * search order with the MOVE_LEFT/MOVE_RIGHT walkers. Inserting an element
 * that is already in the tree is a no-op. */
void
__ZIP_INSERT(void *h, zip_cmp_cb cmp, unsigned short fieldoffset,
             unsigned short keyoffset, void *elm) {
    zip_elem *x = (zip_elem*)elm;
    ZIP_ENTRY_PTR(x)->left = NULL;
    ZIP_ENTRY_PTR(x)->right = NULL;

    const void *x_key = ZIP_KEY_PTR(x);
    zip_head *head = (zip_head*)h;
    if(!head->root) {
        /* Empty tree: x becomes the root */
        head->root = x;
        return;
    }

    /* Go down the tree to find the top element "cur" that has a rank smaller
     * than "x" */
    zip_elem *prev = NULL;
    zip_elem *cur = head->root;
    enum ZIP_CMP cur_order, prev_order = ZIP_CMP_EQ;
    do {
        cur_order = __ZIP_UNIQUE_CMP(cmp, x_key, ZIP_KEY_PTR(cur));
        if(cur_order == ZIP_CMP_EQ)
            return; /* x is already inserted */
        if(__ZIP_RANK_CMP(cur, x) == ZIP_CMP_LESS)
            break;
        /* cur outranks x: remember it as the parent and descend */
        prev = cur;
        prev_order = cur_order;
        cur = (cur_order == ZIP_CMP_MORE) ?
            ZIP_ENTRY_PTR(cur)->right : ZIP_ENTRY_PTR(cur)->left;
    } while(cur);

    /* Insert "x" instead of "cur" under its parent "prev" */
    if(cur == head->root) {
        head->root = x;
    } else {
        if(prev_order == ZIP_CMP_MORE)
            ZIP_ENTRY_PTR(prev)->right = x;
        else
            ZIP_ENTRY_PTR(prev)->left = x;
    }

    /* x was attached at a leaf position: nothing was displaced */
    if(!cur)
        return;

    /* Re-insert "cur" under "x". Repair by moving elements that ended up on the
     * wrong side of "x". */
    if(cur_order == ZIP_CMP_MORE) {
        ZIP_ENTRY_PTR(x)->left = cur;
        __ZIP_INSERT_MOVE_RIGHT(cmp, fieldoffset, keyoffset,
                                x_key, &ZIP_ENTRY_PTR(x)->right, cur);
    } else {
        ZIP_ENTRY_PTR(x)->right = cur;
        __ZIP_INSERT_MOVE_LEFT(cmp, fieldoffset, keyoffset,
                               x_key, &ZIP_ENTRY_PTR(x)->left, cur);
    }
}
168 | | |
169 | | void * |
170 | | __ZIP_REMOVE(void *h, zip_cmp_cb cmp, unsigned short fieldoffset, |
171 | 1.87M | unsigned short keyoffset, void *elm) { |
172 | 1.87M | zip_head *head = (zip_head*)h; |
173 | 1.87M | zip_elem *x = (zip_elem*)elm; |
174 | 1.87M | zip_elem *cur = head->root; |
175 | 1.87M | if(!cur) |
176 | 0 | return NULL; |
177 | | |
178 | 1.87M | const void *x_key = ZIP_KEY_PTR(x); |
179 | 1.87M | zip_elem **prev_edge = &head->root; |
180 | 1.87M | enum ZIP_CMP cur_order = __ZIP_UNIQUE_CMP(cmp, x_key, ZIP_KEY_PTR(cur)); |
181 | 3.10M | while(cur_order != ZIP_CMP_EQ) { |
182 | 1.22M | prev_edge = (cur_order == ZIP_CMP_LESS) ? |
183 | 957k | &ZIP_ENTRY_PTR(cur)->left : &ZIP_ENTRY_PTR(cur)->right; |
184 | 1.22M | cur = *prev_edge; |
185 | 1.22M | if(!cur) |
186 | 0 | return NULL; |
187 | 1.22M | cur_order = __ZIP_UNIQUE_CMP(cmp, x_key, ZIP_KEY_PTR(cur)); |
188 | 1.22M | } |
189 | 1.87M | *prev_edge = (zip_elem*)__ZIP_ZIP(fieldoffset, |
190 | 1.87M | ZIP_ENTRY_PTR(cur)->left, |
191 | 1.87M | ZIP_ENTRY_PTR(cur)->right); |
192 | 1.87M | return cur; |
193 | 1.87M | } |
194 | | |
195 | | void * |
196 | | __ZIP_ITER(unsigned short fieldoffset, zip_iter_cb cb, |
197 | 4.15M | void *context, void *elm) { |
198 | 4.15M | if(!elm) |
199 | 2.09M | return NULL; |
200 | 2.05M | zip_elem *left = ZIP_ENTRY_PTR(elm)->left; |
201 | 2.05M | zip_elem *right = ZIP_ENTRY_PTR(elm)->right; |
202 | 2.05M | void *res = __ZIP_ITER(fieldoffset, cb, context, left); |
203 | 2.05M | if(res) |
204 | 0 | return res; |
205 | 2.05M | res = cb(context, elm); |
206 | 2.05M | if(res) |
207 | 0 | return res; |
208 | 2.05M | return __ZIP_ITER(fieldoffset, cb, context, right); |
209 | 2.05M | } |
210 | | |
211 | | void * |
212 | | __ZIP_ITER_KEY(zip_cmp_cb cmp, unsigned short fieldoffset, |
213 | | unsigned short keyoffset, const void *key, |
214 | 0 | zip_iter_cb cb, void *context, void *elm) { |
215 | 0 | if(!elm) |
216 | 0 | return NULL; |
217 | | |
218 | 0 | void *res; |
219 | 0 | enum ZIP_CMP eq = cmp(key, ZIP_KEY_PTR(elm)); |
220 | 0 | if(eq != ZIP_CMP_MORE) { |
221 | 0 | res = __ZIP_ITER_KEY(cmp, fieldoffset, keyoffset, key, |
222 | 0 | cb, context, ZIP_ENTRY_PTR(elm)->left); |
223 | 0 | if(res) |
224 | 0 | return res; |
225 | 0 | } |
226 | | |
227 | 0 | if(eq == ZIP_CMP_EQ) { |
228 | 0 | res = cb(context, elm); |
229 | 0 | if(res) |
230 | 0 | return res; |
231 | 0 | } |
232 | | |
233 | 0 | if(eq != ZIP_CMP_LESS) { |
234 | 0 | res = __ZIP_ITER_KEY(cmp, fieldoffset, keyoffset, key, |
235 | 0 | cb, context, ZIP_ENTRY_PTR(elm)->right); |
236 | 0 | if(res) |
237 | 0 | return res; |
238 | 0 | } |
239 | | |
240 | 0 | return NULL; |
241 | 0 | } |
242 | | |
243 | | void * |
244 | 1.87M | __ZIP_ZIP(unsigned short fieldoffset, void *left, void *right) { |
245 | 1.87M | if(!left) |
246 | 1.59M | return right; |
247 | 275k | if(!right) |
248 | 97.8k | return left; |
249 | 177k | zip_elem *l = (zip_elem*)left; |
250 | 177k | zip_elem *r = (zip_elem*)right; |
251 | 177k | zip_elem *root = NULL; |
252 | 177k | zip_elem **prev_edge = &root; |
253 | 362k | while(l && r) { |
254 | 184k | if(__ZIP_RANK_CMP(l, r) == ZIP_CMP_LESS) { |
255 | 109k | *prev_edge = r; |
256 | 109k | prev_edge = &ZIP_ENTRY_PTR(r)->left; |
257 | 109k | r = ZIP_ENTRY_PTR(r)->left; |
258 | 109k | } else { |
259 | 75.7k | *prev_edge = l; |
260 | 75.7k | prev_edge = &ZIP_ENTRY_PTR(l)->right; |
261 | 75.7k | l = ZIP_ENTRY_PTR(l)->right; |
262 | 75.7k | } |
263 | 184k | } |
264 | 177k | *prev_edge = (l) ? l : r; |
265 | 177k | return root; |
266 | 275k | } |
267 | | |
268 | | /* Walk down from cur and move all elements <= split-key to the left side. All |
269 | | * elements that are moved over have to be below left_rightmost. Returns the |
270 | | * hierarchy of elements that remain on the right side. */ |
271 | | static void |
272 | | __ZIP_UNZIP_MOVE_LEFT(zip_cmp_cb cmp, unsigned short fieldoffset, |
273 | | unsigned short keyoffset, const void *key, |
274 | 0 | zip_elem **fix_edge, zip_elem *cur) { |
275 | 0 | while(ZIP_ENTRY_PTR(cur)->left) { |
276 | 0 | zip_elem *next = ZIP_ENTRY_PTR(cur)->left; |
277 | 0 | if(cmp(key, ZIP_KEY_PTR(next)) == ZIP_CMP_LESS) { |
278 | 0 | cur = next; |
279 | 0 | continue; |
280 | 0 | } |
281 | 0 | *fix_edge = next; |
282 | 0 | ZIP_ENTRY_PTR(cur)->left = ZIP_ENTRY_PTR(next)->right; |
283 | 0 | ZIP_ENTRY_PTR(next)->right = NULL; |
284 | 0 | fix_edge = &ZIP_ENTRY_PTR(next)->right; |
285 | 0 | } |
286 | 0 | } |
287 | | |
288 | | static void |
289 | | __ZIP_UNZIP_MOVE_RIGHT(zip_cmp_cb cmp, unsigned short fieldoffset, |
290 | | unsigned short keyoffset, const void *key, |
291 | 0 | zip_elem **fix_edge, zip_elem *cur) { |
292 | 0 | while(ZIP_ENTRY_PTR(cur)->right) { |
293 | 0 | zip_elem *next = ZIP_ENTRY_PTR(cur)->right; |
294 | 0 | if(cmp(key, ZIP_KEY_PTR(next)) != ZIP_CMP_LESS) { |
295 | 0 | cur = next; |
296 | 0 | continue; |
297 | 0 | } |
298 | 0 | *fix_edge = next; |
299 | 0 | ZIP_ENTRY_PTR(cur)->right = ZIP_ENTRY_PTR(next)->left; |
300 | 0 | ZIP_ENTRY_PTR(next)->left = NULL; |
301 | 0 | fix_edge = &ZIP_ENTRY_PTR(next)->left; |
302 | 0 | } |
303 | 0 | } |
304 | | |
/* Split the tree into a left side with keys <= split-key and a right side with
 * key > split-key. */
void
__ZIP_UNZIP(zip_cmp_cb cmp, unsigned short fieldoffset,
            unsigned short keyoffset, const void *key,
            void *h, void *l, void *r) {
    zip_elem *prev;
    zip_head *head = (zip_head*)h;
    zip_head *left = (zip_head*)l;
    zip_head *right = (zip_head*)r;
    if(!head->root) {
        /* Splitting the empty tree yields two empty trees */
        left->root = NULL;
        right->root = NULL;
        return;
    }
    zip_elem *cur = head->root;
    if(cmp(key, ZIP_KEY_PTR(cur)) != ZIP_CMP_LESS) {
        /* The root belongs to the left side. Walk down its right spine to the
         * first element larger than the split-key; that element becomes the
         * root of the right tree. */
        left->root = cur;
        do {
            prev = cur;
            cur = ZIP_ENTRY_PTR(cur)->right;
            if(!cur) {
                /* No element is larger than the split-key */
                right->root = NULL;
                return;
            }
        } while(cmp(key, ZIP_KEY_PTR(cur)) != ZIP_CMP_LESS);
        ZIP_ENTRY_PTR(prev)->right = NULL;
        right->root = cur;
        /* Elements <= split-key below cur are moved back to the left side,
         * re-attached at the cut edge prev->right */
        __ZIP_UNZIP_MOVE_LEFT(cmp, fieldoffset, keyoffset, key,
                              &ZIP_ENTRY_PTR(prev)->right, cur);
    } else {
        /* Mirror case: the root belongs to the right side. Walk down its left
         * spine to the first element <= split-key; that element becomes the
         * root of the left tree. */
        right->root = cur;
        do {
            prev = cur;
            cur = ZIP_ENTRY_PTR(cur)->left;
            if(!cur) {
                /* No element is <= the split-key */
                left->root = NULL;
                return;
            }
        } while(cmp(key, ZIP_KEY_PTR(cur)) == ZIP_CMP_LESS);
        ZIP_ENTRY_PTR(prev)->left = NULL;
        left->root = cur;
        /* Elements > split-key below cur are moved back to the right side,
         * re-attached at the cut edge prev->left */
        __ZIP_UNZIP_MOVE_RIGHT(cmp, fieldoffset, keyoffset, key,
                               &ZIP_ENTRY_PTR(prev)->left, cur);
    }
}