Coverage Report

Created: 2026-02-14 06:32

next uncovered line (L), next uncovered region (R), next uncovered branch (B)
/src/open62541/deps/ziptree.c
Line
Count
Source
1
/* This Source Code Form is subject to the terms of the Mozilla Public
2
 * License, v. 2.0. If a copy of the MPL was not distributed with this
3
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
4
 *
5
 *    Copyright 2021-2022 (c) Julius Pfrommer
6
 */
7
8
#include "ziptree.h"
9
10
/* Dummy types. The generic tree code operates on an opaque element type; the
 * concrete entry and key locations inside an element are supplied at runtime
 * as byte offsets (fieldoffset / keyoffset). */
struct zip_elem;
typedef struct zip_elem zip_elem;
typedef ZIP_ENTRY(zip_elem) zip_entry;
typedef ZIP_HEAD(, zip_elem) zip_head;

/* Access macros. Both expand relative to a "fieldoffset" / "keyoffset"
 * variable that must be in scope at the point of use (every function below
 * takes them as parameters). */
#define ZIP_ENTRY_PTR(x) ((zip_entry*)((char*)x + fieldoffset))
#define ZIP_KEY_PTR(x) (const void*)((const char*)x + keyoffset)
19
20
/* Hash pointers to keep the tie-breaking of equal keys (mostly) uncorrelated
 * from the rank (pointer order). Hashing code taken from sdbm-hash
 * (http://www.cse.yorku.ca/~oz/hash.html). */
23
/* sdbm-hash over the bytes of the pointer value itself. Yields a
 * pseudo-random rank per element that is stable for the element's address. */
static unsigned int
__ZIP_PTR_HASH(const void *p) {
    const unsigned char *bytes = (const unsigned char*)&p;
    unsigned int hash = 0;
    size_t i = 0;
    while(i < sizeof(void*)) {
        hash = bytes[i] + (hash << 6) + (hash << 16) - hash;
        i++;
    }
    return hash;
}
31
32
static ZIP_INLINE enum ZIP_CMP
33
22.5M
__ZIP_RANK_CMP(const void *p1, const void *p2) {
34
    /* assert(p1 != p2); */
35
22.5M
    unsigned int h1 = __ZIP_PTR_HASH(p1);
36
22.5M
    unsigned int h2 = __ZIP_PTR_HASH(p2);
37
22.5M
    if(h1 == h2)
38
0
        return (p1 < p2) ? ZIP_CMP_LESS : ZIP_CMP_MORE;
39
22.5M
    return (h1 < h2) ? ZIP_CMP_LESS : ZIP_CMP_MORE;
40
22.5M
}
41
42
static ZIP_INLINE enum ZIP_CMP
43
33.4M
__ZIP_UNIQUE_CMP(zip_cmp_cb cmp, const void *p1, const void *p2) {
44
33.4M
    if(p1 == p2)
45
5.10M
        return ZIP_CMP_EQ;
46
28.3M
    enum ZIP_CMP order = cmp(p1, p2);
47
28.3M
    if(order == ZIP_CMP_EQ)
48
669k
        return (p1 < p2) ? ZIP_CMP_LESS : ZIP_CMP_MORE;
49
27.6M
    return order;
50
28.3M
}
51
52
#if 0
53
#include <assert.h>
54
ZIP_UNUSED static ZIP_INLINE void
55
__ZIP_VALIDATE(zip_cmp_cb cmp, unsigned short fieldoffset,
56
               unsigned short keyoffset, void *elm,
57
               void *min_elm, void *max_elm) {
58
    if(!elm)
59
        return;
60
    enum ZIP_CMP c1 = __ZIP_UNIQUE_CMP(cmp, ZIP_KEY_PTR(min_elm), ZIP_KEY_PTR(elm));
61
    assert((elm == min_elm && c1 == ZIP_CMP_EQ) || c1 == ZIP_CMP_LESS);
62
63
    enum ZIP_CMP c2 = __ZIP_UNIQUE_CMP(cmp, ZIP_KEY_PTR(max_elm), ZIP_KEY_PTR(elm));
64
    assert((elm == max_elm && c2 == ZIP_CMP_EQ) || c2 == ZIP_CMP_MORE);
65
66
    assert(!ZIP_ENTRY_PTR(elm)->right ||
67
           __ZIP_RANK_CMP(elm, ZIP_ENTRY_PTR(elm)->right) == ZIP_CMP_MORE);
68
    assert(!ZIP_ENTRY_PTR(elm)->left ||
69
           __ZIP_RANK_CMP(elm, ZIP_ENTRY_PTR(elm)->left) == ZIP_CMP_MORE);
70
71
    __ZIP_VALIDATE(cmp, fieldoffset, keyoffset, ZIP_ENTRY_PTR(elm)->right, elm, max_elm);
72
    __ZIP_VALIDATE(cmp, fieldoffset, keyoffset, ZIP_ENTRY_PTR(elm)->left, min_elm, elm);
73
}
74
#endif
75
76
/* Walk down the right-side spine of cur. Elements that are larger than x_key
77
 * are moved under x->right. */
78
static void
79
__ZIP_INSERT_MOVE_RIGHT(zip_cmp_cb cmp, unsigned short fieldoffset,
80
                        unsigned short keyoffset, const void *x_key,
81
1.40M
                        zip_elem **fix_edge, zip_elem *cur) {
82
3.29M
    while(ZIP_ENTRY_PTR(cur)->right) {
83
1.89M
        zip_elem *move_candidate = ZIP_ENTRY_PTR(cur)->right;
84
1.89M
        if(__ZIP_UNIQUE_CMP(cmp, x_key, ZIP_KEY_PTR(move_candidate)) == ZIP_CMP_MORE) {
85
776k
            cur = ZIP_ENTRY_PTR(cur)->right;
86
776k
            continue;
87
776k
        }
88
1.11M
        ZIP_ENTRY_PTR(cur)->right = ZIP_ENTRY_PTR(move_candidate)->left;
89
1.11M
        ZIP_ENTRY_PTR(move_candidate)->left = NULL;
90
1.11M
        *fix_edge = move_candidate;
91
1.11M
        fix_edge = &ZIP_ENTRY_PTR(move_candidate)->left;
92
1.11M
    }
93
1.40M
}
94
95
static void
96
__ZIP_INSERT_MOVE_LEFT(zip_cmp_cb cmp, unsigned short fieldoffset,
97
                       unsigned short keyoffset, const void *x_key,
98
3.21M
                       zip_elem **fix_edge, zip_elem *cur) {
99
5.33M
    while(ZIP_ENTRY_PTR(cur)->left) {
100
2.11M
        zip_elem *move_candidate = ZIP_ENTRY_PTR(cur)->left;
101
2.11M
        if(__ZIP_UNIQUE_CMP(cmp, x_key, ZIP_KEY_PTR(move_candidate)) == ZIP_CMP_LESS) {
102
1.03M
            cur = ZIP_ENTRY_PTR(cur)->left;
103
1.03M
            continue;
104
1.03M
        }
105
1.08M
        ZIP_ENTRY_PTR(cur)->left = ZIP_ENTRY_PTR(move_candidate)->right;
106
1.08M
        ZIP_ENTRY_PTR(move_candidate)->right = NULL;
107
1.08M
        *fix_edge = move_candidate;
108
1.08M
        fix_edge = &ZIP_ENTRY_PTR(move_candidate)->right;
109
1.08M
    }
110
3.21M
}
111
112
/* Insert elm into the tree at h. The position follows two orders at once:
 * the key order (binary-search-tree property) and the pointer-hash rank
 * (max-heap property). Descend by key until an element with a smaller rank
 * than x is found, put x there, and repair the displaced subtree. If an
 * element at the same unique position already exists, nothing is inserted. */
void
__ZIP_INSERT(void *h, zip_cmp_cb cmp, unsigned short fieldoffset,
             unsigned short keyoffset, void *elm) {
    zip_elem *x = (zip_elem*)elm;
    ZIP_ENTRY_PTR(x)->left = NULL;
    ZIP_ENTRY_PTR(x)->right = NULL;

    const void *x_key = ZIP_KEY_PTR(x);
    zip_head *head = (zip_head*)h;
    if(!head->root) {
        /* Empty tree: x becomes the root */
        head->root = x;
        return;
    }

    /* Go down the tree to find the top element "cur" that has a rank smaller
     * than "x" */
    zip_elem *prev = NULL;
    zip_elem *cur = head->root;
    enum ZIP_CMP cur_order, prev_order = ZIP_CMP_EQ;
    do {
        cur_order = __ZIP_UNIQUE_CMP(cmp, x_key, ZIP_KEY_PTR(cur));
        if(cur_order == ZIP_CMP_EQ)
            return; /* x is already inserted */
        if(__ZIP_RANK_CMP(cur, x) == ZIP_CMP_LESS)
            break; /* x outranks cur and must sit above it */
        prev = cur;
        prev_order = cur_order;
        cur = (cur_order == ZIP_CMP_MORE) ?
            ZIP_ENTRY_PTR(cur)->right : ZIP_ENTRY_PTR(cur)->left;
    } while(cur);

    /* Insert "x" instead of "cur" under its parent "prev" */
    if(cur == head->root) {
        head->root = x;
    } else {
        /* prev_order tells on which side of prev the descent continued */
        if(prev_order == ZIP_CMP_MORE)
            ZIP_ENTRY_PTR(prev)->right = x;
        else
            ZIP_ENTRY_PTR(prev)->left = x;
    }

    /* Reached a leaf position: nothing was displaced, no repair needed */
    if(!cur)
        return;

    /* Re-insert "cur" under "x". Repair by moving elements that ended up on the
     * wrong side of "x". */
    if(cur_order == ZIP_CMP_MORE) {
        /* cur < x: cur becomes x's left child; pull elements > x_key from
         * cur's right spine over to x->right */
        ZIP_ENTRY_PTR(x)->left = cur;
        __ZIP_INSERT_MOVE_RIGHT(cmp, fieldoffset, keyoffset,
                                x_key, &ZIP_ENTRY_PTR(x)->right, cur);
    } else {
        /* cur > x: cur becomes x's right child; pull elements < x_key from
         * cur's left spine over to x->left */
        ZIP_ENTRY_PTR(x)->right = cur;
        __ZIP_INSERT_MOVE_LEFT(cmp, fieldoffset, keyoffset,
                               x_key, &ZIP_ENTRY_PTR(x)->left, cur);
    }
}
168
169
void *
170
__ZIP_REMOVE(void *h, zip_cmp_cb cmp, unsigned short fieldoffset,
171
5.10M
             unsigned short keyoffset, void *elm) {
172
5.10M
    zip_head *head = (zip_head*)h;
173
5.10M
    zip_elem *x = (zip_elem*)elm;
174
5.10M
    zip_elem *cur = head->root;
175
5.10M
    if(!cur)
176
0
        return NULL;
177
178
5.10M
    const void *x_key = ZIP_KEY_PTR(x);
179
5.10M
    zip_elem **prev_edge = &head->root;
180
5.10M
    enum ZIP_CMP cur_order = __ZIP_UNIQUE_CMP(cmp, x_key, ZIP_KEY_PTR(cur));
181
7.97M
    while(cur_order != ZIP_CMP_EQ) {
182
2.86M
        prev_edge = (cur_order == ZIP_CMP_LESS) ?
183
2.64M
            &ZIP_ENTRY_PTR(cur)->left : &ZIP_ENTRY_PTR(cur)->right;
184
2.86M
        cur = *prev_edge;
185
2.86M
        if(!cur)
186
0
            return NULL;
187
2.86M
        cur_order = __ZIP_UNIQUE_CMP(cmp, x_key, ZIP_KEY_PTR(cur));
188
2.86M
    }
189
5.10M
    *prev_edge = (zip_elem*)__ZIP_ZIP(fieldoffset,
190
5.10M
                                      ZIP_ENTRY_PTR(cur)->left,
191
5.10M
                                      ZIP_ENTRY_PTR(cur)->right);
192
5.10M
    return cur;
193
5.10M
}
194
195
void *
196
__ZIP_ITER(unsigned short fieldoffset, zip_iter_cb cb,
197
7.98M
           void *context, void *elm) {
198
7.98M
    if(!elm)
199
3.86M
        return NULL;
200
4.11M
    zip_elem *left = ZIP_ENTRY_PTR(elm)->left;
201
4.11M
    zip_elem *right = ZIP_ENTRY_PTR(elm)->right;
202
4.11M
    void *res = __ZIP_ITER(fieldoffset, cb, context, left);
203
4.11M
    if(res)
204
271k
        return res;
205
3.84M
    res = cb(context, elm);
206
3.84M
    if(res)
207
112k
        return res;
208
3.73M
    return __ZIP_ITER(fieldoffset, cb, context, right);
209
3.84M
}
210
211
void *
212
__ZIP_ITER_KEY(zip_cmp_cb cmp, unsigned short fieldoffset,
213
               unsigned short keyoffset, const void *key,
214
0
               zip_iter_cb cb, void *context, void *elm) {
215
0
    if(!elm)
216
0
        return NULL;
217
218
0
    void *res;
219
0
    enum ZIP_CMP eq = cmp(key, ZIP_KEY_PTR(elm));
220
0
    if(eq != ZIP_CMP_MORE) {
221
0
        res = __ZIP_ITER_KEY(cmp, fieldoffset, keyoffset, key,
222
0
                             cb, context, ZIP_ENTRY_PTR(elm)->left);
223
0
        if(res)
224
0
            return res;
225
0
    }
226
227
0
    if(eq == ZIP_CMP_EQ) {
228
0
        res = cb(context, elm);
229
0
        if(res)
230
0
            return res;
231
0
    }
232
233
0
    if(eq != ZIP_CMP_LESS) {
234
0
        res = __ZIP_ITER_KEY(cmp, fieldoffset, keyoffset, key,
235
0
                             cb, context, ZIP_ENTRY_PTR(elm)->right);
236
0
        if(res)
237
0
            return res;
238
0
    }
239
240
0
    return NULL;
241
0
}
242
243
void *
244
5.10M
__ZIP_ZIP(unsigned short fieldoffset, void *left, void *right) {
245
5.10M
    if(!left)
246
4.00M
        return right;
247
1.09M
    if(!right)
248
222k
        return left;
249
873k
    zip_elem *l = (zip_elem*)left;
250
873k
    zip_elem *r = (zip_elem*)right;
251
873k
    zip_elem *root = NULL;
252
873k
    zip_elem **prev_edge = &root;
253
1.93M
    while(l && r) {
254
1.06M
        if(__ZIP_RANK_CMP(l, r) == ZIP_CMP_LESS) {
255
587k
            *prev_edge = r;
256
587k
            prev_edge = &ZIP_ENTRY_PTR(r)->left;
257
587k
            r = ZIP_ENTRY_PTR(r)->left;
258
587k
        } else {
259
474k
            *prev_edge = l;
260
474k
            prev_edge = &ZIP_ENTRY_PTR(l)->right;
261
474k
            l = ZIP_ENTRY_PTR(l)->right;
262
474k
        }
263
1.06M
    }
264
873k
    *prev_edge = (l) ? l : r;
265
873k
    return root;
266
1.09M
}
267
268
/* Walk down from cur and move all elements <= split-key to the left side. All
269
 * elements that are moved over have to be below left_rightmost. Returns the
270
 * hierarchy of elements that remain on the right side. */
271
static void
272
__ZIP_UNZIP_MOVE_LEFT(zip_cmp_cb cmp, unsigned short fieldoffset,
273
                      unsigned short keyoffset, const void *key,
274
0
                      zip_elem **fix_edge, zip_elem *cur) {
275
0
    while(ZIP_ENTRY_PTR(cur)->left) {
276
0
        zip_elem *next = ZIP_ENTRY_PTR(cur)->left;
277
0
        if(cmp(key, ZIP_KEY_PTR(next)) == ZIP_CMP_LESS) {
278
0
            cur = next;
279
0
            continue;
280
0
        }
281
0
        *fix_edge = next;
282
0
        ZIP_ENTRY_PTR(cur)->left = ZIP_ENTRY_PTR(next)->right;
283
0
        ZIP_ENTRY_PTR(next)->right = NULL;
284
0
        fix_edge = &ZIP_ENTRY_PTR(next)->right;
285
0
    }
286
0
}
287
288
static void
289
__ZIP_UNZIP_MOVE_RIGHT(zip_cmp_cb cmp, unsigned short fieldoffset,
290
                       unsigned short keyoffset, const void *key,
291
0
                       zip_elem **fix_edge, zip_elem *cur) {
292
0
    while(ZIP_ENTRY_PTR(cur)->right) {
293
0
        zip_elem *next = ZIP_ENTRY_PTR(cur)->right;
294
0
        if(cmp(key, ZIP_KEY_PTR(next)) != ZIP_CMP_LESS) {
295
0
            cur = next;
296
0
            continue;
297
0
        }
298
0
        *fix_edge = next;
299
0
        ZIP_ENTRY_PTR(cur)->right = ZIP_ENTRY_PTR(next)->left;
300
0
        ZIP_ENTRY_PTR(next)->left = NULL;
301
0
        fix_edge = &ZIP_ENTRY_PTR(next)->left;
302
0
    }
303
0
}
304
305
/* Split the tree into a left side with keys <= split-key and a right side with
 * keys > split-key. Walks the spine that stays with the root's side until the
 * first element of the other side, cuts there, and repairs with the matching
 * MOVE helper. */
void
__ZIP_UNZIP(zip_cmp_cb cmp, unsigned short fieldoffset,
            unsigned short keyoffset, const void *key,
            void *h, void *l, void *r) {
    zip_elem *prev;
    zip_head *head = (zip_head*)h;
    zip_head *left = (zip_head*)l;
    zip_head *right = (zip_head*)r;
    if(!head->root) {
        /* Empty tree: both outputs are empty */
        left->root = NULL;
        right->root = NULL;
        return;
    }
    zip_elem *cur = head->root;
    if(cmp(key, ZIP_KEY_PTR(cur)) != ZIP_CMP_LESS) {
        /* Root key <= split-key: the root stays on the left side */
        left->root = cur;
        /* Descend right until the first element > split-key */
        do {
            prev = cur;
            cur = ZIP_ENTRY_PTR(cur)->right;
            if(!cur) {
                /* All elements are <= split-key */
                right->root = NULL;
                return;
            }
        } while(cmp(key, ZIP_KEY_PTR(cur)) != ZIP_CMP_LESS);
        /* Cut between prev (left side) and cur (right side) */
        ZIP_ENTRY_PTR(prev)->right = NULL;
        right->root = cur;
        __ZIP_UNZIP_MOVE_LEFT(cmp, fieldoffset, keyoffset, key,
                              &ZIP_ENTRY_PTR(prev)->right, cur);
    } else {
        /* Root key > split-key: the root stays on the right side */
        right->root = cur;
        /* Descend left until the first element <= split-key */
        do {
            prev = cur;
            cur = ZIP_ENTRY_PTR(cur)->left;
            if(!cur) {
                /* All elements are > split-key */
                left->root = NULL;
                return;
            }
        } while(cmp(key, ZIP_KEY_PTR(cur)) == ZIP_CMP_LESS);
        /* Cut between prev (right side) and cur (left side) */
        ZIP_ENTRY_PTR(prev)->left = NULL;
        left->root = cur;
        __ZIP_UNZIP_MOVE_RIGHT(cmp, fieldoffset, keyoffset, key,
                               &ZIP_ENTRY_PTR(prev)->left, cur);
    }
}