Coverage Report

Created: 2025-09-27 06:34

next uncovered line (L), next uncovered region (R), next uncovered branch (B)
/src/open62541/deps/ziptree.c
Line
Count
Source
1
/* This Source Code Form is subject to the terms of the Mozilla Public
2
 * License, v. 2.0. If a copy of the MPL was not distributed with this
3
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
4
 *
5
 *    Copyright 2021-2022 (c) Julius Pfrommer
6
 */
7
8
#include "ziptree.h"
9
10
/* Dummy types */
11
struct zip_elem;
12
typedef struct zip_elem zip_elem;
13
typedef ZIP_ENTRY(zip_elem) zip_entry;
14
typedef ZIP_HEAD(, zip_elem) zip_head;
15
16
/* Access macros */
17
85.8M
#define ZIP_ENTRY_PTR(x) ((zip_entry*)((char*)x + fieldoffset))
18
39.8M
#define ZIP_KEY_PTR(x) (const void*)((const char*)x + keyoffset)
19
20
/* Hash pointers to keep the tie-breaking of equal keys (mostly) uncorrelated
21
 * from the rank (pointer order). Hashing code taken from sdbm-hash
22
 * (http://www.cse.yorku.ca/~oz/hash.html). */
23
static unsigned int
24
38.1M
__ZIP_PTR_HASH(const void *p) {
25
38.1M
    unsigned int h = 0;
26
38.1M
    const unsigned char *data = (const unsigned char*)&p;
27
343M
    for(size_t i = 0; i < (sizeof(void*) / sizeof(char)); i++)
28
304M
        h = data[i] + (h << 6) + (h << 16) - h;
29
38.1M
    return h;
30
38.1M
}
31
32
static ZIP_INLINE enum ZIP_CMP
33
19.0M
__ZIP_RANK_CMP(const void *p1, const void *p2) {
34
    /* assert(p1 != p2); */
35
19.0M
    unsigned int h1 = __ZIP_PTR_HASH(p1);
36
19.0M
    unsigned int h2 = __ZIP_PTR_HASH(p2);
37
19.0M
    if(h1 == h2)
38
0
        return (p1 < p2) ? ZIP_CMP_LESS : ZIP_CMP_MORE;
39
19.0M
    return (h1 < h2) ? ZIP_CMP_LESS : ZIP_CMP_MORE;
40
19.0M
}
41
42
static ZIP_INLINE enum ZIP_CMP
43
28.1M
__ZIP_UNIQUE_CMP(zip_cmp_cb cmp, const void *p1, const void *p2) {
44
28.1M
    if(p1 == p2)
45
4.68M
        return ZIP_CMP_EQ;
46
23.5M
    enum ZIP_CMP order = cmp(p1, p2);
47
23.5M
    if(order == ZIP_CMP_EQ)
48
605k
        return (p1 < p2) ? ZIP_CMP_LESS : ZIP_CMP_MORE;
49
22.9M
    return order;
50
23.5M
}
51
52
#if 0
53
#include <assert.h>
54
ZIP_UNUSED static ZIP_INLINE void
55
__ZIP_VALIDATE(zip_cmp_cb cmp, unsigned short fieldoffset,
56
               unsigned short keyoffset, void *elm,
57
               void *min_elm, void *max_elm) {
58
    if(!elm)
59
        return;
60
    enum ZIP_CMP c1 = __ZIP_UNIQUE_CMP(cmp, ZIP_KEY_PTR(min_elm), ZIP_KEY_PTR(elm));
61
    assert((elm == min_elm && c1 == ZIP_CMP_EQ) || c1 == ZIP_CMP_LESS);
62
63
    enum ZIP_CMP c2 = __ZIP_UNIQUE_CMP(cmp, ZIP_KEY_PTR(max_elm), ZIP_KEY_PTR(elm));
64
    assert((elm == max_elm && c2 == ZIP_CMP_EQ) || c2 == ZIP_CMP_MORE);
65
66
    assert(!ZIP_ENTRY_PTR(elm)->right ||
67
           __ZIP_RANK_CMP(elm, ZIP_ENTRY_PTR(elm)->right) == ZIP_CMP_MORE);
68
    assert(!ZIP_ENTRY_PTR(elm)->left ||
69
           __ZIP_RANK_CMP(elm, ZIP_ENTRY_PTR(elm)->left) == ZIP_CMP_MORE);
70
71
    __ZIP_VALIDATE(cmp, fieldoffset, keyoffset, ZIP_ENTRY_PTR(elm)->right, elm, max_elm);
72
    __ZIP_VALIDATE(cmp, fieldoffset, keyoffset, ZIP_ENTRY_PTR(elm)->left, min_elm, elm);
73
}
74
#endif
75
76
/* Walk down the right-side spine of cur. Elements that are larger than x_key
77
 * are moved under x->right. */
78
static void
79
__ZIP_INSERT_MOVE_RIGHT(zip_cmp_cb cmp, unsigned short fieldoffset,
80
                        unsigned short keyoffset, const void *x_key,
81
1.26M
                        zip_elem **fix_edge, zip_elem *cur) {
82
3.10M
    while(ZIP_ENTRY_PTR(cur)->right) {
83
1.84M
        zip_elem *move_candidate = ZIP_ENTRY_PTR(cur)->right;
84
1.84M
        if(__ZIP_UNIQUE_CMP(cmp, x_key, ZIP_KEY_PTR(move_candidate)) == ZIP_CMP_MORE) {
85
721k
            cur = ZIP_ENTRY_PTR(cur)->right;
86
721k
            continue;
87
721k
        }
88
1.12M
        ZIP_ENTRY_PTR(cur)->right = ZIP_ENTRY_PTR(move_candidate)->left;
89
1.12M
        ZIP_ENTRY_PTR(move_candidate)->left = NULL;
90
1.12M
        *fix_edge = move_candidate;
91
1.12M
        fix_edge = &ZIP_ENTRY_PTR(move_candidate)->left;
92
1.12M
    }
93
1.26M
}
94
95
static void
96
__ZIP_INSERT_MOVE_LEFT(zip_cmp_cb cmp, unsigned short fieldoffset,
97
                       unsigned short keyoffset, const void *x_key,
98
2.75M
                       zip_elem **fix_edge, zip_elem *cur) {
99
4.97M
    while(ZIP_ENTRY_PTR(cur)->left) {
100
2.21M
        zip_elem *move_candidate = ZIP_ENTRY_PTR(cur)->left;
101
2.21M
        if(__ZIP_UNIQUE_CMP(cmp, x_key, ZIP_KEY_PTR(move_candidate)) == ZIP_CMP_LESS) {
102
1.21M
            cur = ZIP_ENTRY_PTR(cur)->left;
103
1.21M
            continue;
104
1.21M
        }
105
1.00M
        ZIP_ENTRY_PTR(cur)->left = ZIP_ENTRY_PTR(move_candidate)->right;
106
1.00M
        ZIP_ENTRY_PTR(move_candidate)->right = NULL;
107
1.00M
        *fix_edge = move_candidate;
108
1.00M
        fix_edge = &ZIP_ENTRY_PTR(move_candidate)->right;
109
1.00M
    }
110
2.75M
}
111
112
void
113
__ZIP_INSERT(void *h, zip_cmp_cb cmp, unsigned short fieldoffset,
114
7.02M
             unsigned short keyoffset, void *elm) {
115
7.02M
    zip_elem *x = (zip_elem*)elm;
116
7.02M
    ZIP_ENTRY_PTR(x)->left = NULL;
117
7.02M
    ZIP_ENTRY_PTR(x)->right = NULL;
118
119
7.02M
    const void *x_key = ZIP_KEY_PTR(x);
120
7.02M
    zip_head *head = (zip_head*)h;
121
7.02M
    if(!head->root) {
122
1.33M
        head->root = x;
123
1.33M
        return;
124
1.33M
    }
125
126
    /* Go down the tree to find the top element "cur" that has a rank smaller
127
     * than "x" */
128
5.68M
    zip_elem *prev = NULL;
129
5.68M
    zip_elem *cur = head->root;
130
5.68M
    enum ZIP_CMP cur_order, prev_order = ZIP_CMP_EQ;
131
18.0M
    do {
132
18.0M
        cur_order = __ZIP_UNIQUE_CMP(cmp, x_key, ZIP_KEY_PTR(cur));
133
18.0M
        if(cur_order == ZIP_CMP_EQ)
134
0
            return; /* x is already inserted */
135
18.0M
        if(__ZIP_RANK_CMP(cur, x) == ZIP_CMP_LESS)
136
4.02M
            break;
137
14.0M
        prev = cur;
138
14.0M
        prev_order = cur_order;
139
14.0M
        cur = (cur_order == ZIP_CMP_MORE) ?
140
7.50M
            ZIP_ENTRY_PTR(cur)->right : ZIP_ENTRY_PTR(cur)->left;
141
14.0M
    } while(cur);
142
143
    /* Insert "x" instead of "cur" under its parent "prev" */
144
5.68M
    if(cur == head->root) {
145
2.45M
        head->root = x;
146
3.22M
    } else {
147
3.22M
        if(prev_order == ZIP_CMP_MORE)
148
1.11M
            ZIP_ENTRY_PTR(prev)->right = x;
149
2.11M
        else
150
2.11M
            ZIP_ENTRY_PTR(prev)->left = x;
151
3.22M
    }
152
153
5.68M
    if(!cur)
154
1.65M
        return;
155
156
    /* Re-insert "cur" under "x". Repair by moving elements that ended up on the
157
     * wrong side of "x". */
158
4.02M
    if(cur_order == ZIP_CMP_MORE) {
159
1.26M
        ZIP_ENTRY_PTR(x)->left = cur;
160
1.26M
        __ZIP_INSERT_MOVE_RIGHT(cmp, fieldoffset, keyoffset,
161
1.26M
                                x_key, &ZIP_ENTRY_PTR(x)->right, cur);
162
2.75M
    } else {
163
2.75M
        ZIP_ENTRY_PTR(x)->right = cur;
164
2.75M
        __ZIP_INSERT_MOVE_LEFT(cmp, fieldoffset, keyoffset,
165
2.75M
                               x_key, &ZIP_ENTRY_PTR(x)->left, cur);
166
2.75M
    }
167
4.02M
}
168
169
void *
170
__ZIP_REMOVE(void *h, zip_cmp_cb cmp, unsigned short fieldoffset,
171
4.68M
             unsigned short keyoffset, void *elm) {
172
4.68M
    zip_head *head = (zip_head*)h;
173
4.68M
    zip_elem *x = (zip_elem*)elm;
174
4.68M
    zip_elem *cur = head->root;
175
4.68M
    if(!cur)
176
0
        return NULL;
177
178
4.68M
    const void *x_key = ZIP_KEY_PTR(x);
179
4.68M
    zip_elem **prev_edge = &head->root;
180
4.68M
    enum ZIP_CMP cur_order = __ZIP_UNIQUE_CMP(cmp, x_key, ZIP_KEY_PTR(cur));
181
6.07M
    while(cur_order != ZIP_CMP_EQ) {
182
1.38M
        prev_edge = (cur_order == ZIP_CMP_LESS) ?
183
1.20M
            &ZIP_ENTRY_PTR(cur)->left : &ZIP_ENTRY_PTR(cur)->right;
184
1.38M
        cur = *prev_edge;
185
1.38M
        if(!cur)
186
0
            return NULL;
187
1.38M
        cur_order = __ZIP_UNIQUE_CMP(cmp, x_key, ZIP_KEY_PTR(cur));
188
1.38M
    }
189
4.68M
    *prev_edge = (zip_elem*)__ZIP_ZIP(fieldoffset,
190
4.68M
                                      ZIP_ENTRY_PTR(cur)->left,
191
4.68M
                                      ZIP_ENTRY_PTR(cur)->right);
192
4.68M
    return cur;
193
4.68M
}
194
195
void *
196
__ZIP_ITER(unsigned short fieldoffset, zip_iter_cb cb,
197
11.2M
           void *context, void *elm) {
198
11.2M
    if(!elm)
199
5.69M
        return NULL;
200
5.57M
    zip_elem *left = ZIP_ENTRY_PTR(elm)->left;
201
5.57M
    zip_elem *right = ZIP_ENTRY_PTR(elm)->right;
202
5.57M
    void *res = __ZIP_ITER(fieldoffset, cb, context, left);
203
5.57M
    if(res)
204
646
        return res;
205
5.57M
    res = cb(context, elm);
206
5.57M
    if(res)
207
1.27k
        return res;
208
5.57M
    return __ZIP_ITER(fieldoffset, cb, context, right);
209
5.57M
}
210
211
void *
212
__ZIP_ITER_KEY(zip_cmp_cb cmp, unsigned short fieldoffset,
213
               unsigned short keyoffset, const void *key,
214
0
               zip_iter_cb cb, void *context, void *elm) {
215
0
    if(!elm)
216
0
        return NULL;
217
218
0
    void *res;
219
0
    enum ZIP_CMP eq = cmp(key, ZIP_KEY_PTR(elm));
220
0
    if(eq != ZIP_CMP_MORE) {
221
0
        res = __ZIP_ITER_KEY(cmp, fieldoffset, keyoffset, key,
222
0
                             cb, context, ZIP_ENTRY_PTR(elm)->left);
223
0
        if(res)
224
0
            return res;
225
0
    }
226
227
0
    if(eq == ZIP_CMP_EQ) {
228
0
        res = cb(context, elm);
229
0
        if(res)
230
0
            return res;
231
0
    }
232
233
0
    if(eq != ZIP_CMP_LESS) {
234
0
        res = __ZIP_ITER_KEY(cmp, fieldoffset, keyoffset, key,
235
0
                             cb, context, ZIP_ENTRY_PTR(elm)->right);
236
0
        if(res)
237
0
            return res;
238
0
    }
239
240
0
    return NULL;
241
0
}
242
243
void *
244
4.68M
__ZIP_ZIP(unsigned short fieldoffset, void *left, void *right) {
245
4.68M
    if(!left)
246
3.65M
        return right;
247
1.02M
    if(!right)
248
202k
        return left;
249
819k
    zip_elem *l = (zip_elem*)left;
250
819k
    zip_elem *r = (zip_elem*)right;
251
819k
    zip_elem *root = NULL;
252
819k
    zip_elem **prev_edge = &root;
253
1.82M
    while(l && r) {
254
1.00M
        if(__ZIP_RANK_CMP(l, r) == ZIP_CMP_LESS) {
255
558k
            *prev_edge = r;
256
558k
            prev_edge = &ZIP_ENTRY_PTR(r)->left;
257
558k
            r = ZIP_ENTRY_PTR(r)->left;
258
558k
        } else {
259
445k
            *prev_edge = l;
260
445k
            prev_edge = &ZIP_ENTRY_PTR(l)->right;
261
445k
            l = ZIP_ENTRY_PTR(l)->right;
262
445k
        }
263
1.00M
    }
264
819k
    *prev_edge = (l) ? l : r;
265
819k
    return root;
266
1.02M
}
267
268
/* Walk down from cur and move all elements <= split-key to the left side. All
269
 * elements that are moved over have to be below left_rightmost. Returns the
270
 * hierarchy of elements that remain on the right side. */
271
static void
272
__ZIP_UNZIP_MOVE_LEFT(zip_cmp_cb cmp, unsigned short fieldoffset,
273
                      unsigned short keyoffset, const void *key,
274
0
                      zip_elem **fix_edge, zip_elem *cur) {
275
0
    while(ZIP_ENTRY_PTR(cur)->left) {
276
0
        zip_elem *next = ZIP_ENTRY_PTR(cur)->left;
277
0
        if(cmp(key, ZIP_KEY_PTR(next)) == ZIP_CMP_LESS) {
278
0
            cur = next;
279
0
            continue;
280
0
        }
281
0
        *fix_edge = next;
282
0
        ZIP_ENTRY_PTR(cur)->left = ZIP_ENTRY_PTR(next)->right;
283
0
        ZIP_ENTRY_PTR(next)->right = NULL;
284
0
        fix_edge = &ZIP_ENTRY_PTR(next)->right;
285
0
    }
286
0
}
287
288
static void
289
__ZIP_UNZIP_MOVE_RIGHT(zip_cmp_cb cmp, unsigned short fieldoffset,
290
                       unsigned short keyoffset, const void *key,
291
0
                       zip_elem **fix_edge, zip_elem *cur) {
292
0
    while(ZIP_ENTRY_PTR(cur)->right) {
293
0
        zip_elem *next = ZIP_ENTRY_PTR(cur)->right;
294
0
        if(cmp(key, ZIP_KEY_PTR(next)) != ZIP_CMP_LESS) {
295
0
            cur = next;
296
0
            continue;
297
0
        }
298
0
        *fix_edge = next;
299
0
        ZIP_ENTRY_PTR(cur)->right = ZIP_ENTRY_PTR(next)->left;
300
0
        ZIP_ENTRY_PTR(next)->left = NULL;
301
0
        fix_edge = &ZIP_ENTRY_PTR(next)->left;
302
0
    }
303
0
}
304
305
/* Split the tree into a left side with keys <= split-key and a right side with
306
 * key > split-key. */
307
void
308
__ZIP_UNZIP(zip_cmp_cb cmp, unsigned short fieldoffset,
309
            unsigned short keyoffset, const void *key,
310
1.18k
            void *h, void *l, void *r) {
311
1.18k
    zip_elem *prev;
312
1.18k
    zip_head *head = (zip_head*)h;
313
1.18k
    zip_head *left = (zip_head*)l;
314
1.18k
    zip_head *right = (zip_head*)r;
315
1.18k
    if(!head->root) {
316
502
        left->root = NULL;
317
502
        right->root = NULL;
318
502
        return;
319
502
    }
320
683
    zip_elem *cur = head->root;
321
683
    if(cmp(key, ZIP_KEY_PTR(cur)) != ZIP_CMP_LESS) {
322
0
        left->root = cur;
323
0
        do {
324
0
            prev = cur;
325
0
            cur = ZIP_ENTRY_PTR(cur)->right;
326
0
            if(!cur) {
327
0
                right->root = NULL;
328
0
                return;
329
0
            }
330
0
        } while(cmp(key, ZIP_KEY_PTR(cur)) != ZIP_CMP_LESS);
331
0
        ZIP_ENTRY_PTR(prev)->right = NULL;
332
0
        right->root = cur;
333
0
        __ZIP_UNZIP_MOVE_LEFT(cmp, fieldoffset, keyoffset, key,
334
0
                              &ZIP_ENTRY_PTR(prev)->right, cur);
335
683
    } else {
336
683
        right->root = cur;
337
1.57k
        do {
338
1.57k
            prev = cur;
339
1.57k
            cur = ZIP_ENTRY_PTR(cur)->left;
340
1.57k
            if(!cur) {
341
683
                left->root = NULL;
342
683
                return;
343
683
            }
344
1.57k
        } while(cmp(key, ZIP_KEY_PTR(cur)) == ZIP_CMP_LESS);
345
0
        ZIP_ENTRY_PTR(prev)->left = NULL;
346
0
        left->root = cur;
347
0
        __ZIP_UNZIP_MOVE_RIGHT(cmp, fieldoffset, keyoffset, key,
348
0
                               &ZIP_ENTRY_PTR(prev)->left, cur);
349
0
    }
350
683
}