Coverage Report

Created: 2025-11-09 06:21

next uncovered line (L), next uncovered region (R), next uncovered branch (B)
/src/open62541/deps/ziptree.c
Line
Count
Source
1
/* This Source Code Form is subject to the terms of the Mozilla Public
2
 * License, v. 2.0. If a copy of the MPL was not distributed with this
3
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
4
 *
5
 *    Copyright 2021-2022 (c) Julius Pfrommer
6
 */
7
8
#include "ziptree.h"
9
10
/* Dummy types */
/* The public ZIP_* macros in ziptree.h are type-generic. The functions in this
 * file operate on these opaque dummy types and reach the user's entry/key
 * fields through the fieldoffset/keyoffset byte offsets passed by the caller. */
struct zip_elem;
typedef struct zip_elem zip_elem;
typedef ZIP_ENTRY(zip_elem) zip_entry;
typedef ZIP_HEAD(, zip_elem) zip_head;
15
16
/* Access macros */
/* Locate the embedded zip_entry (left/right child pointers) and the key inside
 * an element, using the byte offsets supplied by the caller. The argument is
 * parenthesized so the casts bind correctly even if a non-trivial expression
 * is ever passed (CERT PRE01-C). */
#define ZIP_ENTRY_PTR(x) ((zip_entry*)((char*)(x) + fieldoffset))
#define ZIP_KEY_PTR(x) (const void*)((const char*)(x) + keyoffset)
19
20
/* Hash pointers to keep the tie-breaking of equal keys (mostly) uncorrelated
 * from the rank (pointer order). Hashing code taken from sdbm-hash
 * (http://www.cse.yorku.ca/~oz/hash.html). */
static unsigned int
__ZIP_PTR_HASH(const void *p) {
    /* Hash the bytes of the pointer value itself, not of the pointee */
    const unsigned char *bytes = (const unsigned char*)&p;
    unsigned int hash = 0;
    size_t i = 0;
    while(i < sizeof(void*)) {
        /* sdbm step: hash = hash * 65599 + byte */
        hash = bytes[i] + (hash << 6) + (hash << 16) - hash;
        i++;
    }
    return hash;
}
31
32
static ZIP_INLINE enum ZIP_CMP
33
10.8M
__ZIP_RANK_CMP(const void *p1, const void *p2) {
34
    /* assert(p1 != p2); */
35
10.8M
    unsigned int h1 = __ZIP_PTR_HASH(p1);
36
10.8M
    unsigned int h2 = __ZIP_PTR_HASH(p2);
37
10.8M
    if(h1 == h2)
38
0
        return (p1 < p2) ? ZIP_CMP_LESS : ZIP_CMP_MORE;
39
10.8M
    return (h1 < h2) ? ZIP_CMP_LESS : ZIP_CMP_MORE;
40
10.8M
}
41
42
static ZIP_INLINE enum ZIP_CMP
43
15.7M
__ZIP_UNIQUE_CMP(zip_cmp_cb cmp, const void *p1, const void *p2) {
44
15.7M
    if(p1 == p2)
45
2.60M
        return ZIP_CMP_EQ;
46
13.1M
    enum ZIP_CMP order = cmp(p1, p2);
47
13.1M
    if(order == ZIP_CMP_EQ)
48
345k
        return (p1 < p2) ? ZIP_CMP_LESS : ZIP_CMP_MORE;
49
12.8M
    return order;
50
13.1M
}
51
52
#if 0
53
#include <assert.h>
54
ZIP_UNUSED static ZIP_INLINE void
55
__ZIP_VALIDATE(zip_cmp_cb cmp, unsigned short fieldoffset,
56
               unsigned short keyoffset, void *elm,
57
               void *min_elm, void *max_elm) {
58
    if(!elm)
59
        return;
60
    enum ZIP_CMP c1 = __ZIP_UNIQUE_CMP(cmp, ZIP_KEY_PTR(min_elm), ZIP_KEY_PTR(elm));
61
    assert((elm == min_elm && c1 == ZIP_CMP_EQ) || c1 == ZIP_CMP_LESS);
62
63
    enum ZIP_CMP c2 = __ZIP_UNIQUE_CMP(cmp, ZIP_KEY_PTR(max_elm), ZIP_KEY_PTR(elm));
64
    assert((elm == max_elm && c2 == ZIP_CMP_EQ) || c2 == ZIP_CMP_MORE);
65
66
    assert(!ZIP_ENTRY_PTR(elm)->right ||
67
           __ZIP_RANK_CMP(elm, ZIP_ENTRY_PTR(elm)->right) == ZIP_CMP_MORE);
68
    assert(!ZIP_ENTRY_PTR(elm)->left ||
69
           __ZIP_RANK_CMP(elm, ZIP_ENTRY_PTR(elm)->left) == ZIP_CMP_MORE);
70
71
    __ZIP_VALIDATE(cmp, fieldoffset, keyoffset, ZIP_ENTRY_PTR(elm)->right, elm, max_elm);
72
    __ZIP_VALIDATE(cmp, fieldoffset, keyoffset, ZIP_ENTRY_PTR(elm)->left, min_elm, elm);
73
}
74
#endif
75
76
/* Walk down the right-side spine of cur. Elements that are larger than x_key
77
 * are moved under x->right. */
78
static void
79
__ZIP_INSERT_MOVE_RIGHT(zip_cmp_cb cmp, unsigned short fieldoffset,
80
                        unsigned short keyoffset, const void *x_key,
81
725k
                        zip_elem **fix_edge, zip_elem *cur) {
82
1.74M
    while(ZIP_ENTRY_PTR(cur)->right) {
83
1.01M
        zip_elem *move_candidate = ZIP_ENTRY_PTR(cur)->right;
84
1.01M
        if(__ZIP_UNIQUE_CMP(cmp, x_key, ZIP_KEY_PTR(move_candidate)) == ZIP_CMP_MORE) {
85
431k
            cur = ZIP_ENTRY_PTR(cur)->right;
86
431k
            continue;
87
431k
        }
88
586k
        ZIP_ENTRY_PTR(cur)->right = ZIP_ENTRY_PTR(move_candidate)->left;
89
586k
        ZIP_ENTRY_PTR(move_candidate)->left = NULL;
90
586k
        *fix_edge = move_candidate;
91
586k
        fix_edge = &ZIP_ENTRY_PTR(move_candidate)->left;
92
586k
    }
93
725k
}
94
95
static void
96
__ZIP_INSERT_MOVE_LEFT(zip_cmp_cb cmp, unsigned short fieldoffset,
97
                       unsigned short keyoffset, const void *x_key,
98
1.71M
                       zip_elem **fix_edge, zip_elem *cur) {
99
2.96M
    while(ZIP_ENTRY_PTR(cur)->left) {
100
1.25M
        zip_elem *move_candidate = ZIP_ENTRY_PTR(cur)->left;
101
1.25M
        if(__ZIP_UNIQUE_CMP(cmp, x_key, ZIP_KEY_PTR(move_candidate)) == ZIP_CMP_LESS) {
102
690k
            cur = ZIP_ENTRY_PTR(cur)->left;
103
690k
            continue;
104
690k
        }
105
562k
        ZIP_ENTRY_PTR(cur)->left = ZIP_ENTRY_PTR(move_candidate)->right;
106
562k
        ZIP_ENTRY_PTR(move_candidate)->right = NULL;
107
562k
        *fix_edge = move_candidate;
108
562k
        fix_edge = &ZIP_ENTRY_PTR(move_candidate)->right;
109
562k
    }
110
1.71M
}
111
112
/* Insert elm into the zip tree rooted at h. The element's position follows the
 * key order (ties broken by address); its depth follows the pointer-hash rank
 * so the tree stays balanced in expectation. If an element with the same
 * address is already in the tree, the call is a no-op. */
void
__ZIP_INSERT(void *h, zip_cmp_cb cmp, unsigned short fieldoffset,
             unsigned short keyoffset, void *elm) {
    zip_elem *x = (zip_elem*)elm;
    /* The new element starts out as a leaf */
    ZIP_ENTRY_PTR(x)->left = NULL;
    ZIP_ENTRY_PTR(x)->right = NULL;

    const void *x_key = ZIP_KEY_PTR(x);
    zip_head *head = (zip_head*)h;
    if(!head->root) {
        /* Empty tree: x becomes the root */
        head->root = x;
        return;
    }

    /* Go down the tree to find the top element "cur" that has a rank smaller
     * than "x" */
    zip_elem *prev = NULL;
    zip_elem *cur = head->root;
    enum ZIP_CMP cur_order, prev_order = ZIP_CMP_EQ;
    do {
        cur_order = __ZIP_UNIQUE_CMP(cmp, x_key, ZIP_KEY_PTR(cur));
        if(cur_order == ZIP_CMP_EQ)
            return; /* x is already inserted */
        if(__ZIP_RANK_CMP(cur, x) == ZIP_CMP_LESS)
            break; /* x outranks cur -> x takes cur's place */
        prev = cur;
        prev_order = cur_order;
        /* Follow the key order downwards */
        cur = (cur_order == ZIP_CMP_MORE) ?
            ZIP_ENTRY_PTR(cur)->right : ZIP_ENTRY_PTR(cur)->left;
    } while(cur);

    /* Insert "x" instead of "cur" under its parent "prev" */
    if(cur == head->root) {
        head->root = x;
    } else {
        /* prev_order tells on which side of prev the descent continued */
        if(prev_order == ZIP_CMP_MORE)
            ZIP_ENTRY_PTR(prev)->right = x;
        else
            ZIP_ENTRY_PTR(prev)->left = x;
    }

    /* Reached a NULL child: x was attached as a leaf, nothing to repair */
    if(!cur)
        return;

    /* Re-insert "cur" under "x". Repair by moving elements that ended up on the
     * wrong side of "x". */
    if(cur_order == ZIP_CMP_MORE) {
        /* cur's key is smaller than x's: cur goes to x's left; peel elements
         * larger than x off cur's right spine over to x->right */
        ZIP_ENTRY_PTR(x)->left = cur;
        __ZIP_INSERT_MOVE_RIGHT(cmp, fieldoffset, keyoffset,
                                x_key, &ZIP_ENTRY_PTR(x)->right, cur);
    } else {
        /* cur's key is larger than x's: cur goes to x's right; peel elements
         * smaller than x off cur's left spine over to x->left */
        ZIP_ENTRY_PTR(x)->right = cur;
        __ZIP_INSERT_MOVE_LEFT(cmp, fieldoffset, keyoffset,
                               x_key, &ZIP_ENTRY_PTR(x)->left, cur);
    }
}
168
169
void *
170
__ZIP_REMOVE(void *h, zip_cmp_cb cmp, unsigned short fieldoffset,
171
2.60M
             unsigned short keyoffset, void *elm) {
172
2.60M
    zip_head *head = (zip_head*)h;
173
2.60M
    zip_elem *x = (zip_elem*)elm;
174
2.60M
    zip_elem *cur = head->root;
175
2.60M
    if(!cur)
176
0
        return NULL;
177
178
2.60M
    const void *x_key = ZIP_KEY_PTR(x);
179
2.60M
    zip_elem **prev_edge = &head->root;
180
2.60M
    enum ZIP_CMP cur_order = __ZIP_UNIQUE_CMP(cmp, x_key, ZIP_KEY_PTR(cur));
181
3.28M
    while(cur_order != ZIP_CMP_EQ) {
182
679k
        prev_edge = (cur_order == ZIP_CMP_LESS) ?
183
620k
            &ZIP_ENTRY_PTR(cur)->left : &ZIP_ENTRY_PTR(cur)->right;
184
679k
        cur = *prev_edge;
185
679k
        if(!cur)
186
0
            return NULL;
187
679k
        cur_order = __ZIP_UNIQUE_CMP(cmp, x_key, ZIP_KEY_PTR(cur));
188
679k
    }
189
2.60M
    *prev_edge = (zip_elem*)__ZIP_ZIP(fieldoffset,
190
2.60M
                                      ZIP_ENTRY_PTR(cur)->left,
191
2.60M
                                      ZIP_ENTRY_PTR(cur)->right);
192
2.60M
    return cur;
193
2.60M
}
194
195
void *
196
__ZIP_ITER(unsigned short fieldoffset, zip_iter_cb cb,
197
4.07M
           void *context, void *elm) {
198
4.07M
    if(!elm)
199
1.97M
        return NULL;
200
2.10M
    zip_elem *left = ZIP_ENTRY_PTR(elm)->left;
201
2.10M
    zip_elem *right = ZIP_ENTRY_PTR(elm)->right;
202
2.10M
    void *res = __ZIP_ITER(fieldoffset, cb, context, left);
203
2.10M
    if(res)
204
140k
        return res;
205
1.96M
    res = cb(context, elm);
206
1.96M
    if(res)
207
58.0k
        return res;
208
1.90M
    return __ZIP_ITER(fieldoffset, cb, context, right);
209
1.96M
}
210
211
void *
212
__ZIP_ITER_KEY(zip_cmp_cb cmp, unsigned short fieldoffset,
213
               unsigned short keyoffset, const void *key,
214
0
               zip_iter_cb cb, void *context, void *elm) {
215
0
    if(!elm)
216
0
        return NULL;
217
218
0
    void *res;
219
0
    enum ZIP_CMP eq = cmp(key, ZIP_KEY_PTR(elm));
220
0
    if(eq != ZIP_CMP_MORE) {
221
0
        res = __ZIP_ITER_KEY(cmp, fieldoffset, keyoffset, key,
222
0
                             cb, context, ZIP_ENTRY_PTR(elm)->left);
223
0
        if(res)
224
0
            return res;
225
0
    }
226
227
0
    if(eq == ZIP_CMP_EQ) {
228
0
        res = cb(context, elm);
229
0
        if(res)
230
0
            return res;
231
0
    }
232
233
0
    if(eq != ZIP_CMP_LESS) {
234
0
        res = __ZIP_ITER_KEY(cmp, fieldoffset, keyoffset, key,
235
0
                             cb, context, ZIP_ENTRY_PTR(elm)->right);
236
0
        if(res)
237
0
            return res;
238
0
    }
239
240
0
    return NULL;
241
0
}
242
243
void *
244
2.60M
__ZIP_ZIP(unsigned short fieldoffset, void *left, void *right) {
245
2.60M
    if(!left)
246
2.02M
        return right;
247
581k
    if(!right)
248
136k
        return left;
249
444k
    zip_elem *l = (zip_elem*)left;
250
444k
    zip_elem *r = (zip_elem*)right;
251
444k
    zip_elem *root = NULL;
252
444k
    zip_elem **prev_edge = &root;
253
1.01M
    while(l && r) {
254
573k
        if(__ZIP_RANK_CMP(l, r) == ZIP_CMP_LESS) {
255
339k
            *prev_edge = r;
256
339k
            prev_edge = &ZIP_ENTRY_PTR(r)->left;
257
339k
            r = ZIP_ENTRY_PTR(r)->left;
258
339k
        } else {
259
233k
            *prev_edge = l;
260
233k
            prev_edge = &ZIP_ENTRY_PTR(l)->right;
261
233k
            l = ZIP_ENTRY_PTR(l)->right;
262
233k
        }
263
573k
    }
264
444k
    *prev_edge = (l) ? l : r;
265
444k
    return root;
266
581k
}
267
268
/* Walk down from cur and move all elements <= split-key to the left side. All
269
 * elements that are moved over have to be below left_rightmost. Returns the
270
 * hierarchy of elements that remain on the right side. */
271
static void
272
__ZIP_UNZIP_MOVE_LEFT(zip_cmp_cb cmp, unsigned short fieldoffset,
273
                      unsigned short keyoffset, const void *key,
274
0
                      zip_elem **fix_edge, zip_elem *cur) {
275
0
    while(ZIP_ENTRY_PTR(cur)->left) {
276
0
        zip_elem *next = ZIP_ENTRY_PTR(cur)->left;
277
0
        if(cmp(key, ZIP_KEY_PTR(next)) == ZIP_CMP_LESS) {
278
0
            cur = next;
279
0
            continue;
280
0
        }
281
0
        *fix_edge = next;
282
0
        ZIP_ENTRY_PTR(cur)->left = ZIP_ENTRY_PTR(next)->right;
283
0
        ZIP_ENTRY_PTR(next)->right = NULL;
284
0
        fix_edge = &ZIP_ENTRY_PTR(next)->right;
285
0
    }
286
0
}
287
288
static void
289
__ZIP_UNZIP_MOVE_RIGHT(zip_cmp_cb cmp, unsigned short fieldoffset,
290
                       unsigned short keyoffset, const void *key,
291
0
                       zip_elem **fix_edge, zip_elem *cur) {
292
0
    while(ZIP_ENTRY_PTR(cur)->right) {
293
0
        zip_elem *next = ZIP_ENTRY_PTR(cur)->right;
294
0
        if(cmp(key, ZIP_KEY_PTR(next)) != ZIP_CMP_LESS) {
295
0
            cur = next;
296
0
            continue;
297
0
        }
298
0
        *fix_edge = next;
299
0
        ZIP_ENTRY_PTR(cur)->right = ZIP_ENTRY_PTR(next)->left;
300
0
        ZIP_ENTRY_PTR(next)->left = NULL;
301
0
        fix_edge = &ZIP_ENTRY_PTR(next)->left;
302
0
    }
303
0
}
304
305
/* Split the tree into a left side with keys <= split-key and a right side with
 * key > split-key.
 *
 * h is the input tree head; l and r receive the two result trees. The root's
 * side is decided first; then the opposite boundary is found by walking down
 * the appropriate spine, the tree is cut at that edge, and the matching
 * MOVE helper repairs elements that ended up on the wrong side of the cut. */
void
__ZIP_UNZIP(zip_cmp_cb cmp, unsigned short fieldoffset,
            unsigned short keyoffset, const void *key,
            void *h, void *l, void *r) {
    zip_elem *prev;
    zip_head *head = (zip_head*)h;
    zip_head *left = (zip_head*)l;
    zip_head *right = (zip_head*)r;
    if(!head->root) {
        /* Empty input -> two empty outputs */
        left->root = NULL;
        right->root = NULL;
        return;
    }
    zip_elem *cur = head->root;
    if(cmp(key, ZIP_KEY_PTR(cur)) != ZIP_CMP_LESS) {
        /* Root key <= split key: the root belongs to the left side */
        left->root = cur;
        /* Walk the right spine until the first element with key > split key */
        do {
            prev = cur;
            cur = ZIP_ENTRY_PTR(cur)->right;
            if(!cur) {
                /* Every element is <= split key */
                right->root = NULL;
                return;
            }
        } while(cmp(key, ZIP_KEY_PTR(cur)) != ZIP_CMP_LESS);
        /* Cut below prev; cur becomes the right-side root */
        ZIP_ENTRY_PTR(prev)->right = NULL;
        right->root = cur;
        /* Pull elements <= split key out of cur's left spine back to the left */
        __ZIP_UNZIP_MOVE_LEFT(cmp, fieldoffset, keyoffset, key,
                              &ZIP_ENTRY_PTR(prev)->right, cur);
    } else {
        /* Root key > split key: the root belongs to the right side */
        right->root = cur;
        /* Walk the left spine until the first element with key <= split key */
        do {
            prev = cur;
            cur = ZIP_ENTRY_PTR(cur)->left;
            if(!cur) {
                /* Every element is > split key */
                left->root = NULL;
                return;
            }
        } while(cmp(key, ZIP_KEY_PTR(cur)) == ZIP_CMP_LESS);
        /* Cut below prev; cur becomes the left-side root */
        ZIP_ENTRY_PTR(prev)->left = NULL;
        left->root = cur;
        /* Pull elements > split key out of cur's right spine back to the right */
        __ZIP_UNZIP_MOVE_RIGHT(cmp, fieldoffset, keyoffset, key,
                               &ZIP_ENTRY_PTR(prev)->left, cur);
    }
}