Coverage Report

Created: 2024-06-18 06:29

/src/hdf5/src/H5B2int.c
Line | Count | Source

(In the listing below, each source line is preceded by its line number and, for executable lines, by its execution count; a count of 0 means the line was never executed. Every executable line in this section reports a count of 0.)
1
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
2
 * Copyright by The HDF Group.                                               *
3
 * All rights reserved.                                                      *
4
 *                                                                           *
5
 * This file is part of HDF5.  The full HDF5 copyright notice, including     *
6
 * terms governing use, modification, and redistribution, is contained in    *
7
 * the COPYING file, which can be found at the root of the source code       *
8
 * distribution tree, or in https://www.hdfgroup.org/licenses.               *
9
 * If you do not have access to either file, you may request a copy from     *
10
 * help@hdfgroup.org.                                                        *
11
 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
12
13
/*-------------------------------------------------------------------------
14
 *
15
 * Created:   H5B2int.c
16
 *
17
 * Purpose:   Internal routines for managing v2 B-trees.
18
 *
19
 *-------------------------------------------------------------------------
20
 */
21
22
/****************/
23
/* Module Setup */
24
/****************/
25
26
#include "H5B2module.h" /* This source code file is part of the H5B2 module */
27
28
/***********/
29
/* Headers */
30
/***********/
31
#include "H5private.h"   /* Generic Functions     */
32
#include "H5B2pkg.h"     /* v2 B-trees        */
33
#include "H5Eprivate.h"  /* Error handling        */
34
#include "H5FLprivate.h" /* Free Lists                               */
35
#include "H5MMprivate.h" /* Memory management     */
36
#include "H5VMprivate.h" /* Vectors and arrays      */
37
38
/****************/
39
/* Local Macros */
40
/****************/
41
42
/******************/
43
/* Local Typedefs */
44
/******************/
45
46
/********************/
47
/* Package Typedefs */
48
/********************/
49
50
/********************/
51
/* Local Prototypes */
52
/********************/
53
static herr_t H5B2__update_child_flush_depends(H5B2_hdr_t *hdr, unsigned depth, H5B2_node_ptr_t *node_ptrs,
54
                                               unsigned start_idx, unsigned end_idx, void *old_parent,
55
                                               void *new_parent);
56
57
/*********************/
58
/* Package Variables */
59
/*********************/
60
61
/*****************************/
62
/* Library Private Variables */
63
/*****************************/
64
65
/*******************/
66
/* Local Variables */
67
/*******************/
68
69
/* Declare a free list to manage the 'H5B2_node_info_t' sequence information */
70
H5FL_SEQ_EXTERN(H5B2_node_info_t);
71
72
/*-------------------------------------------------------------------------
73
 * Function:  H5B2__locate_record
74
 *
75
 * Purpose: Performs a binary search to locate a record in a sorted
76
 *              array of records.
77
 *
78
 *              Sets *IDX to location of record greater than or equal to
79
 *              record to locate.
80
 *
81
 * Return:  Comparison value for insertion location.  Negative for record
82
 *              to locate being less than value in *IDX.  Zero for record to
83
 *              locate equal to value in *IDX.  Positive for record to locate
84
 *              being greater than value in *IDX (which should only happen when
85
 *              record to locate is greater than all records to search).
86
 *
87
 *-------------------------------------------------------------------------
88
 */
89
herr_t
90
H5B2__locate_record(const H5B2_class_t *type, unsigned nrec, size_t *rec_off, const uint8_t *native,
91
                    const void *udata, unsigned *idx, int *cmp)
92
0
{
93
0
    unsigned lo        = 0, hi; /* Low & high index values */
94
0
    unsigned my_idx    = 0;     /* Final index value */
95
0
    herr_t   ret_value = SUCCEED;
96
97
0
    FUNC_ENTER_PACKAGE
98
99
0
    *cmp = -1;
100
101
0
    hi = nrec;
102
0
    while (lo < hi && *cmp) {
103
0
        my_idx = (lo + hi) / 2;
104
0
        if ((type->compare)(udata, native + rec_off[my_idx], cmp) < 0)
105
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTCOMPARE, FAIL, "can't compare btree2 records");
106
0
        if (*cmp < 0)
107
0
            hi = my_idx;
108
0
        else
109
0
            lo = my_idx + 1;
110
0
    } /* end while */
111
112
0
    *idx = my_idx;
113
114
0
done:
115
0
    FUNC_LEAVE_NOAPI(ret_value)
116
0
} /* end H5B2__locate_record */
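
A minimal standalone sketch of the same lower-bound binary-search convention, illustrative only and not part of the measured H5B2int.c (locate_int is a made-up name, and plain ints stand in for native records): *idx ends up at the first record greater than or equal to the key, and the sign of the returned comparison tells the caller whether it is an exact match, an insert-before position, or a key larger than every record.

#include <stddef.h>

/* Sketch only: returns the final comparison like *cmp above
 * (negative: key sorts before array[*idx]; zero: exact match at *idx;
 * positive: key sorts after every record). */
static int
locate_int(const int *array, size_t nrec, int key, size_t *idx)
{
    size_t lo = 0, hi = nrec, my_idx = 0;
    int    cmp = -1;

    while (lo < hi && cmp) {
        my_idx = (lo + hi) / 2;
        cmp    = (key > array[my_idx]) - (key < array[my_idx]);
        if (cmp < 0)
            hi = my_idx;
        else
            lo = my_idx + 1;
    }

    *idx = my_idx;
    return cmp;
}

With array = {10, 20, 30} and key = 25, the sketch leaves idx = 2 and returns a negative value, i.e. 25 would be inserted before the record at index 2.
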
117
118
/*-------------------------------------------------------------------------
119
 * Function:  H5B2__split1
120
 *
121
 * Purpose: Perform a 1->2 node split
122
 *
123
 * Return:  Success:  Non-negative
124
 *    Failure:  Negative
125
 *
126
 *-------------------------------------------------------------------------
127
 */
128
herr_t
129
H5B2__split1(H5B2_hdr_t *hdr, uint16_t depth, H5B2_node_ptr_t *curr_node_ptr,
130
             unsigned *parent_cache_info_flags_ptr, H5B2_internal_t *internal, unsigned *internal_flags_ptr,
131
             unsigned idx)
132
0
{
133
0
    const H5AC_class_t *child_class;                             /* Pointer to child node's class info */
134
0
    haddr_t   left_addr = HADDR_UNDEF, right_addr = HADDR_UNDEF; /* Addresses of left & right child nodes */
135
0
    void     *left_child = NULL, *right_child = NULL;            /* Pointers to child nodes */
136
0
    uint16_t *left_nrec, *right_nrec;                            /* Pointers to child # of records */
137
0
    uint8_t  *left_native, *right_native;                        /* Pointers to childs' native records */
138
0
    H5B2_node_ptr_t *left_node_ptrs  = NULL,
139
0
                    *right_node_ptrs = NULL; /* Pointers to childs' node pointer info */
140
0
    uint16_t mid_record;                     /* Index of "middle" record in current node */
141
0
    uint16_t old_node_nrec;                  /* Number of records in internal node split */
142
0
    unsigned left_child_flags  = H5AC__NO_FLAGS_SET,
143
0
             right_child_flags = H5AC__NO_FLAGS_SET; /* Flags for unprotecting child nodes */
144
0
    herr_t ret_value           = SUCCEED;            /* Return value */
145
146
0
    FUNC_ENTER_PACKAGE
147
148
    /* Check arguments. */
149
0
    assert(hdr);
150
0
    assert(internal);
151
0
    assert(internal_flags_ptr);
152
153
    /* Slide records in parent node up one space, to make room for promoted record */
154
0
    if (idx < internal->nrec) {
155
0
        memmove(H5B2_INT_NREC(internal, hdr, idx + 1), H5B2_INT_NREC(internal, hdr, idx),
156
0
                hdr->cls->nrec_size * (internal->nrec - idx));
157
0
        memmove(&(internal->node_ptrs[idx + 2]), &(internal->node_ptrs[idx + 1]),
158
0
                sizeof(H5B2_node_ptr_t) * (internal->nrec - idx));
159
0
    } /* end if */
160
161
    /* Check for the kind of B-tree node to split */
162
0
    if (depth > 1) {
163
0
        H5B2_internal_t *left_int = NULL, *right_int = NULL; /* Pointers to old & new internal nodes */
164
165
        /* Create new internal node */
166
0
        internal->node_ptrs[idx + 1].all_nrec = internal->node_ptrs[idx + 1].node_nrec = 0;
167
0
        if (H5B2__create_internal(hdr, internal, &(internal->node_ptrs[idx + 1]), (uint16_t)(depth - 1)) < 0)
168
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTINIT, FAIL, "unable to create new internal node");
169
170
        /* Setup information for unlocking child nodes */
171
0
        child_class = H5AC_BT2_INT;
172
173
        /* Protect both child nodes */
174
        /* (Shadow left node if doing SWMR writes) */
175
0
        if (NULL ==
176
0
            (left_int = H5B2__protect_internal(hdr, internal, &internal->node_ptrs[idx],
177
0
                                               (uint16_t)(depth - 1), hdr->swmr_write, H5AC__NO_FLAGS_SET)))
178
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree internal node");
179
0
        left_addr = internal->node_ptrs[idx].addr;
180
0
        if (NULL == (right_int = H5B2__protect_internal(hdr, internal, &internal->node_ptrs[idx + 1],
181
0
                                                        (uint16_t)(depth - 1), false, H5AC__NO_FLAGS_SET)))
182
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree internal node");
183
0
        right_addr = internal->node_ptrs[idx + 1].addr;
184
185
        /* More setup for child nodes */
186
0
        left_child      = left_int;
187
0
        right_child     = right_int;
188
0
        left_nrec       = &(left_int->nrec);
189
0
        right_nrec      = &(right_int->nrec);
190
0
        left_native     = left_int->int_native;
191
0
        right_native    = right_int->int_native;
192
0
        left_node_ptrs  = left_int->node_ptrs;
193
0
        right_node_ptrs = right_int->node_ptrs;
194
0
    } /* end if */
195
0
    else {
196
0
        H5B2_leaf_t *left_leaf = NULL, *right_leaf = NULL; /* Pointers to old & new leaf nodes */
197
198
        /* Create new leaf node */
199
0
        internal->node_ptrs[idx + 1].all_nrec = internal->node_ptrs[idx + 1].node_nrec = 0;
200
0
        if (H5B2__create_leaf(hdr, internal, &(internal->node_ptrs[idx + 1])) < 0)
201
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTINIT, FAIL, "unable to create new leaf node");
202
203
        /* Setup information for unlocking child nodes */
204
0
        child_class = H5AC_BT2_LEAF;
205
206
        /* Protect both leaves */
207
        /* (Shadow the left node if doing SWMR writes) */
208
0
        if (NULL == (left_leaf = H5B2__protect_leaf(hdr, internal, &internal->node_ptrs[idx], hdr->swmr_write,
209
0
                                                    H5AC__NO_FLAGS_SET)))
210
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree leaf node");
211
0
        left_addr = internal->node_ptrs[idx].addr;
212
0
        if (NULL == (right_leaf = H5B2__protect_leaf(hdr, internal, &internal->node_ptrs[idx + 1], false,
213
0
                                                     H5AC__NO_FLAGS_SET)))
214
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree leaf node");
215
0
        right_addr = internal->node_ptrs[idx + 1].addr;
216
217
        /* More setup for child nodes */
218
0
        left_child   = left_leaf;
219
0
        right_child  = right_leaf;
220
0
        left_nrec    = &(left_leaf->nrec);
221
0
        right_nrec   = &(right_leaf->nrec);
222
0
        left_native  = left_leaf->leaf_native;
223
0
        right_native = right_leaf->leaf_native;
224
0
    } /* end else */
225
226
    /* Get the number of records in node to split */
227
0
    old_node_nrec = internal->node_ptrs[idx].node_nrec;
228
229
    /* Determine "middle" record to promote to internal node */
230
0
    mid_record = (uint16_t)(old_node_nrec / 2);
231
232
    /* Copy "upper half" of records to new child */
233
0
    H5MM_memcpy(H5B2_NAT_NREC(right_native, hdr, 0),
234
0
                H5B2_NAT_NREC(left_native, hdr, mid_record + (unsigned)1),
235
0
                hdr->cls->nrec_size * (old_node_nrec - (mid_record + (unsigned)1)));
236
237
    /* Copy "upper half" of node pointers, if the node is an internal node */
238
0
    if (depth > 1)
239
0
        H5MM_memcpy(&(right_node_ptrs[0]), &(left_node_ptrs[mid_record + (unsigned)1]),
240
0
                    sizeof(H5B2_node_ptr_t) * (size_t)(old_node_nrec - mid_record));
241
242
    /* Copy "middle" record to internal node */
243
0
    H5MM_memcpy(H5B2_INT_NREC(internal, hdr, idx), H5B2_NAT_NREC(left_native, hdr, mid_record),
244
0
                hdr->cls->nrec_size);
245
246
    /* Mark nodes as dirty */
247
0
    left_child_flags |= H5AC__DIRTIED_FLAG;
248
0
    right_child_flags |= H5AC__DIRTIED_FLAG;
249
250
    /* Update record counts in child nodes */
251
0
    internal->node_ptrs[idx].node_nrec = *left_nrec = mid_record;
252
0
    internal->node_ptrs[idx + 1].node_nrec = *right_nrec = (uint16_t)(old_node_nrec - (mid_record + 1));
253
254
    /* Determine total number of records in new child nodes */
255
0
    if (depth > 1) {
256
0
        unsigned u;                  /* Local index variable */
257
0
        hsize_t  new_left_all_nrec;  /* New total number of records in left child */
258
0
        hsize_t  new_right_all_nrec; /* New total number of records in right child */
259
260
        /* Compute total of all records in each child node */
261
0
        new_left_all_nrec = internal->node_ptrs[idx].node_nrec;
262
0
        for (u = 0; u < (*left_nrec + (unsigned)1); u++)
263
0
            new_left_all_nrec += left_node_ptrs[u].all_nrec;
264
265
0
        new_right_all_nrec = internal->node_ptrs[idx + 1].node_nrec;
266
0
        for (u = 0; u < (*right_nrec + (unsigned)1); u++)
267
0
            new_right_all_nrec += right_node_ptrs[u].all_nrec;
268
269
0
        internal->node_ptrs[idx].all_nrec     = new_left_all_nrec;
270
0
        internal->node_ptrs[idx + 1].all_nrec = new_right_all_nrec;
271
0
    } /* end if */
272
0
    else {
273
0
        internal->node_ptrs[idx].all_nrec     = internal->node_ptrs[idx].node_nrec;
274
0
        internal->node_ptrs[idx + 1].all_nrec = internal->node_ptrs[idx + 1].node_nrec;
275
0
    } /* end else */
276
277
    /* Update # of records in parent node */
278
0
    internal->nrec++;
279
280
    /* Mark parent as dirty */
281
0
    *internal_flags_ptr |= H5AC__DIRTIED_FLAG;
282
283
    /* Update grandparent info */
284
0
    curr_node_ptr->node_nrec++;
285
286
    /* Mark grandparent as dirty, if given */
287
0
    if (parent_cache_info_flags_ptr)
288
0
        *parent_cache_info_flags_ptr |= H5AC__DIRTIED_FLAG;
289
290
    /* Update flush dependencies for grandchildren, if using SWMR */
291
0
    if (hdr->swmr_write && depth > 1)
292
0
        if (H5B2__update_child_flush_depends(hdr, depth, right_node_ptrs, 0, (unsigned)(*right_nrec + 1),
293
0
                                             left_child, right_child) < 0)
294
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTUPDATE, FAIL, "unable to update child nodes to new parent");
295
296
#ifdef H5B2_DEBUG
297
    H5B2__assert_internal((hsize_t)0, hdr, internal);
298
    if (depth > 1) {
299
        H5B2__assert_internal2(internal->node_ptrs[idx].all_nrec, hdr, (H5B2_internal_t *)left_child,
300
                               (H5B2_internal_t *)right_child);
301
        H5B2__assert_internal2(internal->node_ptrs[idx + 1].all_nrec, hdr, (H5B2_internal_t *)right_child,
302
                               (H5B2_internal_t *)left_child);
303
    } /* end if */
304
    else {
305
        H5B2__assert_leaf2(hdr, (H5B2_leaf_t *)left_child, (H5B2_leaf_t *)right_child);
306
        H5B2__assert_leaf(hdr, (H5B2_leaf_t *)right_child);
307
    }  /* end else */
308
#endif /* H5B2_DEBUG */
309
310
0
done:
311
    /* Release child nodes (marked as dirty) */
312
0
    if (left_child && H5AC_unprotect(hdr->f, child_class, left_addr, left_child, left_child_flags) < 0)
313
0
        HDONE_ERROR(H5E_BTREE, H5E_CANTUNPROTECT, FAIL, "unable to release B-tree leaf node");
314
0
    if (right_child && H5AC_unprotect(hdr->f, child_class, right_addr, right_child, right_child_flags) < 0)
315
0
        HDONE_ERROR(H5E_BTREE, H5E_CANTUNPROTECT, FAIL, "unable to release B-tree leaf node");
316
317
0
    FUNC_LEAVE_NOAPI(ret_value)
318
0
} /* end H5B2__split1() */
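
A sketch of the record bookkeeping performed by the 1->2 split above, illustrative only (split1_counts is a made-up helper, not HDF5 API): the record at index old_nrec / 2 is promoted into the parent, the left child keeps everything below it, and the right child receives everything above it.

/* Sketch only.  E.g. old_nrec = 7: the record at index 3 is promoted,
 * the left child keeps 3 records and the right child gets 3. */
static void
split1_counts(unsigned old_nrec, unsigned *left_nrec, unsigned *right_nrec)
{
    unsigned mid = old_nrec / 2;         /* index of the record promoted to the parent */

    *left_nrec  = mid;                   /* records 0 .. mid-1 stay in the left child   */
    *right_nrec = old_nrec - (mid + 1);  /* records mid+1 .. old_nrec-1 move right      */
}
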
319
320
/*-------------------------------------------------------------------------
321
 * Function:  H5B2__split_root
322
 *
323
 * Purpose: Split the root node
324
 *
325
 * Return:  Success:  Non-negative
326
 *    Failure:  Negative
327
 *
328
 *-------------------------------------------------------------------------
329
 */
330
herr_t
331
H5B2__split_root(H5B2_hdr_t *hdr)
332
0
{
333
0
    H5B2_internal_t *new_root       = NULL;               /* Pointer to new root node */
334
0
    unsigned         new_root_flags = H5AC__NO_FLAGS_SET; /* Cache flags for new root node */
335
0
    H5B2_node_ptr_t  old_root_ptr;                        /* Old node pointer to root node in B-tree */
336
0
    size_t           sz_max_nrec;                         /* Temporary variable for range checking */
337
0
    unsigned         u_max_nrec_size;                     /* Temporary variable for range checking */
338
0
    herr_t           ret_value = SUCCEED;                 /* Return value */
339
340
0
    FUNC_ENTER_PACKAGE
341
342
    /* Check arguments. */
343
0
    assert(hdr);
344
345
    /* Update depth of B-tree */
346
0
    hdr->depth++;
347
348
    /* Re-allocate array of node info structs */
349
0
    if (NULL ==
350
0
        (hdr->node_info = H5FL_SEQ_REALLOC(H5B2_node_info_t, hdr->node_info, (size_t)(hdr->depth + 1))))
351
0
        HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL, "memory allocation failed");
352
353
    /* Update node info for new depth of tree */
354
0
    sz_max_nrec = H5B2_NUM_INT_REC(hdr, hdr->depth);
355
0
    H5_CHECKED_ASSIGN(hdr->node_info[hdr->depth].max_nrec, unsigned, sz_max_nrec, size_t);
356
0
    hdr->node_info[hdr->depth].split_nrec = (hdr->node_info[hdr->depth].max_nrec * hdr->split_percent) / 100;
357
0
    hdr->node_info[hdr->depth].merge_nrec = (hdr->node_info[hdr->depth].max_nrec * hdr->merge_percent) / 100;
358
0
    hdr->node_info[hdr->depth].cum_max_nrec =
359
0
        ((hdr->node_info[hdr->depth].max_nrec + 1) * hdr->node_info[hdr->depth - 1].cum_max_nrec) +
360
0
        hdr->node_info[hdr->depth].max_nrec;
361
0
    u_max_nrec_size = H5VM_limit_enc_size((uint64_t)hdr->node_info[hdr->depth].cum_max_nrec);
362
0
    H5_CHECKED_ASSIGN(hdr->node_info[hdr->depth].cum_max_nrec_size, uint8_t, u_max_nrec_size, unsigned);
363
0
    if (NULL == (hdr->node_info[hdr->depth].nat_rec_fac =
364
0
                     H5FL_fac_init(hdr->cls->nrec_size * hdr->node_info[hdr->depth].max_nrec)))
365
0
        HGOTO_ERROR(H5E_RESOURCE, H5E_CANTINIT, FAIL, "can't create node native key block factory");
366
0
    if (NULL == (hdr->node_info[hdr->depth].node_ptr_fac =
367
0
                     H5FL_fac_init(sizeof(H5B2_node_ptr_t) * (hdr->node_info[hdr->depth].max_nrec + 1))))
368
0
        HGOTO_ERROR(H5E_RESOURCE, H5E_CANTINIT, FAIL,
369
0
                    "can't create internal 'branch' node node pointer block factory");
370
371
    /* Keep old root node pointer info */
372
0
    old_root_ptr = hdr->root;
373
374
    /* Create new internal node to use as root */
375
0
    hdr->root.node_nrec = 0;
376
0
    if (H5B2__create_internal(hdr, hdr, &(hdr->root), hdr->depth) < 0)
377
0
        HGOTO_ERROR(H5E_BTREE, H5E_CANTINIT, FAIL, "unable to create new internal node");
378
379
    /* Protect new root node */
380
0
    if (NULL ==
381
0
        (new_root = H5B2__protect_internal(hdr, hdr, &hdr->root, hdr->depth, false, H5AC__NO_FLAGS_SET)))
382
0
        HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree internal node");
383
384
    /* Set first node pointer in root node to old root node pointer info */
385
0
    new_root->node_ptrs[0] = old_root_ptr;
386
387
    /* Split original root node */
388
0
    if (H5B2__split1(hdr, hdr->depth, &(hdr->root), NULL, new_root, &new_root_flags, 0) < 0)
389
0
        HGOTO_ERROR(H5E_BTREE, H5E_CANTSPLIT, FAIL, "unable to split old root node");
390
391
0
done:
392
    /* Release new root node (marked as dirty) */
393
0
    if (new_root && H5AC_unprotect(hdr->f, H5AC_BT2_INT, hdr->root.addr, new_root, new_root_flags) < 0)
394
0
        HDONE_ERROR(H5E_BTREE, H5E_CANTUNPROTECT, FAIL, "unable to release B-tree internal node");
395
396
0
    FUNC_LEAVE_NOAPI(ret_value)
397
0
} /* end H5B2__split_root() */
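
The node-info update above applies the recurrence cum_max_nrec(d) = (max_nrec(d) + 1) * cum_max_nrec(d-1) + max_nrec(d): a node at depth d holds max_nrec(d) records of its own plus max_nrec(d) + 1 full subtrees. A hedged sketch of just that recurrence (cum_max_nrec below is a made-up helper; the split/merge percentages and free-list factories are omitted, and index 0 is assumed to describe the leaf level):

#include <stdint.h>

/* Sketch only: cumulative record capacity of a subtree whose root sits at
 * depth `depth`, given per-depth fanouts max_nrec[0..depth]. */
static uint64_t
cum_max_nrec(const unsigned *max_nrec, unsigned depth)
{
    uint64_t cum = max_nrec[0];   /* a leaf holds only its own records */
    unsigned d;

    for (d = 1; d <= depth; d++)
        cum = (uint64_t)(max_nrec[d] + 1) * cum + max_nrec[d];

    return cum;
}
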
398
399
/*-------------------------------------------------------------------------
400
 * Function:  H5B2__redistribute2
401
 *
402
 * Purpose: Redistribute records between two nodes
403
 *
404
 * Return:  Success:  Non-negative
405
 *    Failure:  Negative
406
 *
407
 *-------------------------------------------------------------------------
408
 */
409
herr_t
410
H5B2__redistribute2(H5B2_hdr_t *hdr, uint16_t depth, H5B2_internal_t *internal, unsigned idx)
411
0
{
412
0
    const H5AC_class_t *child_class;                             /* Pointer to child node's class info */
413
0
    haddr_t   left_addr = HADDR_UNDEF, right_addr = HADDR_UNDEF; /* Addresses of left & right child nodes */
414
0
    void     *left_child = NULL, *right_child = NULL;            /* Pointers to child nodes */
415
0
    uint16_t *left_nrec, *right_nrec;                            /* Pointers to child # of records */
416
0
    uint8_t  *left_native, *right_native;                        /* Pointers to childs' native records */
417
0
    H5B2_node_ptr_t *left_node_ptrs  = NULL,
418
0
                    *right_node_ptrs = NULL;            /* Pointers to childs' node pointer info */
419
0
    hssize_t left_moved_nrec = 0, right_moved_nrec = 0; /* Number of records moved, for internal redistrib */
420
0
    unsigned left_child_flags  = H5AC__NO_FLAGS_SET,
421
0
             right_child_flags = H5AC__NO_FLAGS_SET; /* Flags for unprotecting child nodes */
422
0
    herr_t ret_value           = SUCCEED;            /* Return value */
423
424
0
    FUNC_ENTER_PACKAGE
425
426
    /* Check arguments. */
427
0
    assert(hdr);
428
0
    assert(internal);
429
430
    /* Check for the kind of B-tree node to redistribute */
431
0
    if (depth > 1) {
432
0
        H5B2_internal_t *left_internal;  /* Pointer to left internal node */
433
0
        H5B2_internal_t *right_internal; /* Pointer to right internal node */
434
435
        /* Setup information for unlocking child nodes */
436
0
        child_class = H5AC_BT2_INT;
437
438
        /* Lock left & right B-tree child nodes */
439
        /* (Shadow both nodes if doing SWMR writes) */
440
0
        if (NULL == (left_internal =
441
0
                         H5B2__protect_internal(hdr, internal, &internal->node_ptrs[idx],
442
0
                                                (uint16_t)(depth - 1), hdr->swmr_write, H5AC__NO_FLAGS_SET)))
443
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree leaf node");
444
0
        left_addr = internal->node_ptrs[idx].addr;
445
0
        if (NULL == (right_internal =
446
0
                         H5B2__protect_internal(hdr, internal, &internal->node_ptrs[idx + 1],
447
0
                                                (uint16_t)(depth - 1), hdr->swmr_write, H5AC__NO_FLAGS_SET)))
448
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree leaf node");
449
0
        right_addr = internal->node_ptrs[idx + 1].addr;
450
451
        /* More setup for child nodes */
452
0
        left_child      = left_internal;
453
0
        right_child     = right_internal;
454
0
        left_nrec       = &(left_internal->nrec);
455
0
        right_nrec      = &(right_internal->nrec);
456
0
        left_native     = left_internal->int_native;
457
0
        right_native    = right_internal->int_native;
458
0
        left_node_ptrs  = left_internal->node_ptrs;
459
0
        right_node_ptrs = right_internal->node_ptrs;
460
0
    } /* end if */
461
0
    else {
462
0
        H5B2_leaf_t *left_leaf;  /* Pointer to left leaf node */
463
0
        H5B2_leaf_t *right_leaf; /* Pointer to right leaf node */
464
465
        /* Setup information for unlocking child nodes */
466
0
        child_class = H5AC_BT2_LEAF;
467
468
        /* Lock left & right B-tree child nodes */
469
        /* (Shadow both nodes if doing SWMR writes) */
470
0
        if (NULL == (left_leaf = H5B2__protect_leaf(hdr, internal, &internal->node_ptrs[idx], hdr->swmr_write,
471
0
                                                    H5AC__NO_FLAGS_SET)))
472
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree leaf node");
473
0
        left_addr = internal->node_ptrs[idx].addr;
474
0
        if (NULL == (right_leaf = H5B2__protect_leaf(hdr, internal, &internal->node_ptrs[idx + 1],
475
0
                                                     hdr->swmr_write, H5AC__NO_FLAGS_SET)))
476
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree leaf node");
477
0
        right_addr = internal->node_ptrs[idx + 1].addr;
478
479
        /* More setup for child nodes */
480
0
        left_child   = left_leaf;
481
0
        right_child  = right_leaf;
482
0
        left_nrec    = &(left_leaf->nrec);
483
0
        right_nrec   = &(right_leaf->nrec);
484
0
        left_native  = left_leaf->leaf_native;
485
0
        right_native = right_leaf->leaf_native;
486
0
    } /* end else */
487
488
#ifdef H5B2_DEBUG
489
    H5B2__assert_internal((hsize_t)0, hdr, internal);
490
    if (depth > 1) {
491
        H5B2__assert_internal2(internal->node_ptrs[idx].all_nrec, hdr, (H5B2_internal_t *)left_child,
492
                               (H5B2_internal_t *)right_child);
493
        H5B2__assert_internal2(internal->node_ptrs[idx + 1].all_nrec, hdr, (H5B2_internal_t *)right_child,
494
                               (H5B2_internal_t *)left_child);
495
    } /* end if */
496
    else {
497
        H5B2__assert_leaf2(hdr, (H5B2_leaf_t *)left_child, (H5B2_leaf_t *)right_child);
498
        H5B2__assert_leaf(hdr, (H5B2_leaf_t *)right_child);
499
    }  /* end else */
500
#endif /* H5B2_DEBUG */
501
502
    /* Determine whether to shuffle records left or right */
503
0
    if (*left_nrec < *right_nrec) {
504
        /* Moving record from right node to left */
505
506
0
        uint16_t new_right_nrec =
507
0
            (uint16_t)((*left_nrec + *right_nrec) / 2); /* New number of records for right child */
508
0
        uint16_t move_nrec =
509
0
            (uint16_t)(*right_nrec - new_right_nrec); /* Number of records to move from right node to left */
510
511
        /* Copy record from parent node down into left child */
512
0
        H5MM_memcpy(H5B2_NAT_NREC(left_native, hdr, *left_nrec), H5B2_INT_NREC(internal, hdr, idx),
513
0
                    hdr->cls->nrec_size);
514
515
        /* See if we need to move records from right node */
516
0
        if (move_nrec > 1)
517
0
            H5MM_memcpy(H5B2_NAT_NREC(left_native, hdr, (*left_nrec + 1)),
518
0
                        H5B2_NAT_NREC(right_native, hdr, 0), hdr->cls->nrec_size * (size_t)(move_nrec - 1));
519
520
        /* Move record from right node into parent node */
521
0
        H5MM_memcpy(H5B2_INT_NREC(internal, hdr, idx), H5B2_NAT_NREC(right_native, hdr, (move_nrec - 1)),
522
0
                    hdr->cls->nrec_size);
523
524
        /* Slide records in right node down */
525
0
        memmove(H5B2_NAT_NREC(right_native, hdr, 0), H5B2_NAT_NREC(right_native, hdr, move_nrec),
526
0
                hdr->cls->nrec_size * new_right_nrec);
527
528
        /* Handle node pointers, if we have an internal node */
529
0
        if (depth > 1) {
530
0
            hsize_t  moved_nrec = move_nrec; /* Total number of records moved, for internal redistrib */
531
0
            unsigned u;                      /* Local index variable */
532
533
            /* Count the number of records being moved */
534
0
            for (u = 0; u < move_nrec; u++)
535
0
                moved_nrec += right_node_ptrs[u].all_nrec;
536
0
            H5_CHECKED_ASSIGN(left_moved_nrec, hssize_t, moved_nrec, hsize_t);
537
0
            right_moved_nrec -= (hssize_t)moved_nrec;
538
539
            /* Copy node pointers from right node to left */
540
0
            H5MM_memcpy(&(left_node_ptrs[*left_nrec + 1]), &(right_node_ptrs[0]),
541
0
                        sizeof(H5B2_node_ptr_t) * move_nrec);
542
543
            /* Slide node pointers in right node down */
544
0
            memmove(&(right_node_ptrs[0]), &(right_node_ptrs[move_nrec]),
545
0
                    sizeof(H5B2_node_ptr_t) * (new_right_nrec + (unsigned)1));
546
0
        } /* end if */
547
548
        /* Update flush dependencies for grandchildren, if using SWMR */
549
0
        if (hdr->swmr_write && depth > 1)
550
0
            if (H5B2__update_child_flush_depends(hdr, depth, left_node_ptrs, (unsigned)(*left_nrec + 1),
551
0
                                                 (unsigned)(*left_nrec + move_nrec + 1), right_child,
552
0
                                                 left_child) < 0)
553
0
                HGOTO_ERROR(H5E_BTREE, H5E_CANTUPDATE, FAIL, "unable to update child nodes to new parent");
554
555
        /* Update number of records in child nodes */
556
0
        *left_nrec  = (uint16_t)(*left_nrec + move_nrec);
557
0
        *right_nrec = new_right_nrec;
558
559
        /* Mark nodes as dirty */
560
0
        left_child_flags |= H5AC__DIRTIED_FLAG;
561
0
        right_child_flags |= H5AC__DIRTIED_FLAG;
562
0
    } /* end if */
563
0
    else {
564
        /* Moving record from left node to right */
565
566
0
        uint16_t new_left_nrec =
567
0
            (uint16_t)((*left_nrec + *right_nrec) / 2); /* New number of records for left child */
568
0
        uint16_t move_nrec =
569
0
            (uint16_t)(*left_nrec - new_left_nrec); /* Number of records to move from left node to right */
570
571
        /* Sanity check */
572
0
        assert(*left_nrec > *right_nrec);
573
574
        /* Slide records in right node up */
575
0
        memmove(H5B2_NAT_NREC(right_native, hdr, move_nrec), H5B2_NAT_NREC(right_native, hdr, 0),
576
0
                hdr->cls->nrec_size * (*right_nrec));
577
578
        /* Copy record from parent node down into right child */
579
0
        H5MM_memcpy(H5B2_NAT_NREC(right_native, hdr, (move_nrec - 1)), H5B2_INT_NREC(internal, hdr, idx),
580
0
                    hdr->cls->nrec_size);
581
582
        /* See if we need to move records from left node */
583
0
        if (move_nrec > 1)
584
0
            H5MM_memcpy(H5B2_NAT_NREC(right_native, hdr, 0),
585
0
                        H5B2_NAT_NREC(left_native, hdr, ((*left_nrec - move_nrec) + 1)),
586
0
                        hdr->cls->nrec_size * (size_t)(move_nrec - 1));
587
588
        /* Move record from left node into parent node */
589
0
        H5MM_memcpy(H5B2_INT_NREC(internal, hdr, idx),
590
0
                    H5B2_NAT_NREC(left_native, hdr, (*left_nrec - move_nrec)), hdr->cls->nrec_size);
591
592
        /* Handle node pointers, if we have an internal node */
593
0
        if (depth > 1) {
594
0
            hsize_t  moved_nrec = move_nrec; /* Total number of records moved, for internal redistrib */
595
0
            unsigned u;                      /* Local index variable */
596
597
            /* Slide node pointers in right node up */
598
0
            memmove(&(right_node_ptrs[move_nrec]), &(right_node_ptrs[0]),
599
0
                    sizeof(H5B2_node_ptr_t) * (size_t)(*right_nrec + 1));
600
601
            /* Copy node pointers from left node to right */
602
0
            H5MM_memcpy(&(right_node_ptrs[0]), &(left_node_ptrs[new_left_nrec + 1]),
603
0
                        sizeof(H5B2_node_ptr_t) * move_nrec);
604
605
            /* Count the number of records being moved */
606
0
            for (u = 0; u < move_nrec; u++)
607
0
                moved_nrec += right_node_ptrs[u].all_nrec;
608
0
            left_moved_nrec -= (hssize_t)moved_nrec;
609
0
            H5_CHECKED_ASSIGN(right_moved_nrec, hssize_t, moved_nrec, hsize_t);
610
0
        } /* end if */
611
612
        /* Update flush dependencies for grandchildren, if using SWMR */
613
0
        if (hdr->swmr_write && depth > 1)
614
0
            if (H5B2__update_child_flush_depends(hdr, depth, right_node_ptrs, 0, (unsigned)move_nrec,
615
0
                                                 left_child, right_child) < 0)
616
0
                HGOTO_ERROR(H5E_BTREE, H5E_CANTUPDATE, FAIL, "unable to update child nodes to new parent");
617
618
        /* Update number of records in child nodes */
619
0
        *left_nrec  = new_left_nrec;
620
0
        *right_nrec = (uint16_t)(*right_nrec + move_nrec);
621
622
        /* Mark nodes as dirty */
623
0
        left_child_flags |= H5AC__DIRTIED_FLAG;
624
0
        right_child_flags |= H5AC__DIRTIED_FLAG;
625
0
    } /* end else */
626
627
    /* Update # of records in child nodes */
628
0
    internal->node_ptrs[idx].node_nrec     = *left_nrec;
629
0
    internal->node_ptrs[idx + 1].node_nrec = *right_nrec;
630
631
    /* Update total # of records in child B-trees */
632
0
    if (depth > 1) {
633
0
        internal->node_ptrs[idx].all_nrec =
634
0
            (hsize_t)((hssize_t)internal->node_ptrs[idx].all_nrec + left_moved_nrec);
635
0
        internal->node_ptrs[idx + 1].all_nrec =
636
0
            (hsize_t)((hssize_t)internal->node_ptrs[idx + 1].all_nrec + right_moved_nrec);
637
0
    } /* end if */
638
0
    else {
639
0
        internal->node_ptrs[idx].all_nrec     = internal->node_ptrs[idx].node_nrec;
640
0
        internal->node_ptrs[idx + 1].all_nrec = internal->node_ptrs[idx + 1].node_nrec;
641
0
    } /* end else */
642
643
#ifdef H5B2_DEBUG
644
    H5B2__assert_internal((hsize_t)0, hdr, internal);
645
    if (depth > 1) {
646
        H5B2__assert_internal2(internal->node_ptrs[idx].all_nrec, hdr, (H5B2_internal_t *)left_child,
647
                               (H5B2_internal_t *)right_child);
648
        H5B2__assert_internal2(internal->node_ptrs[idx + 1].all_nrec, hdr, (H5B2_internal_t *)right_child,
649
                               (H5B2_internal_t *)left_child);
650
    } /* end if */
651
    else {
652
        H5B2__assert_leaf2(hdr, (H5B2_leaf_t *)left_child, (H5B2_leaf_t *)right_child);
653
        H5B2__assert_leaf(hdr, (H5B2_leaf_t *)right_child);
654
    }  /* end else */
655
#endif /* H5B2_DEBUG */
656
657
0
done:
658
    /* Release child nodes (marked as dirty) */
659
0
    if (left_child && H5AC_unprotect(hdr->f, child_class, left_addr, left_child, left_child_flags) < 0)
660
0
        HDONE_ERROR(H5E_BTREE, H5E_CANTUNPROTECT, FAIL, "unable to release B-tree child node");
661
0
    if (right_child && H5AC_unprotect(hdr->f, child_class, right_addr, right_child, right_child_flags) < 0)
662
0
        HDONE_ERROR(H5E_BTREE, H5E_CANTUNPROTECT, FAIL, "unable to release B-tree child node");
663
664
0
    FUNC_LEAVE_NOAPI(ret_value)
665
0
} /* end H5B2__redistribute2() */
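
The two-node redistribution above evens out the counts: the larger side shrinks to floor((left + right) / 2) and records rotate through the parent's separator slot. A sketch of the count arithmetic only (redistribute2_counts is a made-up helper, not HDF5 API):

/* Sketch only.  E.g. left = 2, right = 8 gives 5 and 5; the net effect is that
 * three records shift left, one of them rotating through the parent's
 * separator slot. */
static void
redistribute2_counts(unsigned left_nrec, unsigned right_nrec,
                     unsigned *new_left, unsigned *new_right)
{
    if (left_nrec < right_nrec) {
        *new_right = (left_nrec + right_nrec) / 2;
        *new_left  = left_nrec + (right_nrec - *new_right);
    }
    else {
        *new_left  = (left_nrec + right_nrec) / 2;
        *new_right = right_nrec + (left_nrec - *new_left);
    }
}
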
666
667
/*-------------------------------------------------------------------------
668
 * Function:  H5B2__redistribute3
669
 *
670
 * Purpose: Redistribute records between three nodes
671
 *
672
 * Return:  Success:  Non-negative
673
 *    Failure:  Negative
674
 *
675
 *-------------------------------------------------------------------------
676
 */
677
herr_t
678
H5B2__redistribute3(H5B2_hdr_t *hdr, uint16_t depth, H5B2_internal_t *internal, unsigned *internal_flags_ptr,
679
                    unsigned idx)
680
0
{
681
0
    H5B2_node_ptr_t *left_node_ptrs      = NULL,
682
0
                    *right_node_ptrs     = NULL;                 /* Pointers to childs' node pointer info */
683
0
    H5B2_node_ptr_t    *middle_node_ptrs = NULL;                 /* Pointers to childs' node pointer info */
684
0
    const H5AC_class_t *child_class;                             /* Pointer to child node's class info */
685
0
    haddr_t   left_addr = HADDR_UNDEF, right_addr = HADDR_UNDEF; /* Addresses of left & right child nodes */
686
0
    haddr_t   middle_addr = HADDR_UNDEF;                         /* Address of middle child node */
687
0
    void     *left_child = NULL, *right_child = NULL;            /* Pointers to child nodes */
688
0
    void     *middle_child = NULL;                               /* Pointers to middle child node */
689
0
    uint16_t *left_nrec, *right_nrec;                            /* Pointers to child # of records */
690
0
    uint16_t *middle_nrec;                                       /* Pointers to middle child # of records */
691
0
    uint8_t  *left_native, *right_native;                        /* Pointers to childs' native records */
692
0
    uint8_t  *middle_native;                             /* Pointers to middle child's native records */
693
0
    hssize_t  left_moved_nrec = 0, right_moved_nrec = 0; /* Number of records moved, for internal split */
694
0
    hssize_t  middle_moved_nrec = 0;                     /* Number of records moved, for internal split */
695
0
    unsigned  left_child_flags  = H5AC__NO_FLAGS_SET,
696
0
             right_child_flags  = H5AC__NO_FLAGS_SET; /* Flags for unprotecting child nodes */
697
0
    unsigned middle_child_flags = H5AC__NO_FLAGS_SET; /* Flags for unprotecting child nodes */
698
0
    herr_t   ret_value          = SUCCEED;            /* Return value */
699
700
0
    FUNC_ENTER_PACKAGE
701
702
    /* Check arguments. */
703
0
    assert(hdr);
704
0
    assert(internal);
705
0
    assert(internal_flags_ptr);
706
707
    /* Check for the kind of B-tree node to redistribute */
708
0
    if (depth > 1) {
709
0
        H5B2_internal_t *left_internal;   /* Pointer to left internal node */
710
0
        H5B2_internal_t *middle_internal; /* Pointer to middle internal node */
711
0
        H5B2_internal_t *right_internal;  /* Pointer to right internal node */
712
713
        /* Setup information for unlocking child nodes */
714
0
        child_class = H5AC_BT2_INT;
715
716
        /* Lock B-tree child nodes */
717
        /* (Shadow all nodes if doing SWMR writes) */
718
0
        if (NULL == (left_internal =
719
0
                         H5B2__protect_internal(hdr, internal, &internal->node_ptrs[idx - 1],
720
0
                                                (uint16_t)(depth - 1), hdr->swmr_write, H5AC__NO_FLAGS_SET)))
721
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree internal node");
722
0
        left_addr = internal->node_ptrs[idx - 1].addr;
723
0
        if (NULL == (middle_internal =
724
0
                         H5B2__protect_internal(hdr, internal, &internal->node_ptrs[idx],
725
0
                                                (uint16_t)(depth - 1), hdr->swmr_write, H5AC__NO_FLAGS_SET)))
726
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree internal node");
727
0
        middle_addr = internal->node_ptrs[idx].addr;
728
0
        if (NULL == (right_internal =
729
0
                         H5B2__protect_internal(hdr, internal, &internal->node_ptrs[idx + 1],
730
0
                                                (uint16_t)(depth - 1), hdr->swmr_write, H5AC__NO_FLAGS_SET)))
731
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree internal node");
732
0
        right_addr = internal->node_ptrs[idx + 1].addr;
733
734
        /* More setup for child nodes */
735
0
        left_child       = left_internal;
736
0
        middle_child     = middle_internal;
737
0
        right_child      = right_internal;
738
0
        left_nrec        = &(left_internal->nrec);
739
0
        middle_nrec      = &(middle_internal->nrec);
740
0
        right_nrec       = &(right_internal->nrec);
741
0
        left_native      = left_internal->int_native;
742
0
        middle_native    = middle_internal->int_native;
743
0
        right_native     = right_internal->int_native;
744
0
        left_node_ptrs   = left_internal->node_ptrs;
745
0
        middle_node_ptrs = middle_internal->node_ptrs;
746
0
        right_node_ptrs  = right_internal->node_ptrs;
747
0
    } /* end if */
748
0
    else {
749
0
        H5B2_leaf_t *left_leaf;   /* Pointer to left leaf node */
750
0
        H5B2_leaf_t *middle_leaf; /* Pointer to middle leaf node */
751
0
        H5B2_leaf_t *right_leaf;  /* Pointer to right leaf node */
752
753
        /* Setup information for unlocking child nodes */
754
0
        child_class = H5AC_BT2_LEAF;
755
756
        /* Lock B-tree child nodes */
757
        /* (Shadow all nodes if doing SWMR writes) */
758
0
        if (NULL == (left_leaf = H5B2__protect_leaf(hdr, internal, &internal->node_ptrs[idx - 1],
759
0
                                                    hdr->swmr_write, H5AC__NO_FLAGS_SET)))
760
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree leaf node");
761
0
        left_addr = internal->node_ptrs[idx - 1].addr;
762
0
        if (NULL == (middle_leaf = H5B2__protect_leaf(hdr, internal, &internal->node_ptrs[idx],
763
0
                                                      hdr->swmr_write, H5AC__NO_FLAGS_SET)))
764
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree leaf node");
765
0
        middle_addr = internal->node_ptrs[idx].addr;
766
0
        if (NULL == (right_leaf = H5B2__protect_leaf(hdr, internal, &internal->node_ptrs[idx + 1],
767
0
                                                     hdr->swmr_write, H5AC__NO_FLAGS_SET)))
768
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree leaf node");
769
0
        right_addr = internal->node_ptrs[idx + 1].addr;
770
771
        /* More setup for child nodes */
772
0
        left_child    = left_leaf;
773
0
        middle_child  = middle_leaf;
774
0
        right_child   = right_leaf;
775
0
        left_nrec     = &(left_leaf->nrec);
776
0
        middle_nrec   = &(middle_leaf->nrec);
777
0
        right_nrec    = &(right_leaf->nrec);
778
0
        left_native   = left_leaf->leaf_native;
779
0
        middle_native = middle_leaf->leaf_native;
780
0
        right_native  = right_leaf->leaf_native;
781
0
    } /* end else */
782
783
    /* Redistribute records */
784
0
    {
785
        /* Compute new # of records in each node */
786
0
        unsigned total_nrec      = (unsigned)(*left_nrec + *middle_nrec + *right_nrec + 2);
787
0
        uint16_t new_middle_nrec = (uint16_t)((total_nrec - 2) / 3);
788
0
        uint16_t new_left_nrec   = (uint16_t)(((total_nrec - 2) - new_middle_nrec) / 2);
789
0
        uint16_t new_right_nrec  = (uint16_t)((total_nrec - 2) - (unsigned)(new_left_nrec + new_middle_nrec));
790
0
        uint16_t curr_middle_nrec = *middle_nrec;
791
792
        /* Sanity check rounding */
793
0
        assert(new_middle_nrec <= new_left_nrec);
794
0
        assert(new_middle_nrec <= new_right_nrec);
795
796
        /* Move records into left node */
797
0
        if (new_left_nrec > *left_nrec) {
798
0
            uint16_t moved_middle_nrec = 0; /* Number of records moved into left node */
799
800
            /* Move left parent record down to left node */
801
0
            H5MM_memcpy(H5B2_NAT_NREC(left_native, hdr, *left_nrec), H5B2_INT_NREC(internal, hdr, idx - 1),
802
0
                        hdr->cls->nrec_size);
803
804
            /* Move records from middle node into left node */
805
0
            if ((new_left_nrec - 1) > *left_nrec) {
806
0
                moved_middle_nrec = (uint16_t)(new_left_nrec - (*left_nrec + 1));
807
0
                H5MM_memcpy(H5B2_NAT_NREC(left_native, hdr, *left_nrec + 1),
808
0
                            H5B2_NAT_NREC(middle_native, hdr, 0), hdr->cls->nrec_size * moved_middle_nrec);
809
0
            } /* end if */
810
811
            /* Move record from middle node up to parent node */
812
0
            H5MM_memcpy(H5B2_INT_NREC(internal, hdr, idx - 1),
813
0
                        H5B2_NAT_NREC(middle_native, hdr, moved_middle_nrec), hdr->cls->nrec_size);
814
0
            moved_middle_nrec++;
815
816
            /* Slide records in middle node down */
817
0
            memmove(H5B2_NAT_NREC(middle_native, hdr, 0),
818
0
                    H5B2_NAT_NREC(middle_native, hdr, moved_middle_nrec),
819
0
                    hdr->cls->nrec_size * (size_t)(*middle_nrec - moved_middle_nrec));
820
821
            /* Move node pointers also if this is an internal node */
822
0
            if (depth > 1) {
823
0
                hsize_t  moved_nrec; /* Total number of records moved, for internal redistrib */
824
0
                unsigned move_nptrs; /* Number of node pointers to move */
825
0
                unsigned u;          /* Local index variable */
826
827
                /* Move middle node pointers into left node */
828
0
                move_nptrs = (unsigned)(new_left_nrec - *left_nrec);
829
0
                H5MM_memcpy(&(left_node_ptrs[*left_nrec + 1]), &(middle_node_ptrs[0]),
830
0
                            sizeof(H5B2_node_ptr_t) * move_nptrs);
831
832
                /* Count the number of records being moved into the left node */
833
0
                for (u = 0, moved_nrec = 0; u < move_nptrs; u++)
834
0
                    moved_nrec += middle_node_ptrs[u].all_nrec;
835
0
                left_moved_nrec = (hssize_t)(moved_nrec + move_nptrs);
836
0
                middle_moved_nrec -= (hssize_t)(moved_nrec + move_nptrs);
837
838
                /* Slide the node pointers in middle node down */
839
0
                memmove(&(middle_node_ptrs[0]), &(middle_node_ptrs[move_nptrs]),
840
0
                        sizeof(H5B2_node_ptr_t) * ((*middle_nrec - move_nptrs) + 1));
841
0
            } /* end if */
842
843
            /* Update flush dependencies for grandchildren, if using SWMR */
844
0
            if (hdr->swmr_write && depth > 1)
845
0
                if (H5B2__update_child_flush_depends(hdr, depth, left_node_ptrs, (unsigned)(*left_nrec + 1),
846
0
                                                     (unsigned)(*left_nrec + moved_middle_nrec + 1),
847
0
                                                     middle_child, left_child) < 0)
848
0
                    HGOTO_ERROR(H5E_BTREE, H5E_CANTUPDATE, FAIL,
849
0
                                "unable to update child nodes to new parent");
850
851
            /* Update the current number of records in middle node */
852
0
            curr_middle_nrec = (uint16_t)(curr_middle_nrec - moved_middle_nrec);
853
854
            /* Mark nodes as dirty */
855
0
            left_child_flags |= H5AC__DIRTIED_FLAG;
856
0
            middle_child_flags |= H5AC__DIRTIED_FLAG;
857
0
        } /* end if */
858
859
        /* Move records into right node */
860
0
        if (new_right_nrec > *right_nrec) {
861
0
            unsigned right_nrec_move =
862
0
                (unsigned)(new_right_nrec - *right_nrec); /* Number of records to move out of right node */
863
864
            /* Slide records in right node up */
865
0
            memmove(H5B2_NAT_NREC(right_native, hdr, right_nrec_move), H5B2_NAT_NREC(right_native, hdr, 0),
866
0
                    hdr->cls->nrec_size * (*right_nrec));
867
868
            /* Move right parent record down to right node */
869
0
            H5MM_memcpy(H5B2_NAT_NREC(right_native, hdr, right_nrec_move - 1),
870
0
                        H5B2_INT_NREC(internal, hdr, idx), hdr->cls->nrec_size);
871
872
            /* Move records from middle node into right node */
873
0
            if (right_nrec_move > 1)
874
0
                H5MM_memcpy(H5B2_NAT_NREC(right_native, hdr, 0),
875
0
                            H5B2_NAT_NREC(middle_native, hdr, ((curr_middle_nrec - right_nrec_move) + 1)),
876
0
                            hdr->cls->nrec_size * (right_nrec_move - 1));
877
878
            /* Move record from middle node up to parent node */
879
0
            H5MM_memcpy(H5B2_INT_NREC(internal, hdr, idx),
880
0
                        H5B2_NAT_NREC(middle_native, hdr, (curr_middle_nrec - right_nrec_move)),
881
0
                        hdr->cls->nrec_size);
882
883
            /* Move node pointers also if this is an internal node */
884
0
            if (depth > 1) {
885
0
                hsize_t  moved_nrec; /* Total number of records moved, for internal redistrib */
886
0
                unsigned u;          /* Local index variable */
887
888
                /* Slide the node pointers in right node up */
889
0
                memmove(&(right_node_ptrs[right_nrec_move]), &(right_node_ptrs[0]),
890
0
                        sizeof(H5B2_node_ptr_t) * (size_t)(*right_nrec + 1));
891
892
                /* Move middle node pointers into right node */
893
0
                H5MM_memcpy(&(right_node_ptrs[0]),
894
0
                            &(middle_node_ptrs[(curr_middle_nrec - right_nrec_move) + 1]),
895
0
                            sizeof(H5B2_node_ptr_t) * right_nrec_move);
896
897
                /* Count the number of records being moved into the right node */
898
0
                for (u = 0, moved_nrec = 0; u < right_nrec_move; u++)
899
0
                    moved_nrec += right_node_ptrs[u].all_nrec;
900
0
                right_moved_nrec = (hssize_t)(moved_nrec + right_nrec_move);
901
0
                middle_moved_nrec -= (hssize_t)(moved_nrec + right_nrec_move);
902
0
            } /* end if */
903
904
            /* Update flush dependencies for grandchildren, if using SWMR */
905
0
            if (hdr->swmr_write && depth > 1)
906
0
                if (H5B2__update_child_flush_depends(hdr, depth, right_node_ptrs, 0,
907
0
                                                     (unsigned)right_nrec_move, middle_child,
908
0
                                                     right_child) < 0)
909
0
                    HGOTO_ERROR(H5E_BTREE, H5E_CANTUPDATE, FAIL,
910
0
                                "unable to update child nodes to new parent");
911
912
            /* Update the current number of records in middle node */
913
0
            curr_middle_nrec = (uint16_t)(curr_middle_nrec - right_nrec_move);
914
915
            /* Mark nodes as dirty */
916
0
            middle_child_flags |= H5AC__DIRTIED_FLAG;
917
0
            right_child_flags |= H5AC__DIRTIED_FLAG;
918
0
        } /* end if */
919
920
        /* Move records out of left node */
921
0
        if (new_left_nrec < *left_nrec) {
922
0
            unsigned left_nrec_move =
923
0
                (unsigned)(*left_nrec - new_left_nrec); /* Number of records to move out of left node */
924
925
            /* Slide middle records up */
926
0
            memmove(H5B2_NAT_NREC(middle_native, hdr, left_nrec_move), H5B2_NAT_NREC(middle_native, hdr, 0),
927
0
                    hdr->cls->nrec_size * curr_middle_nrec);
928
929
            /* Move left parent record down to middle node */
930
0
            H5MM_memcpy(H5B2_NAT_NREC(middle_native, hdr, left_nrec_move - 1),
931
0
                        H5B2_INT_NREC(internal, hdr, idx - 1), hdr->cls->nrec_size);
932
933
            /* Move left records to middle node */
934
0
            if (left_nrec_move > 1)
935
0
                memmove(H5B2_NAT_NREC(middle_native, hdr, 0),
936
0
                        H5B2_NAT_NREC(left_native, hdr, new_left_nrec + 1),
937
0
                        hdr->cls->nrec_size * (left_nrec_move - 1));
938
939
            /* Move left parent record up from left node */
940
0
            H5MM_memcpy(H5B2_INT_NREC(internal, hdr, idx - 1), H5B2_NAT_NREC(left_native, hdr, new_left_nrec),
941
0
                        hdr->cls->nrec_size);
942
943
            /* Move node pointers also if this is an internal node */
944
0
            if (depth > 1) {
945
0
                hsize_t  moved_nrec; /* Total number of records moved, for internal redistrib */
946
0
                unsigned u;          /* Local index variable */
947
948
                /* Slide the node pointers in middle node up */
949
0
                memmove(&(middle_node_ptrs[left_nrec_move]), &(middle_node_ptrs[0]),
950
0
                        sizeof(H5B2_node_ptr_t) * (size_t)(curr_middle_nrec + 1));
951
952
                /* Move left node pointers into middle node */
953
0
                H5MM_memcpy(&(middle_node_ptrs[0]), &(left_node_ptrs[new_left_nrec + 1]),
954
0
                            sizeof(H5B2_node_ptr_t) * left_nrec_move);
955
956
                /* Count the number of records being moved into the left node */
957
0
                for (u = 0, moved_nrec = 0; u < left_nrec_move; u++)
958
0
                    moved_nrec += middle_node_ptrs[u].all_nrec;
959
0
                left_moved_nrec -= (hssize_t)(moved_nrec + left_nrec_move);
960
0
                middle_moved_nrec += (hssize_t)(moved_nrec + left_nrec_move);
961
0
            } /* end if */
962
963
            /* Update flush dependencies for grandchildren, if using SWMR */
964
0
            if (hdr->swmr_write && depth > 1)
965
0
                if (H5B2__update_child_flush_depends(hdr, depth, middle_node_ptrs, 0,
966
0
                                                     (unsigned)left_nrec_move, left_child, middle_child) < 0)
967
0
                    HGOTO_ERROR(H5E_BTREE, H5E_CANTUPDATE, FAIL,
968
0
                                "unable to update child nodes to new parent");
969
970
            /* Update the current number of records in middle node */
971
0
            curr_middle_nrec = (uint16_t)(curr_middle_nrec + left_nrec_move);
972
973
            /* Mark nodes as dirty */
974
0
            left_child_flags |= H5AC__DIRTIED_FLAG;
975
0
            middle_child_flags |= H5AC__DIRTIED_FLAG;
976
0
        } /* end if */
977
978
        /* Move records out of right node */
979
0
        if (new_right_nrec < *right_nrec) {
980
0
            unsigned right_nrec_move =
981
0
                (unsigned)(*right_nrec - new_right_nrec); /* Number of records to move out of right node */
982
983
            /* Move right parent record down to middle node */
984
0
            H5MM_memcpy(H5B2_NAT_NREC(middle_native, hdr, curr_middle_nrec),
985
0
                        H5B2_INT_NREC(internal, hdr, idx), hdr->cls->nrec_size);
986
987
            /* Move right records to middle node */
988
0
            memmove(H5B2_NAT_NREC(middle_native, hdr, (curr_middle_nrec + 1)),
989
0
                    H5B2_NAT_NREC(right_native, hdr, 0), hdr->cls->nrec_size * (right_nrec_move - 1));
990
991
            /* Move right parent record up from right node */
992
0
            H5MM_memcpy(H5B2_INT_NREC(internal, hdr, idx),
993
0
                        H5B2_NAT_NREC(right_native, hdr, right_nrec_move - 1), hdr->cls->nrec_size);
994
995
            /* Slide right records down */
996
0
            memmove(H5B2_NAT_NREC(right_native, hdr, 0), H5B2_NAT_NREC(right_native, hdr, right_nrec_move),
997
0
                    hdr->cls->nrec_size * new_right_nrec);
998
999
            /* Move node pointers also if this is an internal node */
1000
0
            if (depth > 1) {
1001
0
                hsize_t  moved_nrec; /* Total number of records moved, for internal redistrib */
1002
0
                unsigned u;          /* Local index variable */
1003
1004
                /* Move right node pointers into middle node */
1005
0
                H5MM_memcpy(&(middle_node_ptrs[curr_middle_nrec + 1]), &(right_node_ptrs[0]),
1006
0
                            sizeof(H5B2_node_ptr_t) * right_nrec_move);
1007
1008
                /* Count the number of records being moved into the right node */
1009
0
                for (u = 0, moved_nrec = 0; u < right_nrec_move; u++)
1010
0
                    moved_nrec += right_node_ptrs[u].all_nrec;
1011
0
                right_moved_nrec -= (hssize_t)(moved_nrec + right_nrec_move);
1012
0
                middle_moved_nrec += (hssize_t)(moved_nrec + right_nrec_move);
1013
1014
                /* Slide the node pointers in right node down */
1015
0
                memmove(&(right_node_ptrs[0]), &(right_node_ptrs[right_nrec_move]),
1016
0
                        sizeof(H5B2_node_ptr_t) * (size_t)(new_right_nrec + 1));
1017
0
            } /* end if */
1018
1019
            /* Update flush dependencies for grandchildren, if using SWMR */
1020
0
            if (hdr->swmr_write && depth > 1)
1021
0
                if (H5B2__update_child_flush_depends(
1022
0
                        hdr, depth, middle_node_ptrs, (unsigned)(curr_middle_nrec + 1),
1023
0
                        (unsigned)(curr_middle_nrec + right_nrec_move + 1), right_child, middle_child) < 0)
1024
0
                    HGOTO_ERROR(H5E_BTREE, H5E_CANTUPDATE, FAIL,
1025
0
                                "unable to update child nodes to new parent");
1026
1027
            /* Mark nodes as dirty */
1028
0
            middle_child_flags |= H5AC__DIRTIED_FLAG;
1029
0
            right_child_flags |= H5AC__DIRTIED_FLAG;
1030
0
        } /* end if */
1031
1032
        /* Update # of records in nodes */
1033
0
        *left_nrec   = new_left_nrec;
1034
0
        *middle_nrec = new_middle_nrec;
1035
0
        *right_nrec  = new_right_nrec;
1036
0
    } /* end block */
1037
1038
    /* Update # of records in child nodes */
1039
0
    internal->node_ptrs[idx - 1].node_nrec = *left_nrec;
1040
0
    internal->node_ptrs[idx].node_nrec     = *middle_nrec;
1041
0
    internal->node_ptrs[idx + 1].node_nrec = *right_nrec;
1042
1043
    /* Update total # of records in child B-trees */
1044
0
    if (depth > 1) {
1045
0
        internal->node_ptrs[idx - 1].all_nrec =
1046
0
            (hsize_t)((hssize_t)internal->node_ptrs[idx - 1].all_nrec + left_moved_nrec);
1047
0
        internal->node_ptrs[idx].all_nrec =
1048
0
            (hsize_t)((hssize_t)internal->node_ptrs[idx].all_nrec + middle_moved_nrec);
1049
0
        internal->node_ptrs[idx + 1].all_nrec =
1050
0
            (hsize_t)((hssize_t)internal->node_ptrs[idx + 1].all_nrec + right_moved_nrec);
1051
0
    } /* end if */
1052
0
    else {
1053
0
        internal->node_ptrs[idx - 1].all_nrec = internal->node_ptrs[idx - 1].node_nrec;
1054
0
        internal->node_ptrs[idx].all_nrec     = internal->node_ptrs[idx].node_nrec;
1055
0
        internal->node_ptrs[idx + 1].all_nrec = internal->node_ptrs[idx + 1].node_nrec;
1056
0
    } /* end else */
1057
1058
    /* Mark parent as dirty */
1059
0
    *internal_flags_ptr |= H5AC__DIRTIED_FLAG;
1060
1061
#ifdef H5B2_DEBUG
1062
    H5B2__assert_internal((hsize_t)0, hdr, internal);
1063
    if (depth > 1) {
1064
        H5B2__assert_internal2(internal->node_ptrs[idx - 1].all_nrec, hdr, (H5B2_internal_t *)left_child,
1065
                               (H5B2_internal_t *)middle_child);
1066
        H5B2__assert_internal2(internal->node_ptrs[idx].all_nrec, hdr, (H5B2_internal_t *)middle_child,
1067
                               (H5B2_internal_t *)left_child);
1068
        H5B2__assert_internal2(internal->node_ptrs[idx].all_nrec, hdr, (H5B2_internal_t *)middle_child,
1069
                               (H5B2_internal_t *)right_child);
1070
        H5B2__assert_internal2(internal->node_ptrs[idx + 1].all_nrec, hdr, (H5B2_internal_t *)right_child,
1071
                               (H5B2_internal_t *)middle_child);
1072
    } /* end if */
1073
    else {
1074
        H5B2__assert_leaf2(hdr, (H5B2_leaf_t *)left_child, (H5B2_leaf_t *)middle_child);
1075
        H5B2__assert_leaf2(hdr, (H5B2_leaf_t *)middle_child, (H5B2_leaf_t *)right_child);
1076
        H5B2__assert_leaf(hdr, (H5B2_leaf_t *)right_child);
1077
    }  /* end else */
1078
#endif /* H5B2_DEBUG */
1079
1080
0
done:
1081
    /* Unlock child nodes (marked as dirty) */
1082
0
    if (left_child && H5AC_unprotect(hdr->f, child_class, left_addr, left_child, left_child_flags) < 0)
1083
0
        HDONE_ERROR(H5E_BTREE, H5E_CANTUNPROTECT, FAIL, "unable to release B-tree child node");
1084
0
    if (middle_child &&
1085
0
        H5AC_unprotect(hdr->f, child_class, middle_addr, middle_child, middle_child_flags) < 0)
1086
0
        HDONE_ERROR(H5E_BTREE, H5E_CANTUNPROTECT, FAIL, "unable to release B-tree child node");
1087
0
    if (right_child && H5AC_unprotect(hdr->f, child_class, right_addr, right_child, right_child_flags) < 0)
1088
0
        HDONE_ERROR(H5E_BTREE, H5E_CANTUNPROTECT, FAIL, "unable to release B-tree child node");
1089
1090
0
    FUNC_LEAVE_NOAPI(ret_value)
1091
0
} /* end H5B2__redistribute3() */
1092
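
/* Editorial aside (not part of the HDF5 source): the right-to-middle step of the
 * redistribution above can be pictured on plain integer arrays.  All names below are
 * hypothetical, and the sketch ignores node pointers and SWMR bookkeeping. */

#include <string.h>

/* Toy right-to-middle redistribution, assuming `move` >= 1 records leave the
 * right node: the parent separator drops into the middle node, `move - 1`
 * right records follow it, the last moved record is promoted into the parent,
 * and the right node's remainder slides down to close the gap. */
static void
toy_redistribute_right(int *middle, unsigned middle_n, int *right, unsigned right_n,
                       int *parent_sep, unsigned move)
{
    middle[middle_n] = *parent_sep;                                  /* demote separator */
    memcpy(&middle[middle_n + 1], right, (move - 1) * sizeof(int));  /* shift right records */
    *parent_sep = right[move - 1];                                   /* promote new separator */
    memmove(right, &right[move], (right_n - move) * sizeof(int));    /* slide remainder down */
}
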
1093
/*-------------------------------------------------------------------------
1094
 * Function:  H5B2__merge2
1095
 *
1096
 * Purpose: Perform a 2->1 node merge
1097
 *
1098
 * Return:  Success:  Non-negative
1099
 *
1100
 *    Failure:  Negative
1101
 *
1102
 *-------------------------------------------------------------------------
1103
 */
1104
herr_t
1105
H5B2__merge2(H5B2_hdr_t *hdr, uint16_t depth, H5B2_node_ptr_t *curr_node_ptr,
1106
             unsigned *parent_cache_info_flags_ptr, H5B2_internal_t *internal, unsigned *internal_flags_ptr,
1107
             unsigned idx)
1108
0
{
1109
0
    const H5AC_class_t *child_class;                             /* Pointer to child node's class info */
1110
0
    haddr_t   left_addr = HADDR_UNDEF, right_addr = HADDR_UNDEF; /* Addresses of left & right child nodes */
1111
0
    void     *left_child = NULL, *right_child = NULL;            /* Pointers to left & right child nodes */
1112
0
    uint16_t *left_nrec, *right_nrec;     /* Pointers to left & right child # of records */
1113
0
    uint8_t  *left_native, *right_native; /* Pointers to left & right children's native records */
1114
0
    H5B2_node_ptr_t *left_node_ptrs  = NULL,
1115
0
                    *right_node_ptrs = NULL; /* Pointers to children's node pointer info */
1116
0
    unsigned left_child_flags        = H5AC__NO_FLAGS_SET,
1117
0
             right_child_flags       = H5AC__NO_FLAGS_SET; /* Flags for unprotecting child nodes */
1118
0
    herr_t ret_value                 = SUCCEED;            /* Return value */
1119
1120
0
    FUNC_ENTER_PACKAGE
1121
1122
    /* Check arguments. */
1123
0
    assert(hdr);
1124
0
    assert(curr_node_ptr);
1125
0
    assert(internal);
1126
0
    assert(internal_flags_ptr);
1127
1128
    /* Check for the kind of B-tree node to merge */
1129
0
    if (depth > 1) {
1130
0
        H5B2_internal_t *left_internal;  /* Pointer to left internal node */
1131
0
        H5B2_internal_t *right_internal; /* Pointer to right internal node */
1132
1133
        /* Setup information for unlocking child nodes */
1134
0
        child_class = H5AC_BT2_INT;
1135
1136
        /* Lock left & right B-tree child nodes */
1137
        /* (Shadow the left node if doing SWMR writes) */
1138
0
        if (NULL == (left_internal =
1139
0
                         H5B2__protect_internal(hdr, internal, &internal->node_ptrs[idx],
1140
0
                                                (uint16_t)(depth - 1), hdr->swmr_write, H5AC__NO_FLAGS_SET)))
1141
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree internal node");
1142
0
        left_addr = internal->node_ptrs[idx].addr;
1143
0
        if (NULL ==
1144
0
            (right_internal = H5B2__protect_internal(hdr, internal, &internal->node_ptrs[idx + 1],
1145
0
                                                     (uint16_t)(depth - 1), false, H5AC__NO_FLAGS_SET)))
1146
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree internal node");
1147
0
        right_addr = internal->node_ptrs[idx + 1].addr;
1148
1149
        /* More setup for accessing child node information */
1150
0
        left_child      = left_internal;
1151
0
        right_child     = right_internal;
1152
0
        left_nrec       = &(left_internal->nrec);
1153
0
        right_nrec      = &(right_internal->nrec);
1154
0
        left_native     = left_internal->int_native;
1155
0
        right_native    = right_internal->int_native;
1156
0
        left_node_ptrs  = left_internal->node_ptrs;
1157
0
        right_node_ptrs = right_internal->node_ptrs;
1158
0
    } /* end if */
1159
0
    else {
1160
0
        H5B2_leaf_t *left_leaf;  /* Pointer to left leaf node */
1161
0
        H5B2_leaf_t *right_leaf; /* Pointer to right leaf node */
1162
1163
        /* Setup information for unlocking child nodes */
1164
0
        child_class = H5AC_BT2_LEAF;
1165
1166
        /* Lock left & right B-tree child nodes */
1167
        /* (Shadow the left node if doing SWMR writes) */
1168
0
        if (NULL == (left_leaf = H5B2__protect_leaf(hdr, internal, &internal->node_ptrs[idx], hdr->swmr_write,
1169
0
                                                    H5AC__NO_FLAGS_SET)))
1170
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree leaf node");
1171
0
        left_addr = internal->node_ptrs[idx].addr;
1172
0
        if (NULL == (right_leaf = H5B2__protect_leaf(hdr, internal, &internal->node_ptrs[idx + 1], false,
1173
0
                                                     H5AC__NO_FLAGS_SET)))
1174
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree leaf node");
1175
0
        right_addr = internal->node_ptrs[idx + 1].addr;
1176
1177
        /* More setup for accessing child node information */
1178
0
        left_child   = left_leaf;
1179
0
        right_child  = right_leaf;
1180
0
        left_nrec    = &(left_leaf->nrec);
1181
0
        right_nrec   = &(right_leaf->nrec);
1182
0
        left_native  = left_leaf->leaf_native;
1183
0
        right_native = right_leaf->leaf_native;
1184
0
    } /* end else */
1185
1186
    /* Redistribute records into left node */
1187
0
    {
1188
        /* Copy record from parent node to proper location */
1189
0
        H5MM_memcpy(H5B2_NAT_NREC(left_native, hdr, *left_nrec), H5B2_INT_NREC(internal, hdr, idx),
1190
0
                    hdr->cls->nrec_size);
1191
1192
        /* Copy records from right node to left node */
1193
0
        H5MM_memcpy(H5B2_NAT_NREC(left_native, hdr, *left_nrec + 1), H5B2_NAT_NREC(right_native, hdr, 0),
1194
0
                    hdr->cls->nrec_size * (*right_nrec));
1195
1196
        /* Copy node pointers from right node into left node */
1197
0
        if (depth > 1)
1198
0
            H5MM_memcpy(&(left_node_ptrs[*left_nrec + 1]), &(right_node_ptrs[0]),
1199
0
                        sizeof(H5B2_node_ptr_t) * (size_t)(*right_nrec + 1));
1200
1201
        /* Update flush dependencies for grandchildren, if using SWMR */
1202
0
        if (hdr->swmr_write && depth > 1)
1203
0
            if (H5B2__update_child_flush_depends(hdr, depth, left_node_ptrs, (unsigned)(*left_nrec + 1),
1204
0
                                                 (unsigned)(*left_nrec + *right_nrec + 2), right_child,
1205
0
                                                 left_child) < 0)
1206
0
                HGOTO_ERROR(H5E_BTREE, H5E_CANTUPDATE, FAIL, "unable to update child nodes to new parent");
1207
1208
        /* Update # of records in left node */
1209
0
        *left_nrec = (uint16_t)(*left_nrec + *right_nrec + 1);
1210
1211
        /* Mark nodes as dirty */
1212
0
        left_child_flags |= H5AC__DIRTIED_FLAG;
1213
0
        right_child_flags |= H5AC__DELETED_FLAG;
1214
0
        if (!(hdr->swmr_write))
1215
0
            right_child_flags |= H5AC__DIRTIED_FLAG | H5AC__FREE_FILE_SPACE_FLAG;
1216
0
    } /* end block */
1217
1218
    /* Update # of records in child nodes */
1219
0
    internal->node_ptrs[idx].node_nrec = *left_nrec;
1220
1221
    /* Update total # of records in child B-trees */
1222
0
    internal->node_ptrs[idx].all_nrec += internal->node_ptrs[idx + 1].all_nrec + 1;
1223
1224
    /* Slide records in parent node down, to eliminate demoted record */
1225
0
    if ((idx + 1) < internal->nrec) {
1226
0
        memmove(H5B2_INT_NREC(internal, hdr, idx), H5B2_INT_NREC(internal, hdr, idx + 1),
1227
0
                hdr->cls->nrec_size * (internal->nrec - (idx + 1)));
1228
0
        memmove(&(internal->node_ptrs[idx + 1]), &(internal->node_ptrs[idx + 2]),
1229
0
                sizeof(H5B2_node_ptr_t) * (internal->nrec - (idx + 1)));
1230
0
    } /* end if */
1231
1232
    /* Update # of records in parent node */
1233
0
    internal->nrec--;
1234
1235
    /* Mark parent as dirty */
1236
0
    *internal_flags_ptr |= H5AC__DIRTIED_FLAG;
1237
1238
    /* Update grandparent info */
1239
0
    curr_node_ptr->node_nrec--;
1240
1241
    /* Mark grandparent as dirty, if given */
1242
0
    if (parent_cache_info_flags_ptr)
1243
0
        *parent_cache_info_flags_ptr |= H5AC__DIRTIED_FLAG;
1244
1245
#ifdef H5B2_DEBUG
1246
    H5B2__assert_internal((hsize_t)0, hdr, internal);
1247
    if (depth > 1)
1248
        H5B2__assert_internal(internal->node_ptrs[idx].all_nrec, hdr, (H5B2_internal_t *)left_child);
1249
    else
1250
        H5B2__assert_leaf(hdr, (H5B2_leaf_t *)left_child);
1251
#endif /* H5B2_DEBUG */
1252
1253
0
done:
1254
    /* Unlock left node (marked as dirty) */
1255
0
    if (left_child && H5AC_unprotect(hdr->f, child_class, left_addr, left_child, left_child_flags) < 0)
1256
0
        HDONE_ERROR(H5E_BTREE, H5E_CANTUNPROTECT, FAIL, "unable to release B-tree child node");
1257
1258
    /* Delete right node & remove from cache (marked as dirty) */
1259
0
    if (right_child && H5AC_unprotect(hdr->f, child_class, right_addr, right_child, right_child_flags) < 0)
1260
0
        HDONE_ERROR(H5E_BTREE, H5E_CANTUNPROTECT, FAIL, "unable to release B-tree child node");
1261
1262
0
    FUNC_LEAVE_NOAPI(ret_value)
1263
0
} /* end H5B2__merge2() */
1264
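
/* Editorial aside (not part of the HDF5 source): a rough sketch of the 2->1 merge above
 * on plain integer arrays, with hypothetical names.  The parent's separator record is
 * demoted into the left node, the right node's records are appended after it, and the
 * right node can then be discarded. */

#include <string.h>

/* Toy 2->1 merge: returns the left node's new record count, which mirrors the
 * `*left_nrec = *left_nrec + *right_nrec + 1` update in the code above. */
static unsigned
toy_merge2(int *left, unsigned left_n, const int *right, unsigned right_n, int parent_sep)
{
    left[left_n] = parent_sep;                                /* demoted parent record */
    memcpy(&left[left_n + 1], right, right_n * sizeof(int));  /* append right records */
    return left_n + right_n + 1;                              /* new left count */
}
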
1265
/*-------------------------------------------------------------------------
1266
 * Function:  H5B2__merge3
1267
 *
1268
 * Purpose: Perform a 3->2 node merge
1269
 *
1270
 * Return:  Success:  Non-negative
1271
 *
1272
 *    Failure:  Negative
1273
 *
1274
 *-------------------------------------------------------------------------
1275
 */
1276
herr_t
1277
H5B2__merge3(H5B2_hdr_t *hdr, uint16_t depth, H5B2_node_ptr_t *curr_node_ptr,
1278
             unsigned *parent_cache_info_flags_ptr, H5B2_internal_t *internal, unsigned *internal_flags_ptr,
1279
             unsigned idx)
1280
0
{
1281
0
    const H5AC_class_t *child_class;                             /* Pointer to child node's class info */
1282
0
    haddr_t   left_addr = HADDR_UNDEF, right_addr = HADDR_UNDEF; /* Addresses of left & right child nodes */
1283
0
    haddr_t   middle_addr = HADDR_UNDEF;                         /* Address of middle child node */
1284
0
    void     *left_child = NULL, *right_child = NULL;            /* Pointers to left & right child nodes */
1285
0
    void     *middle_child = NULL;                               /* Pointer to middle child node */
1286
0
    uint16_t *left_nrec, *right_nrec;     /* Pointers to left & right child # of records */
1287
0
    uint16_t *middle_nrec;                /* Pointer to middle child # of records */
1288
0
    uint8_t  *left_native, *right_native; /* Pointers to left & right children's native records */
1289
0
    uint8_t  *middle_native;              /* Pointer to middle child's native records */
1290
0
    H5B2_node_ptr_t *left_node_ptrs   = NULL,
1291
0
                    *right_node_ptrs  = NULL; /* Pointers to children's node pointer info */
1292
0
    H5B2_node_ptr_t *middle_node_ptrs = NULL; /* Pointer to child's node pointer info */
1293
0
    hsize_t          middle_moved_nrec;       /* Number of records moved, for internal merge */
1294
0
    unsigned         left_child_flags = H5AC__NO_FLAGS_SET,
1295
0
             right_child_flags        = H5AC__NO_FLAGS_SET; /* Flags for unprotecting child nodes */
1296
0
    unsigned middle_child_flags       = H5AC__NO_FLAGS_SET; /* Flags for unprotecting child nodes */
1297
0
    herr_t   ret_value                = SUCCEED;            /* Return value */
1298
1299
0
    FUNC_ENTER_PACKAGE
1300
1301
    /* Check arguments. */
1302
0
    assert(hdr);
1303
0
    assert(curr_node_ptr);
1304
0
    assert(internal);
1305
0
    assert(internal_flags_ptr);
1306
1307
    /* Check for the kind of B-tree node to merge */
1308
0
    if (depth > 1) {
1309
0
        H5B2_internal_t *left_internal;   /* Pointer to left internal node */
1310
0
        H5B2_internal_t *middle_internal; /* Pointer to middle internal node */
1311
0
        H5B2_internal_t *right_internal;  /* Pointer to right internal node */
1312
1313
        /* Setup information for unlocking child nodes */
1314
0
        child_class = H5AC_BT2_INT;
1315
1316
        /* Lock B-tree child nodes */
1317
        /* (Shadow left and middle nodes if doing SWMR writes) */
1318
0
        if (NULL == (left_internal =
1319
0
                         H5B2__protect_internal(hdr, internal, &internal->node_ptrs[idx - 1],
1320
0
                                                (uint16_t)(depth - 1), hdr->swmr_write, H5AC__NO_FLAGS_SET)))
1321
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree internal node");
1322
0
        left_addr = internal->node_ptrs[idx - 1].addr;
1323
0
        if (NULL == (middle_internal =
1324
0
                         H5B2__protect_internal(hdr, internal, &internal->node_ptrs[idx],
1325
0
                                                (uint16_t)(depth - 1), hdr->swmr_write, H5AC__NO_FLAGS_SET)))
1326
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree internal node");
1327
0
        middle_addr = internal->node_ptrs[idx].addr;
1328
0
        if (NULL ==
1329
0
            (right_internal = H5B2__protect_internal(hdr, internal, &internal->node_ptrs[idx + 1],
1330
0
                                                     (uint16_t)(depth - 1), false, H5AC__NO_FLAGS_SET)))
1331
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree internal node");
1332
0
        right_addr = internal->node_ptrs[idx + 1].addr;
1333
1334
        /* More setup for accessing child node information */
1335
0
        left_child       = left_internal;
1336
0
        middle_child     = middle_internal;
1337
0
        right_child      = right_internal;
1338
0
        left_nrec        = &(left_internal->nrec);
1339
0
        middle_nrec      = &(middle_internal->nrec);
1340
0
        right_nrec       = &(right_internal->nrec);
1341
0
        left_native      = left_internal->int_native;
1342
0
        middle_native    = middle_internal->int_native;
1343
0
        right_native     = right_internal->int_native;
1344
0
        left_node_ptrs   = left_internal->node_ptrs;
1345
0
        middle_node_ptrs = middle_internal->node_ptrs;
1346
0
        right_node_ptrs  = right_internal->node_ptrs;
1347
0
    } /* end if */
1348
0
    else {
1349
0
        H5B2_leaf_t *left_leaf;   /* Pointer to left leaf node */
1350
0
        H5B2_leaf_t *middle_leaf; /* Pointer to middle leaf node */
1351
0
        H5B2_leaf_t *right_leaf;  /* Pointer to right leaf node */
1352
1353
        /* Setup information for unlocking child nodes */
1354
0
        child_class = H5AC_BT2_LEAF;
1355
1356
        /* Lock B-tree child nodes */
1357
        /* (Shadow left and middle nodes if doing SWMR writes) */
1358
0
        if (NULL == (left_leaf = H5B2__protect_leaf(hdr, internal, &internal->node_ptrs[idx - 1],
1359
0
                                                    hdr->swmr_write, H5AC__NO_FLAGS_SET)))
1360
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree leaf node");
1361
0
        left_addr = internal->node_ptrs[idx - 1].addr;
1362
0
        if (NULL == (middle_leaf = H5B2__protect_leaf(hdr, internal, &internal->node_ptrs[idx],
1363
0
                                                      hdr->swmr_write, H5AC__NO_FLAGS_SET)))
1364
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree leaf node");
1365
0
        middle_addr = internal->node_ptrs[idx].addr;
1366
0
        if (NULL == (right_leaf = H5B2__protect_leaf(hdr, internal, &internal->node_ptrs[idx + 1], false,
1367
0
                                                     H5AC__NO_FLAGS_SET)))
1368
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree leaf node");
1369
0
        right_addr = internal->node_ptrs[idx + 1].addr;
1370
1371
        /* More setup for accessing child node information */
1372
0
        left_child    = left_leaf;
1373
0
        middle_child  = middle_leaf;
1374
0
        right_child   = right_leaf;
1375
0
        left_nrec     = &(left_leaf->nrec);
1376
0
        middle_nrec   = &(middle_leaf->nrec);
1377
0
        right_nrec    = &(right_leaf->nrec);
1378
0
        left_native   = left_leaf->leaf_native;
1379
0
        middle_native = middle_leaf->leaf_native;
1380
0
        right_native  = right_leaf->leaf_native;
1381
0
    } /* end else */
1382
1383
    /* Redistribute records into left node */
1384
0
    {
1385
0
        unsigned total_nrec       = (unsigned)(*left_nrec + *middle_nrec + *right_nrec + 2);
1386
0
        unsigned middle_nrec_move = ((total_nrec - 1) / 2) - *left_nrec;
1387
1388
        /* Set the base number of records moved from middle node */
1389
0
        middle_moved_nrec = middle_nrec_move;
1390
1391
        /* Copy record from parent node to proper location in left node */
1392
0
        H5MM_memcpy(H5B2_NAT_NREC(left_native, hdr, *left_nrec), H5B2_INT_NREC(internal, hdr, idx - 1),
1393
0
                    hdr->cls->nrec_size);
1394
1395
        /* Copy records from middle node to left node */
1396
0
        H5MM_memcpy(H5B2_NAT_NREC(left_native, hdr, *left_nrec + 1), H5B2_NAT_NREC(middle_native, hdr, 0),
1397
0
                    hdr->cls->nrec_size * (middle_nrec_move - 1));
1398
1399
        /* Copy record from middle node to proper location in parent node */
1400
0
        H5MM_memcpy(H5B2_INT_NREC(internal, hdr, idx - 1),
1401
0
                    H5B2_NAT_NREC(middle_native, hdr, (middle_nrec_move - 1)), hdr->cls->nrec_size);
1402
1403
        /* Slide records in middle node down */
1404
0
        memmove(H5B2_NAT_NREC(middle_native, hdr, 0), H5B2_NAT_NREC(middle_native, hdr, middle_nrec_move),
1405
0
                hdr->cls->nrec_size * (*middle_nrec - middle_nrec_move));
1406
1407
        /* Move node pointers also if this is an internal node */
1408
0
        if (depth > 1) {
1409
0
            unsigned u; /* Local index variable */
1410
1411
            /* Copy node pointers from middle node into left node */
1412
0
            H5MM_memcpy(&(left_node_ptrs[*left_nrec + 1]), &(middle_node_ptrs[0]),
1413
0
                        sizeof(H5B2_node_ptr_t) * middle_nrec_move);
1414
1415
            /* Count the number of records being moved into the left node */
1416
0
            for (u = 0; u < middle_nrec_move; u++)
1417
0
                middle_moved_nrec += middle_node_ptrs[u].all_nrec;
1418
1419
            /* Slide the node pointers in middle node down */
1420
0
            memmove(&(middle_node_ptrs[0]), &(middle_node_ptrs[middle_nrec_move]),
1421
0
                    sizeof(H5B2_node_ptr_t) * (size_t)((unsigned)(*middle_nrec + 1) - middle_nrec_move));
1422
0
        } /* end if */
1423
1424
        /* Update flush dependencies for grandchildren, if using SWMR */
1425
0
        if (hdr->swmr_write && depth > 1)
1426
0
            if (H5B2__update_child_flush_depends(hdr, depth, left_node_ptrs, (unsigned)(*left_nrec + 1),
1427
0
                                                 (unsigned)(*left_nrec + middle_nrec_move + 1), middle_child,
1428
0
                                                 left_child) < 0)
1429
0
                HGOTO_ERROR(H5E_BTREE, H5E_CANTUPDATE, FAIL, "unable to update child nodes to new parent");
1430
1431
        /* Update # of records in left & middle nodes */
1432
0
        *left_nrec   = (uint16_t)(*left_nrec + middle_nrec_move);
1433
0
        *middle_nrec = (uint16_t)(*middle_nrec - middle_nrec_move);
1434
1435
        /* Mark nodes as dirty */
1436
0
        left_child_flags |= H5AC__DIRTIED_FLAG;
1437
0
        middle_child_flags |= H5AC__DIRTIED_FLAG;
1438
0
    } /* end block */
1439
1440
    /* Redistribute records into middle node */
1441
0
    {
1442
        /* Copy record from parent node to proper location in middle node */
1443
0
        H5MM_memcpy(H5B2_NAT_NREC(middle_native, hdr, *middle_nrec), H5B2_INT_NREC(internal, hdr, idx),
1444
0
                    hdr->cls->nrec_size);
1445
1446
        /* Copy records from right node to middle node */
1447
0
        H5MM_memcpy(H5B2_NAT_NREC(middle_native, hdr, *middle_nrec + 1), H5B2_NAT_NREC(right_native, hdr, 0),
1448
0
                    hdr->cls->nrec_size * (*right_nrec));
1449
1450
        /* Move node pointers also if this is an internal node */
1451
0
        if (depth > 1)
1452
            /* Copy node pointers from right node into middle node */
1453
0
            H5MM_memcpy(&(middle_node_ptrs[*middle_nrec + 1]), &(right_node_ptrs[0]),
1454
0
                        sizeof(H5B2_node_ptr_t) * (size_t)(*right_nrec + 1));
1455
1456
        /* Update flush dependencies for grandchildren, if using SWMR */
1457
0
        if (hdr->swmr_write && depth > 1)
1458
0
            if (H5B2__update_child_flush_depends(hdr, depth, middle_node_ptrs, (unsigned)(*middle_nrec + 1),
1459
0
                                                 (unsigned)(*middle_nrec + *right_nrec + 2), right_child,
1460
0
                                                 middle_child) < 0)
1461
0
                HGOTO_ERROR(H5E_BTREE, H5E_CANTUPDATE, FAIL, "unable to update child nodes to new parent");
1462
1463
        /* Update # of records in middle node */
1464
0
        *middle_nrec = (uint16_t)(*middle_nrec + (*right_nrec + 1));
1465
1466
        /* Mark nodes as dirty */
1467
0
        middle_child_flags |= H5AC__DIRTIED_FLAG;
1468
0
        right_child_flags |= H5AC__DELETED_FLAG;
1469
0
        if (!(hdr->swmr_write))
1470
0
            right_child_flags |= H5AC__DIRTIED_FLAG | H5AC__FREE_FILE_SPACE_FLAG;
1471
0
    } /* end block */
1472
1473
    /* Update # of records in child nodes */
1474
0
    internal->node_ptrs[idx - 1].node_nrec = *left_nrec;
1475
0
    internal->node_ptrs[idx].node_nrec     = *middle_nrec;
1476
1477
    /* Update total # of records in child B-trees */
1478
0
    internal->node_ptrs[idx - 1].all_nrec += middle_moved_nrec;
1479
0
    internal->node_ptrs[idx].all_nrec += (internal->node_ptrs[idx + 1].all_nrec + 1) - middle_moved_nrec;
1480
1481
    /* Slide records in parent node down, to eliminate demoted record */
1482
0
    if ((idx + 1) < internal->nrec) {
1483
0
        memmove(H5B2_INT_NREC(internal, hdr, idx), H5B2_INT_NREC(internal, hdr, idx + 1),
1484
0
                hdr->cls->nrec_size * (internal->nrec - (idx + 1)));
1485
0
        memmove(&(internal->node_ptrs[idx + 1]), &(internal->node_ptrs[idx + 2]),
1486
0
                sizeof(H5B2_node_ptr_t) * (internal->nrec - (idx + 1)));
1487
0
    } /* end if */
1488
1489
    /* Update # of records in parent node */
1490
0
    internal->nrec--;
1491
1492
    /* Mark parent as dirty */
1493
0
    *internal_flags_ptr |= H5AC__DIRTIED_FLAG;
1494
1495
    /* Update grandparent info */
1496
0
    curr_node_ptr->node_nrec--;
1497
1498
    /* Mark grandparent as dirty, if given */
1499
0
    if (parent_cache_info_flags_ptr)
1500
0
        *parent_cache_info_flags_ptr |= H5AC__DIRTIED_FLAG;
1501
1502
#ifdef H5B2_DEBUG
1503
    H5B2__assert_internal((hsize_t)0, hdr, internal);
1504
    if (depth > 1) {
1505
        H5B2__assert_internal2(internal->node_ptrs[idx - 1].all_nrec, hdr, (H5B2_internal_t *)left_child,
1506
                               (H5B2_internal_t *)middle_child);
1507
        H5B2__assert_internal(internal->node_ptrs[idx].all_nrec, hdr, (H5B2_internal_t *)middle_child);
1508
    } /* end if */
1509
    else {
1510
        H5B2__assert_leaf2(hdr, (H5B2_leaf_t *)left_child, (H5B2_leaf_t *)middle_child);
1511
        H5B2__assert_leaf(hdr, (H5B2_leaf_t *)middle_child);
1512
    }  /* end else */
1513
#endif /* H5B2_DEBUG */
1514
1515
0
done:
1516
    /* Unlock left & middle nodes (marked as dirty) */
1517
0
    if (left_child && H5AC_unprotect(hdr->f, child_class, left_addr, left_child, left_child_flags) < 0)
1518
0
        HDONE_ERROR(H5E_BTREE, H5E_CANTUNPROTECT, FAIL, "unable to release B-tree child node");
1519
0
    if (middle_child &&
1520
0
        H5AC_unprotect(hdr->f, child_class, middle_addr, middle_child, middle_child_flags) < 0)
1521
0
        HDONE_ERROR(H5E_BTREE, H5E_CANTUNPROTECT, FAIL, "unable to release B-tree child node");
1522
1523
    /* Delete right node & remove from cache (marked as dirty) */
1524
0
    if (right_child && H5AC_unprotect(hdr->f, child_class, right_addr, right_child, right_child_flags) < 0)
1525
0
        HDONE_ERROR(H5E_BTREE, H5E_CANTUNPROTECT, FAIL, "unable to release B-tree child node");
1526
1527
0
    FUNC_LEAVE_NOAPI(ret_value)
1528
0
} /* end H5B2__merge3() */
1529
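
/* Editorial aside (not from the HDF5 source): a small arithmetic sketch of how the 3->2
 * merge above balances records between the two surviving nodes.  The names are
 * hypothetical and mirror the `total_nrec` / `middle_nrec_move` computation in the code. */

/* Toy record-count arithmetic for a 3->2 merge: the two parent separators are
 * absorbed, one record is handed back up, and the remaining records are split
 * roughly in half between the surviving left and middle nodes. */
static void
toy_merge3_counts(unsigned left, unsigned middle, unsigned right,
                  unsigned *new_left, unsigned *new_middle)
{
    unsigned total = left + middle + right + 2;  /* child records + 2 parent separators */
    unsigned move  = ((total - 1) / 2) - left;   /* records shifted from middle into left */

    *new_left   = left + move;                   /* left absorbs one separator + (move - 1) records */
    *new_middle = total - 1 - *new_left;         /* middle absorbs the other separator + right node */
}
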
1530
/*-------------------------------------------------------------------------
1531
 * Function:  H5B2__insert
1532
 *
1533
 * Purpose: Adds a new record to the B-tree.
1534
 *
1535
 * Return:  Non-negative on success/Negative on failure
1536
 *
1537
 *-------------------------------------------------------------------------
1538
 */
1539
herr_t
1540
H5B2__insert(H5B2_hdr_t *hdr, void *udata)
1541
0
{
1542
0
    herr_t ret_value = SUCCEED; /* Return value */
1543
1544
0
    FUNC_ENTER_PACKAGE
1545
1546
    /* Check arguments. */
1547
0
    assert(hdr);
1548
0
    assert(udata);
1549
1550
    /* Check if the root node is allocated yet */
1551
0
    if (!H5_addr_defined(hdr->root.addr)) {
1552
        /* Create root node as leaf node in B-tree */
1553
0
        if (H5B2__create_leaf(hdr, hdr, &(hdr->root)) < 0)
1554
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTINIT, FAIL, "unable to create root node");
1555
0
    } /* end if */
1556
    /* Check if we need to split the root node (equiv. to a 1->2 node split) */
1557
0
    else if (hdr->root.node_nrec == hdr->node_info[hdr->depth].split_nrec) {
1558
        /* Split root node */
1559
0
        if (H5B2__split_root(hdr) < 0)
1560
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTSPLIT, FAIL, "unable to split root node");
1561
0
    } /* end if */
1562
1563
    /* Attempt to insert record into B-tree */
1564
0
    if (hdr->depth > 0) {
1565
0
        if (H5B2__insert_internal(hdr, hdr->depth, NULL, &hdr->root, H5B2_POS_ROOT, hdr, udata) < 0)
1566
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTINSERT, FAIL, "unable to insert record into B-tree internal node");
1567
0
    } /* end if */
1568
0
    else {
1569
0
        if (H5B2__insert_leaf(hdr, &hdr->root, H5B2_POS_ROOT, hdr, udata) < 0)
1570
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTINSERT, FAIL, "unable to insert record into B-tree leaf node");
1571
0
    } /* end else */
1572
1573
    /* Mark B-tree header as dirty */
1574
0
    if (H5B2__hdr_dirty(hdr) < 0)
1575
0
        HGOTO_ERROR(H5E_BTREE, H5E_CANTMARKDIRTY, FAIL, "unable to mark B-tree header dirty");
1576
1577
0
done:
1578
0
    FUNC_LEAVE_NOAPI(ret_value)
1579
0
} /* H5B2__insert() */
1580
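
/* Editorial aside (not part of the library): the root handling above creates the root
 * lazily and splits a full root before descending, so splits never have to propagate
 * upward during the descent.  The enum and helper below are hypothetical. */

/* Toy pre-insert root policy mirroring H5B2__insert(): after TOY_CREATE_ROOT or
 * TOY_SPLIT_ROOT the insert still descends into the (new) root as usual. */
enum toy_root_action { TOY_CREATE_ROOT, TOY_SPLIT_ROOT, TOY_DESCEND };

static enum toy_root_action
toy_root_policy(int root_exists, unsigned root_nrec, unsigned split_nrec)
{
    if (!root_exists)
        return TOY_CREATE_ROOT;   /* first insert: allocate a leaf root */
    if (root_nrec == split_nrec)
        return TOY_SPLIT_ROOT;    /* root is full: 1->2 split, then descend */
    return TOY_DESCEND;           /* room available: descend directly */
}
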
1581
/*-------------------------------------------------------------------------
1582
 * Function:  H5B2__iterate_node
1583
 *
1584
 * Purpose: Iterate over all the records from a B-tree node, in "in-order"
1585
 *    order, making a callback for each record.
1586
 *
1587
 *              If the callback returns non-zero, the iteration breaks out
1588
 *              without finishing all the records.
1589
 *
1590
 * Return:  Value from callback, non-negative on success, negative on error
1591
 *
1592
 *-------------------------------------------------------------------------
1593
 */
1594
herr_t
1595
H5B2__iterate_node(H5B2_hdr_t *hdr, uint16_t depth, H5B2_node_ptr_t *curr_node, void *parent,
1596
                   H5B2_operator_t op, void *op_data)
1597
0
{
1598
0
    const H5AC_class_t *curr_node_class = NULL;   /* Pointer to current node's class info */
1599
0
    void               *node            = NULL;   /* Pointer to current node */
1600
0
    uint8_t            *node_native;              /* Pointer to node's native records */
1601
0
    uint8_t            *native      = NULL;       /* Pointer to copy of node's native records */
1602
0
    H5B2_node_ptr_t    *node_ptrs   = NULL;       /* Pointers to node's node pointers */
1603
0
    bool                node_pinned = false;      /* Whether node is pinned */
1604
0
    unsigned            u;                        /* Local index */
1605
0
    herr_t              ret_value = H5_ITER_CONT; /* Iterator return value */
1606
1607
0
    FUNC_ENTER_PACKAGE
1608
1609
    /* Check arguments. */
1610
0
    assert(hdr);
1611
0
    assert(curr_node);
1612
0
    assert(op);
1613
1614
    /* Protect current node & set up variables */
1615
0
    if (depth > 0) {
1616
0
        H5B2_internal_t *internal; /* Pointer to internal node */
1617
1618
        /* Lock the current B-tree node */
1619
0
        if (NULL ==
1620
0
            (internal = H5B2__protect_internal(hdr, parent, curr_node, depth, false, H5AC__READ_ONLY_FLAG)))
1621
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree internal node");
1622
1623
        /* Set up information about current node */
1624
0
        curr_node_class = H5AC_BT2_INT;
1625
0
        node            = internal;
1626
0
        node_native     = internal->int_native;
1627
1628
        /* Allocate space for the node pointers in memory */
1629
0
        if (NULL == (node_ptrs = (H5B2_node_ptr_t *)H5FL_FAC_MALLOC(hdr->node_info[depth].node_ptr_fac)))
1630
0
            HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL,
1631
0
                        "memory allocation failed for B-tree internal node pointers");
1632
1633
        /* Copy the node pointers */
1634
0
        H5MM_memcpy(node_ptrs, internal->node_ptrs,
1635
0
                    (sizeof(H5B2_node_ptr_t) * (size_t)(curr_node->node_nrec + 1)));
1636
0
    } /* end if */
1637
0
    else {
1638
0
        H5B2_leaf_t *leaf; /* Pointer to leaf node */
1639
1640
        /* Lock the current B-tree node */
1641
0
        if (NULL == (leaf = H5B2__protect_leaf(hdr, parent, (H5B2_node_ptr_t *)curr_node, false,
1642
0
                                               H5AC__READ_ONLY_FLAG)))
1643
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree leaf node");
1644
1645
        /* Set up information about current node */
1646
0
        curr_node_class = H5AC_BT2_LEAF;
1647
0
        node            = leaf;
1648
0
        node_native     = leaf->leaf_native;
1649
0
    } /* end else */
1650
1651
    /* Allocate space for the native keys in memory */
1652
0
    if (NULL == (native = (uint8_t *)H5FL_FAC_MALLOC(hdr->node_info[depth].nat_rec_fac)))
1653
0
        HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL,
1654
0
                    "memory allocation failed for B-tree internal native keys");
1655
1656
    /* Copy the native keys */
1657
0
    H5MM_memcpy(native, node_native, (hdr->cls->nrec_size * curr_node->node_nrec));
1658
1659
    /* Unlock the node */
1660
0
    if (H5AC_unprotect(hdr->f, curr_node_class, curr_node->addr, node,
1661
0
                       (unsigned)(hdr->swmr_write ? H5AC__PIN_ENTRY_FLAG : H5AC__NO_FLAGS_SET)) < 0)
1662
0
        HGOTO_ERROR(H5E_BTREE, H5E_CANTUNPROTECT, FAIL, "unable to release B-tree node");
1663
0
    if (hdr->swmr_write)
1664
0
        node_pinned = true;
1665
0
    else
1666
0
        node = NULL;
1667
1668
    /* Iterate through records, in order */
1669
0
    for (u = 0; u < curr_node->node_nrec && !ret_value; u++) {
1670
        /* Descend into child node, if current node is an internal node */
1671
0
        if (depth > 0)
1672
0
            if ((ret_value =
1673
0
                     H5B2__iterate_node(hdr, (uint16_t)(depth - 1), &(node_ptrs[u]), node, op, op_data)) < 0)
1674
0
                HERROR(H5E_BTREE, H5E_CANTLIST, "node iteration failed");
1675
1676
        /* Make callback for current record */
1677
0
        if (!ret_value)
1678
0
            if ((ret_value = (op)(H5B2_NAT_NREC(native, hdr, u), op_data)) < 0)
1679
0
                HERROR(H5E_BTREE, H5E_CANTLIST, "iterator function failed");
1680
0
    } /* end for */
1681
1682
    /* Descend into last child node, if current node is an internal node */
1683
0
    if (!ret_value && depth > 0)
1684
0
        if ((ret_value = H5B2__iterate_node(hdr, (uint16_t)(depth - 1), &(node_ptrs[u]), node, op, op_data)) <
1685
0
            0)
1686
0
            HERROR(H5E_BTREE, H5E_CANTLIST, "node iteration failed");
1687
1688
0
done:
1689
    /* Unpin the node if it was pinned */
1690
0
    if (node_pinned && H5AC_unpin_entry(node) < 0)
1691
0
        HDONE_ERROR(H5E_BTREE, H5E_CANTUNPIN, FAIL, "can't unpin node");
1692
1693
    /* Release the node pointers & native records, if they were copied */
1694
0
    if (node_ptrs)
1695
0
        node_ptrs = (H5B2_node_ptr_t *)H5FL_FAC_FREE(hdr->node_info[depth].node_ptr_fac, node_ptrs);
1696
0
    if (native)
1697
0
        native = (uint8_t *)H5FL_FAC_FREE(hdr->node_info[depth].nat_rec_fac, native);
1698
1699
0
    FUNC_LEAVE_NOAPI(ret_value)
1700
0
} /* H5B2__iterate_node() */
1701
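
/* Editorial aside: a self-contained sketch (hypothetical types and names, not the HDF5
 * structures) of the in-order traversal pattern used above: descend into the child to
 * the left of each record, invoke the callback on the record, and let any non-zero
 * callback return value stop the walk. */

#include <stddef.h>

struct toy_node {
    int              rec[3];    /* records stored in this node */
    unsigned         nrec;      /* number of records in use (<= 3) */
    struct toy_node *child[4];  /* child[u] precedes rec[u]; all NULL in a leaf */
};

typedef int (*toy_op_t)(int rec, void *ctx);  /* <0 error, 0 continue, >0 stop */

static int
toy_iterate(const struct toy_node *node, toy_op_t op, void *ctx)
{
    unsigned u;
    int      ret = 0;

    for (u = 0; u < node->nrec && !ret; u++) {
        if (node->child[u])                       /* descend before record u */
            ret = toy_iterate(node->child[u], op, ctx);
        if (!ret)
            ret = op(node->rec[u], ctx);          /* callback for record u */
    }
    if (!ret && node->child[node->nrec])          /* rightmost child comes last */
        ret = toy_iterate(node->child[node->nrec], op, ctx);

    return ret;                                   /* non-zero short-circuits the walk */
}

/* Note: in the HDF5 routine above, the records and node pointers are copied and the node
 * is released (or pinned under SWMR) before the callbacks run; the toy version does not
 * model that metadata-cache handling. */
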
1702
/*-------------------------------------------------------------------------
1703
 * Function:  H5B2__delete_node
1704
 *
1705
 * Purpose: Iterate over a B-tree node and all the nodes below it, deleting them
1706
 *    after they no longer have any children
1707
 *
1708
 * Return:  Non-negative on success, negative on error
1709
 *
1710
 *-------------------------------------------------------------------------
1711
 */
1712
herr_t
1713
H5B2__delete_node(H5B2_hdr_t *hdr, uint16_t depth, H5B2_node_ptr_t *curr_node, void *parent, H5B2_remove_t op,
1714
                  void *op_data)
1715
0
{
1716
0
    const H5AC_class_t *curr_node_class = NULL; /* Pointer to current node's class info */
1717
0
    void               *node            = NULL; /* Pointer to current node */
1718
0
    uint8_t            *native;                 /* Pointer to node's native records */
1719
0
    herr_t              ret_value = SUCCEED;    /* Return value */
1720
1721
0
    FUNC_ENTER_PACKAGE
1722
1723
    /* Check arguments. */
1724
0
    assert(hdr);
1725
0
    assert(curr_node);
1726
1727
0
    if (depth > 0) {
1728
0
        H5B2_internal_t *internal; /* Pointer to internal node */
1729
0
        unsigned         u;        /* Local index */
1730
1731
        /* Lock the current B-tree node */
1732
0
        if (NULL ==
1733
0
            (internal = H5B2__protect_internal(hdr, parent, curr_node, depth, false, H5AC__NO_FLAGS_SET)))
1734
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree internal node");
1735
1736
        /* Set up information about current node */
1737
0
        curr_node_class = H5AC_BT2_INT;
1738
0
        node            = internal;
1739
0
        native          = internal->int_native;
1740
1741
        /* Descend into children */
1742
0
        for (u = 0; u < internal->nrec + (unsigned)1; u++)
1743
0
            if (H5B2__delete_node(hdr, (uint16_t)(depth - 1), &(internal->node_ptrs[u]), internal, op,
1744
0
                                  op_data) < 0)
1745
0
                HGOTO_ERROR(H5E_BTREE, H5E_CANTLIST, FAIL, "node descent failed");
1746
0
    } /* end if */
1747
0
    else {
1748
0
        H5B2_leaf_t *leaf; /* Pointer to leaf node */
1749
1750
        /* Lock the current B-tree node */
1751
0
        if (NULL ==
1752
0
            (leaf = H5B2__protect_leaf(hdr, parent, (H5B2_node_ptr_t *)curr_node, false, H5AC__NO_FLAGS_SET)))
1753
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree leaf node");
1754
1755
        /* Set up information about current node */
1756
0
        curr_node_class = H5AC_BT2_LEAF;
1757
0
        node            = leaf;
1758
0
        native          = leaf->leaf_native;
1759
0
    } /* end else */
1760
1761
    /* If there's a callback defined, iterate over the records in this node */
1762
0
    if (op) {
1763
0
        unsigned u; /* Local index */
1764
1765
        /* Iterate through records in this node */
1766
0
        for (u = 0; u < curr_node->node_nrec; u++) {
1767
            /* Make callback for each record */
1768
0
            if ((op)(H5B2_NAT_NREC(native, hdr, u), op_data) < 0)
1769
0
                HGOTO_ERROR(H5E_BTREE, H5E_CANTLIST, FAIL, "iterator function failed");
1770
0
        } /* end for */
1771
0
    }     /* end if */
1772
1773
0
done:
1774
    /* Unlock & delete current node */
1775
0
    if (node && H5AC_unprotect(
1776
0
                    hdr->f, curr_node_class, curr_node->addr, node,
1777
0
                    (unsigned)(H5AC__DELETED_FLAG | (hdr->swmr_write ? 0 : H5AC__FREE_FILE_SPACE_FLAG))) < 0)
1778
0
        HDONE_ERROR(H5E_BTREE, H5E_CANTUNPROTECT, FAIL, "unable to release B-tree node");
1779
1780
0
    FUNC_LEAVE_NOAPI(ret_value)
1781
0
} /* H5B2__delete_node() */
1782
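
/* Editorial aside, for illustration only: a post-order deletion over the hypothetical
 * toy_node structure sketched after H5B2__iterate_node().  Children are released before
 * their parent, matching the order used above. */

#include <stdlib.h>

/* Toy post-order delete: with n records there are n + 1 children to release
 * first; the node itself (leaf or internal) is freed last. */
static void
toy_delete(struct toy_node *node)
{
    unsigned u;

    if (node == NULL)
        return;
    if (node->child[0] != NULL)              /* internal node: recurse first */
        for (u = 0; u <= node->nrec; u++)
            toy_delete(node->child[u]);
    free(node);                              /* parent goes after its children */
}
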
1783
/*-------------------------------------------------------------------------
1784
 * Function:    H5B2__node_size
1785
 *
1786
 * Purpose:     Iterate over a B-tree node and the nodes below it, collecting
1787
 *    B-tree storage info.
1788
 *
1789
 * Return:      non-negative on success, negative on error
1790
 *
1791
 *-------------------------------------------------------------------------
1792
 */
1793
herr_t
1794
H5B2__node_size(H5B2_hdr_t *hdr, uint16_t depth, H5B2_node_ptr_t *curr_node, void *parent,
1795
                hsize_t *btree_size)
1796
0
{
1797
0
    H5B2_internal_t *internal  = NULL;    /* Pointer to internal node */
1798
0
    herr_t           ret_value = SUCCEED; /* Iterator return value */
1799
1800
0
    FUNC_ENTER_PACKAGE
1801
1802
    /* Check arguments. */
1803
0
    assert(hdr);
1804
0
    assert(curr_node);
1805
0
    assert(btree_size);
1806
0
    assert(depth > 0);
1807
1808
    /* Lock the current B-tree node */
1809
0
    if (NULL ==
1810
0
        (internal = H5B2__protect_internal(hdr, parent, curr_node, depth, false, H5AC__READ_ONLY_FLAG)))
1811
0
        HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree internal node");
1812
1813
    /* Recursively descend into child nodes, if we are above the "twig" level in the B-tree */
1814
0
    if (depth > 1) {
1815
0
        unsigned u; /* Local index */
1816
1817
        /* Descend into children */
1818
0
        for (u = 0; u < internal->nrec + (unsigned)1; u++)
1819
0
            if (H5B2__node_size(hdr, (uint16_t)(depth - 1), &(internal->node_ptrs[u]), internal, btree_size) <
1820
0
                0)
1821
0
                HGOTO_ERROR(H5E_BTREE, H5E_CANTLIST, FAIL, "node iteration failed");
1822
0
    }    /* end if */
1823
0
    else /* depth is 1: count all the leaf nodes from this node */
1824
0
        *btree_size += (hsize_t)(internal->nrec + 1) * hdr->node_size;
1825
1826
    /* Count this node */
1827
0
    *btree_size += hdr->node_size;
1828
1829
0
done:
1830
0
    if (internal && H5AC_unprotect(hdr->f, H5AC_BT2_INT, curr_node->addr, internal, H5AC__NO_FLAGS_SET) < 0)
1831
0
        HDONE_ERROR(H5E_BTREE, H5E_CANTUNPROTECT, FAIL, "unable to release B-tree node");
1832
1833
0
    FUNC_LEAVE_NOAPI(ret_value)
1834
0
} /* H5B2__node_size() */
1835
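
/* Editorial aside: a rough accounting sketch over the same hypothetical toy_node
 * structure used in the earlier sketches.  Every internal node contributes one
 * fixed-size block, and at depth 1 the leaves below are counted without being visited,
 * which is the same shortcut H5B2__node_size() takes at the "twig" level. */

/* Toy storage accounting: `depth` must be > 0 (the caller never invokes this on a
 * leaf), and `node_size` is the fixed on-disk size of one tree node. */
static unsigned long long
toy_node_size(const struct toy_node *node, unsigned depth, unsigned long long node_size)
{
    unsigned long long total = node_size;  /* count this internal node */
    unsigned           u;

    if (depth > 1) {                       /* above the twig level: recurse */
        for (u = 0; u <= node->nrec; u++)
            total += toy_node_size(node->child[u], depth - 1, node_size);
    }
    else                                   /* twig level: leaves counted in bulk */
        total += (unsigned long long)(node->nrec + 1) * node_size;

    return total;
}
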
1836
/*-------------------------------------------------------------------------
1837
 * Function:    H5B2__create_flush_depend
1838
 *
1839
 * Purpose:     Create a flush dependency between two data structure components
1840
 *
1841
 * Return:      SUCCEED/FAIL
1842
 *
1843
 *-------------------------------------------------------------------------
1844
 */
1845
herr_t
1846
H5B2__create_flush_depend(H5AC_info_t *parent_entry, H5AC_info_t *child_entry)
1847
0
{
1848
0
    herr_t ret_value = SUCCEED; /* Return value */
1849
1850
0
    FUNC_ENTER_NOAPI_NOINIT
1851
1852
    /* Sanity check */
1853
0
    assert(parent_entry);
1854
0
    assert(child_entry);
1855
1856
    /* Create a flush dependency between parent and child entry */
1857
0
    if (H5AC_create_flush_dependency(parent_entry, child_entry) < 0)
1858
0
        HGOTO_ERROR(H5E_BTREE, H5E_CANTDEPEND, FAIL, "unable to create flush dependency");
1859
1860
0
done:
1861
0
    FUNC_LEAVE_NOAPI(ret_value)
1862
0
} /* end H5B2__create_flush_depend() */
1863
1864
/*-------------------------------------------------------------------------
1865
 * Function:    H5B2__update_flush_depend
1866
 *
1867
 * Purpose:     Update the flush dependency for a child of a node
1868
 *
1869
 * Return:      SUCCEED/FAIL
1870
 *
1871
 *-------------------------------------------------------------------------
1872
 */
1873
herr_t
1874
H5B2__update_flush_depend(H5B2_hdr_t *hdr, unsigned depth, H5B2_node_ptr_t *node_ptr, void *old_parent,
1875
                          void *new_parent)
1876
0
{
1877
0
    const H5AC_class_t *child_class;           /* Pointer to child node's class info */
1878
0
    void               *child       = NULL;    /* Pointer to child node */
1879
0
    unsigned            node_status = 0;       /* Node's status in the metadata cache */
1880
0
    herr_t              ret_value   = SUCCEED; /* Return value */
1881
1882
0
    FUNC_ENTER_PACKAGE
1883
1884
    /* Sanity checks */
1885
0
    assert(hdr);
1886
0
    assert(depth > 0);
1887
0
    assert(node_ptr);
1888
0
    assert(old_parent);
1889
0
    assert(new_parent);
1890
1891
    /* Check the node's entry status in the metadata cache */
1892
0
    if (H5AC_get_entry_status(hdr->f, node_ptr->addr, &node_status) < 0)
1893
0
        HGOTO_ERROR(H5E_BTREE, H5E_CANTGET, FAIL, "unable to check status of B-tree node");
1894
1895
    /* If the node is in the cache, check for retargeting its parent */
1896
0
    if (node_status & H5AC_ES__IN_CACHE) {
1897
0
        void **parent_ptr  = NULL;  /* Pointer to child node's parent */
1898
0
        bool   update_deps = false; /* Whether to update flush dependencies */
1899
1900
        /* Get child node pointer */
1901
0
        if (depth > 1) {
1902
0
            H5B2_internal_t *child_int;
1903
1904
            /* Protect child */
1905
0
            if (NULL == (child_int = H5B2__protect_internal(hdr, new_parent, node_ptr, (uint16_t)(depth - 1),
1906
0
                                                            false, H5AC__NO_FLAGS_SET)))
1907
0
                HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree internal node");
1908
0
            child_class = H5AC_BT2_INT;
1909
0
            child       = child_int;
1910
1911
0
            if (child_int->parent == old_parent) {
1912
0
                parent_ptr  = &child_int->parent;
1913
0
                update_deps = true;
1914
0
            } /* end if */
1915
0
            else
1916
0
                assert(child_int->parent == new_parent);
1917
0
        } /* end if */
1918
0
        else {
1919
0
            H5B2_leaf_t *child_leaf;
1920
1921
            /* Protect child */
1922
0
            if (NULL == (child_leaf = H5B2__protect_leaf(hdr, new_parent, (H5B2_node_ptr_t *)node_ptr, false,
1923
0
                                                         H5AC__NO_FLAGS_SET)))
1924
0
                HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree leaf node");
1925
0
            child_class = H5AC_BT2_LEAF;
1926
0
            child       = child_leaf;
1927
1928
0
            if (child_leaf->parent == old_parent) {
1929
0
                parent_ptr  = &child_leaf->parent;
1930
0
                update_deps = true;
1931
0
            } /* end if */
1932
0
            else
1933
0
                assert(child_leaf->parent == new_parent);
1934
0
        } /* end else */
1935
1936
        /* Update flush dependencies if necessary */
1937
0
        if (update_deps) {
1938
            /* Sanity check */
1939
0
            assert(parent_ptr);
1940
1941
            /* Switch the flush dependency for the node */
1942
0
            if (H5B2__destroy_flush_depend((H5AC_info_t *)old_parent, (H5AC_info_t *)child) < 0)
1943
0
                HGOTO_ERROR(H5E_BTREE, H5E_CANTUNDEPEND, FAIL, "unable to destroy flush dependency");
1944
0
            *parent_ptr = new_parent;
1945
0
            if (H5B2__create_flush_depend((H5AC_info_t *)new_parent, (H5AC_info_t *)child) < 0)
1946
0
                HGOTO_ERROR(H5E_BTREE, H5E_CANTDEPEND, FAIL, "unable to create flush dependency");
1947
0
        } /* end if */
1948
0
    }     /* end if */
1949
1950
0
done:
1951
    /* Unprotect the child */
1952
0
    if (child)
1953
0
        if (H5AC_unprotect(hdr->f, child_class, node_ptr->addr, child, H5AC__NO_FLAGS_SET) < 0)
1954
0
            HDONE_ERROR(H5E_BTREE, H5E_CANTUNPROTECT, FAIL, "unable to release B-tree node");
1955
1956
0
    FUNC_LEAVE_NOAPI(ret_value)
1957
0
} /* end H5B2__update_flush_depend() */
1958
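
/* Editorial aside: a minimal sketch (hypothetical types, not the metadata-cache API) of
 * the retargeting rule used above: only a child still pointing at the old parent is
 * switched, and the flush-dependency swap happens around that pointer update. */

struct toy_entry {
    struct toy_entry *parent;  /* current flush-dependency parent */
};

/* Toy parent retarget: returns 1 if the child was switched, 0 if it already pointed at
 * the new parent.  The real routine also destroys the old flush dependency and creates
 * a new one on either side of the pointer update. */
static int
toy_retarget(struct toy_entry *child, struct toy_entry *old_parent, struct toy_entry *new_parent)
{
    if (child->parent == old_parent) {
        child->parent = new_parent;   /* switch to the new parent */
        return 1;
    }
    return 0;                         /* nothing to do: already retargeted */
}
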
1959
/*-------------------------------------------------------------------------
1960
 * Function:    H5B2__update_child_flush_depends
1961
 *
1962
 * Purpose:     Update flush dependencies for children of a node
1963
 *
1964
 * Return:      SUCCEED/FAIL
1965
 *
1966
 *-------------------------------------------------------------------------
1967
 */
1968
static herr_t
1969
H5B2__update_child_flush_depends(H5B2_hdr_t *hdr, unsigned depth, H5B2_node_ptr_t *node_ptrs,
1970
                                 unsigned start_idx, unsigned end_idx, void *old_parent, void *new_parent)
1971
0
{
1972
0
    unsigned u;                   /* Local index variable */
1973
0
    herr_t   ret_value = SUCCEED; /* Return value */
1974
1975
0
    FUNC_ENTER_PACKAGE
1976
1977
    /* Sanity checks */
1978
0
    assert(hdr);
1979
0
    assert(depth > 1);
1980
0
    assert(node_ptrs);
1981
0
    assert(start_idx <= end_idx);
1982
0
    assert(old_parent);
1983
0
    assert(new_parent);
1984
1985
    /* Loop over children */
1986
0
    for (u = start_idx; u < end_idx; u++)
1987
        /* Update parent for children */
1988
0
        if (H5B2__update_flush_depend(hdr, depth - 1, &node_ptrs[u], old_parent, new_parent) < 0)
1989
0
            HGOTO_ERROR(H5E_BTREE, H5E_CANTUPDATE, FAIL, "unable to update child node to new parent");
1990
1991
0
done:
1992
0
    FUNC_LEAVE_NOAPI(ret_value)
1993
0
} /* end H5B2__update_child_flush_depends() */
1994
1995
/*-------------------------------------------------------------------------
1996
 * Function:    H5B2__destroy_flush_depend
1997
 *
1998
 * Purpose:     Destroy a flush dependency between two data structure components
1999
 *
2000
 * Return:      SUCCEED/FAIL
2001
 *
2002
 *-------------------------------------------------------------------------
2003
 */
2004
herr_t
2005
H5B2__destroy_flush_depend(H5AC_info_t *parent_entry, H5AC_info_t *child_entry)
2006
0
{
2007
0
    herr_t ret_value = SUCCEED; /* Return value */
2008
2009
0
    FUNC_ENTER_NOAPI_NOINIT
2010
2011
    /* Sanity check */
2012
0
    assert(parent_entry);
2013
0
    assert(child_entry);
2014
2015
    /* Destroy a flush dependency between parent and child entry */
2016
0
    if (H5AC_destroy_flush_dependency(parent_entry, child_entry) < 0)
2017
0
        HGOTO_ERROR(H5E_BTREE, H5E_CANTUNDEPEND, FAIL, "unable to destroy flush dependency");
2018
2019
0
done:
2020
0
    FUNC_LEAVE_NOAPI(ret_value)
2021
0
} /* end H5B2__destroy_flush_depend() */