Coverage Report

Created: 2023-06-07 07:09

/src/LPM/external.protobuf/include/absl/container/internal/btree.h

// Copyright 2018 The Abseil Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// A btree implementation of the STL set and map interfaces. A btree is smaller
// and generally also faster than STL set/map (refer to the benchmarks below).
// The red-black tree implementation of STL set/map has an overhead of 3
// pointers (left, right and parent) plus the node color information for each
// stored value. So a set<int32_t> consumes 40 bytes for each value stored in
// 64-bit mode. This btree implementation stores multiple values on fixed
// size nodes (usually 256 bytes) and doesn't store child pointers for leaf
// nodes. The result is that a btree_set<int32_t> may use much less memory per
// stored value. For the random insertion benchmark in btree_bench.cc, a
// btree_set<int32_t> with node-size of 256 uses 5.1 bytes per stored value.
//
// The packing of multiple values on to each node of a btree has another effect
// besides better space utilization: better cache locality due to fewer cache
// lines being accessed. Better cache locality translates into faster
// operations.
//
// CAVEATS
//
// Insertions and deletions on a btree can cause splitting, merging or
// rebalancing of btree nodes. And even without these operations, insertions
// and deletions on a btree will move values around within a node. In both
// cases, the result is that insertions and deletions can invalidate iterators
// pointing to values other than the one being inserted/deleted. Therefore, this
// container does not provide pointer stability. This is notably different from
// STL set/map which takes care to not invalidate iterators on insert/erase
// except, of course, for iterators pointing to the value being erased. A
// partial workaround when erasing is available: erase() returns an iterator
// pointing to the item just after the one that was erased (or end() if none
// exists).
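//
// For example (illustrative sketch using the public absl::btree_set wrapper,
// not code from this file):
//
//   absl::btree_set<int> s = {1, 2, 3, 4, 5};
//   for (auto it = s.begin(); it != s.end();) {
//     if (*it % 2 != 0) {
//       it = s.erase(it);  // erase() returns the next valid iterator
//     } else {
//       ++it;
//     }
//   }
//
// Reusing `it` after erase() without the reassignment would be invalid,
// since the erase may have moved values or rebalanced nodes.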

#ifndef ABSL_CONTAINER_INTERNAL_BTREE_H_
#define ABSL_CONTAINER_INTERNAL_BTREE_H_

#include <algorithm>
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <functional>
#include <iterator>
#include <limits>
#include <new>
#include <string>
#include <type_traits>
#include <utility>

#include "absl/base/internal/raw_logging.h"
#include "absl/base/macros.h"
#include "absl/container/internal/common.h"
#include "absl/container/internal/common_policy_traits.h"
#include "absl/container/internal/compressed_tuple.h"
#include "absl/container/internal/container_memory.h"
#include "absl/container/internal/layout.h"
#include "absl/memory/memory.h"
#include "absl/meta/type_traits.h"
#include "absl/strings/cord.h"
#include "absl/strings/string_view.h"
#include "absl/types/compare.h"
#include "absl/utility/utility.h"

namespace absl {
ABSL_NAMESPACE_BEGIN
namespace container_internal {

#ifdef ABSL_BTREE_ENABLE_GENERATIONS
#error ABSL_BTREE_ENABLE_GENERATIONS cannot be directly set
#elif defined(ABSL_HAVE_ADDRESS_SANITIZER) || \
    defined(ABSL_HAVE_MEMORY_SANITIZER)
// When compiled in sanitizer mode, we add generation integers to the nodes and
// iterators. When iterators are used, we validate that the container has not
// been mutated since the iterator was constructed.
#define ABSL_BTREE_ENABLE_GENERATIONS
#endif
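
// For example (sketch, not part of this header): in a build where
// ABSL_HAVE_ADDRESS_SANITIZER is defined, the following misuse is caught at
// runtime rather than being silent undefined behavior:
//
//   absl::btree_set<int> s = {1, 2, 3};
//   auto it = s.begin();
//   s.insert(4);  // mutates the tree; the root generation is incremented
//   *it;          // FATAL: "Attempting to use an invalidated iterator."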

template <typename Compare, typename T, typename U>
using compare_result_t = absl::result_of_t<const Compare(const T &, const U &)>;

// A helper class that indicates if the Compare parameter is a key-compare-to
// comparator.
template <typename Compare, typename T>
using btree_is_key_compare_to =
    std::is_convertible<compare_result_t<Compare, T, T>, absl::weak_ordering>;
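
// For example, a hand-written three-way comparator (hypothetical, for
// illustration) is detected as key-compare-to because its result converts to
// absl::weak_ordering, while a boolean comparator like std::less<int> is not:
//
//   struct IntCompareTo {
//     absl::weak_ordering operator()(int a, int b) const {
//       if (a < b) return absl::weak_ordering::less;
//       if (a > b) return absl::weak_ordering::greater;
//       return absl::weak_ordering::equivalent;
//     }
//   };
//   static_assert(btree_is_key_compare_to<IntCompareTo, int>::value, "");
//   static_assert(!btree_is_key_compare_to<std::less<int>, int>::value, "");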

struct StringBtreeDefaultLess {
  using is_transparent = void;

  StringBtreeDefaultLess() = default;

  // Compatibility constructors.
  StringBtreeDefaultLess(std::less<std::string>) {}        // NOLINT
  StringBtreeDefaultLess(std::less<absl::string_view>) {}  // NOLINT
  StringBtreeDefaultLess(std::less<absl::Cord>) {}         // NOLINT

  // Allow converting to std::less for use in key_comp()/value_comp().
  explicit operator std::less<std::string>() const { return {}; }
  explicit operator std::less<absl::string_view>() const { return {}; }
  explicit operator std::less<absl::Cord>() const { return {}; }

  absl::weak_ordering operator()(absl::string_view lhs,
                                 absl::string_view rhs) const {
    return compare_internal::compare_result_as_ordering(lhs.compare(rhs));
  }
  absl::weak_ordering operator()(const absl::Cord &lhs,
                                 const absl::Cord &rhs) const {
    return compare_internal::compare_result_as_ordering(lhs.Compare(rhs));
  }
  absl::weak_ordering operator()(const absl::Cord &lhs,
                                 absl::string_view rhs) const {
    return compare_internal::compare_result_as_ordering(lhs.Compare(rhs));
  }
  absl::weak_ordering operator()(absl::string_view lhs,
                                 const absl::Cord &rhs) const {
    return compare_internal::compare_result_as_ordering(-rhs.Compare(lhs));
  }
};

struct StringBtreeDefaultGreater {
  using is_transparent = void;

  StringBtreeDefaultGreater() = default;

  // Compatibility constructors.
  StringBtreeDefaultGreater(std::greater<std::string>) {}        // NOLINT
  StringBtreeDefaultGreater(std::greater<absl::string_view>) {}  // NOLINT
  StringBtreeDefaultGreater(std::greater<absl::Cord>) {}         // NOLINT

  // Allow converting to std::greater for use in key_comp()/value_comp().
  explicit operator std::greater<std::string>() const { return {}; }
  explicit operator std::greater<absl::string_view>() const { return {}; }
  explicit operator std::greater<absl::Cord>() const { return {}; }

  absl::weak_ordering operator()(absl::string_view lhs,
                                 absl::string_view rhs) const {
    return compare_internal::compare_result_as_ordering(rhs.compare(lhs));
  }
  absl::weak_ordering operator()(const absl::Cord &lhs,
                                 const absl::Cord &rhs) const {
    return compare_internal::compare_result_as_ordering(rhs.Compare(lhs));
  }
  absl::weak_ordering operator()(const absl::Cord &lhs,
                                 absl::string_view rhs) const {
    return compare_internal::compare_result_as_ordering(-lhs.Compare(rhs));
  }
  absl::weak_ordering operator()(absl::string_view lhs,
                                 const absl::Cord &rhs) const {
    return compare_internal::compare_result_as_ordering(rhs.Compare(lhs));
  }
};

// See below comments for checked_compare.
template <typename Compare, bool is_class = std::is_class<Compare>::value>
struct checked_compare_base : Compare {
  using Compare::Compare;
  explicit checked_compare_base(Compare c) : Compare(std::move(c)) {}
  const Compare &comp() const { return *this; }
};
template <typename Compare>
struct checked_compare_base<Compare, false> {
  explicit checked_compare_base(Compare c) : compare(std::move(c)) {}
  const Compare &comp() const { return compare; }
  Compare compare;
};

// A mechanism for opting out of checked_compare for use only in btree_test.cc.
struct BtreeTestOnlyCheckedCompareOptOutBase {};

// A helper class to adapt the specified comparator for two use cases:
// (1) When using common Abseil string types with common comparison functors,
// convert a boolean comparison into a three-way comparison that returns an
// `absl::weak_ordering`. This helper class is specialized for
// less<std::string>, greater<std::string>, less<string_view>,
// greater<string_view>, less<absl::Cord>, and greater<absl::Cord>.
// (2) Adapt the comparator to diagnose cases of non-strict-weak-ordering (see
// https://en.cppreference.com/w/cpp/named_req/Compare) in debug mode. Whenever
// a comparison is made, we will make assertions to verify that the comparator
// is valid.
template <typename Compare, typename Key>
struct key_compare_adapter {
  // Inherit from checked_compare_base to support function pointers and also
  // keep empty-base-optimization (EBO) support for classes.
  // Note: we can't use CompressedTuple here because that would interfere
  // with the EBO for `btree::rightmost_`. `btree::rightmost_` is itself a
  // CompressedTuple and nested `CompressedTuple`s don't support EBO.
  // TODO(b/214288561): use CompressedTuple instead once it supports EBO for
  // nested `CompressedTuple`s.
  struct checked_compare : checked_compare_base<Compare> {
   private:
    using Base = typename checked_compare::checked_compare_base;
    using Base::comp;

    // If possible, returns whether `k` is equivalent to itself. We can only do
    // this for `Key`s because we can't be sure that it's safe to call
    // `comp()(k, k)` otherwise. Even if SFINAE allows it, there could be a
    // compilation failure inside the implementation of the comparison
    // operator.
    bool is_self_equivalent(const Key &k) const {
      // Note: this works for both boolean and three-way comparators.
      return comp()(k, k) == 0;
    }
    // If we can't compare the argument with itself, returns true
    // unconditionally.
    template <typename T>
    bool is_self_equivalent(const T &) const {
      return true;
    }

   public:
    using Base::Base;
    checked_compare(Compare comp) : Base(std::move(comp)) {}  // NOLINT

    // Allow converting to Compare for use in key_comp()/value_comp().
    explicit operator Compare() const { return comp(); }

    template <typename T, typename U,
              absl::enable_if_t<
                  std::is_same<bool, compare_result_t<Compare, T, U>>::value,
                  int> = 0>
    bool operator()(const T &lhs, const U &rhs) const {
      // NOTE: if any of these assertions fail, then the comparator does not
      // establish a strict-weak-ordering (see
      // https://en.cppreference.com/w/cpp/named_req/Compare).
      assert(is_self_equivalent(lhs));
      assert(is_self_equivalent(rhs));
      const bool lhs_comp_rhs = comp()(lhs, rhs);
      assert(!lhs_comp_rhs || !comp()(rhs, lhs));
      return lhs_comp_rhs;
    }

    template <
        typename T, typename U,
        absl::enable_if_t<std::is_convertible<compare_result_t<Compare, T, U>,
                                              absl::weak_ordering>::value,
                          int> = 0>
    absl::weak_ordering operator()(const T &lhs, const U &rhs) const {
      // NOTE: if any of these assertions fail, then the comparator does not
      // establish a strict-weak-ordering (see
      // https://en.cppreference.com/w/cpp/named_req/Compare).
      assert(is_self_equivalent(lhs));
      assert(is_self_equivalent(rhs));
      const absl::weak_ordering lhs_comp_rhs = comp()(lhs, rhs);
#ifndef NDEBUG
      const absl::weak_ordering rhs_comp_lhs = comp()(rhs, lhs);
      if (lhs_comp_rhs > 0) {
        assert(rhs_comp_lhs < 0 && "lhs_comp_rhs > 0 -> rhs_comp_lhs < 0");
      } else if (lhs_comp_rhs == 0) {
        assert(rhs_comp_lhs == 0 && "lhs_comp_rhs == 0 -> rhs_comp_lhs == 0");
      } else {
        assert(rhs_comp_lhs > 0 && "lhs_comp_rhs < 0 -> rhs_comp_lhs > 0");
      }
#endif
      return lhs_comp_rhs;
    }
  };
  using type = absl::conditional_t<
      std::is_base_of<BtreeTestOnlyCheckedCompareOptOutBase, Compare>::value,
      Compare, checked_compare>;
};
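
// For example (illustrative): a comparator written with <= is not a strict
// weak ordering, because comp(x, x) returns true. Under checked_compare, the
// self-equivalence assertion fires on the first comparison in debug builds:
//
//   struct BrokenLess {
//     bool operator()(int a, int b) const { return a <= b; }  // bug: use <
//   };
//   absl::btree_set<int, BrokenLess> s;
//   s.insert(1);
//   s.insert(2);  // assert(is_self_equivalent(lhs)) fails here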

template <>
struct key_compare_adapter<std::less<std::string>, std::string> {
  using type = StringBtreeDefaultLess;
};

template <>
struct key_compare_adapter<std::greater<std::string>, std::string> {
  using type = StringBtreeDefaultGreater;
};

template <>
struct key_compare_adapter<std::less<absl::string_view>, absl::string_view> {
  using type = StringBtreeDefaultLess;
};

template <>
struct key_compare_adapter<std::greater<absl::string_view>, absl::string_view> {
  using type = StringBtreeDefaultGreater;
};

template <>
struct key_compare_adapter<std::less<absl::Cord>, absl::Cord> {
  using type = StringBtreeDefaultLess;
};

template <>
struct key_compare_adapter<std::greater<absl::Cord>, absl::Cord> {
  using type = StringBtreeDefaultGreater;
};

// Detects an 'absl_btree_prefer_linear_node_search' member. This is
// a protocol used as an opt-in or opt-out of linear search.
//
// For example, this would be useful for key types that wrap an integer
// and define their own cheap operator<(). For example:
//
//   class K {
//    public:
//     using absl_btree_prefer_linear_node_search = std::true_type;
//     ...
//    private:
//     friend bool operator<(K a, K b) { return a.k_ < b.k_; }
//     int k_;
//   };
//
//   btree_map<K, V> m;  // Uses linear search
//
// If T has the preference tag, then it has a preference.
// Btree will use the tag's truth value.
template <typename T, typename = void>
struct has_linear_node_search_preference : std::false_type {};
template <typename T, typename = void>
struct prefers_linear_node_search : std::false_type {};
template <typename T>
struct has_linear_node_search_preference<
    T, absl::void_t<typename T::absl_btree_prefer_linear_node_search>>
    : std::true_type {};
template <typename T>
struct prefers_linear_node_search<
    T, absl::void_t<typename T::absl_btree_prefer_linear_node_search>>
    : T::absl_btree_prefer_linear_node_search {};
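
// The two traits compose as follows (TaggedKey is hypothetical):
//
//   struct TaggedKey {
//     using absl_btree_prefer_linear_node_search = std::true_type;
//     int v;
//     friend bool operator<(TaggedKey a, TaggedKey b) { return a.v < b.v; }
//   };
//   static_assert(has_linear_node_search_preference<TaggedKey>::value, "");
//   static_assert(prefers_linear_node_search<TaggedKey>::value, "");
//   static_assert(!has_linear_node_search_preference<int>::value, "");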

template <typename Compare, typename Key>
constexpr bool compare_has_valid_result_type() {
  using compare_result_type = compare_result_t<Compare, Key, Key>;
  return std::is_same<compare_result_type, bool>::value ||
         std::is_convertible<compare_result_type, absl::weak_ordering>::value;
}

template <typename original_key_compare, typename value_type>
class map_value_compare {
  template <typename Params>
  friend class btree;

  // Note: this `protected` is part of the API of std::map::value_compare. See
  // https://en.cppreference.com/w/cpp/container/map/value_compare.
 protected:
  explicit map_value_compare(original_key_compare c) : comp(std::move(c)) {}

  original_key_compare comp;  // NOLINT

 public:
  auto operator()(const value_type &lhs, const value_type &rhs) const
      -> decltype(comp(lhs.first, rhs.first)) {
    return comp(lhs.first, rhs.first);
  }
};

template <typename Key, typename Compare, typename Alloc, int TargetNodeSize,
          bool IsMulti, bool IsMap, typename SlotPolicy>
struct common_params : common_policy_traits<SlotPolicy> {
  using original_key_compare = Compare;

  // If Compare is a common comparator for a string-like type, then we adapt it
  // to use heterogeneous lookup and to be a key-compare-to comparator.
  // We also adapt the comparator to diagnose invalid comparators in debug mode.
  // We disable this when `Compare` is invalid in a way that will cause
  // adaptation to fail (having an invalid return type) so that we can give a
  // better compilation failure in static_assert_validation. If we don't do
  // this, then there will be cascading compilation failures that are confusing
  // for users.
  using key_compare =
      absl::conditional_t<!compare_has_valid_result_type<Compare, Key>(),
                          Compare,
                          typename key_compare_adapter<Compare, Key>::type>;

  static constexpr bool kIsKeyCompareStringAdapted =
      std::is_same<key_compare, StringBtreeDefaultLess>::value ||
      std::is_same<key_compare, StringBtreeDefaultGreater>::value;
  static constexpr bool kIsKeyCompareTransparent =
      IsTransparent<original_key_compare>::value || kIsKeyCompareStringAdapted;
  static constexpr bool kEnableGenerations =
#ifdef ABSL_BTREE_ENABLE_GENERATIONS
      true;
#else
      false;
#endif

  // A type which indicates if we have a key-compare-to functor or a plain old
  // key-compare functor.
  using is_key_compare_to = btree_is_key_compare_to<key_compare, Key>;

  using allocator_type = Alloc;
  using key_type = Key;
  using size_type = size_t;
  using difference_type = ptrdiff_t;

  using slot_policy = SlotPolicy;
  using slot_type = typename slot_policy::slot_type;
  using value_type = typename slot_policy::value_type;
  using init_type = typename slot_policy::mutable_value_type;
  using pointer = value_type *;
  using const_pointer = const value_type *;
  using reference = value_type &;
  using const_reference = const value_type &;

  using value_compare =
      absl::conditional_t<IsMap,
                          map_value_compare<original_key_compare, value_type>,
                          original_key_compare>;
  using is_map_container = std::integral_constant<bool, IsMap>;

  // For the given lookup key type, returns whether we can have multiple
  // equivalent keys in the btree. If this is a multi-container, then we can.
  // Otherwise, we can have multiple equivalent keys only if all of the
  // following conditions are met:
  // - The comparator is transparent.
  // - The lookup key type is not the same as key_type.
  // - The comparator is not a StringBtreeDefault{Less,Greater} comparator
  //   that we know has the same equivalence classes for all lookup types.
  template <typename LookupKey>
  constexpr static bool can_have_multiple_equivalent_keys() {
    return IsMulti || (IsTransparent<key_compare>::value &&
                       !std::is_same<LookupKey, Key>::value &&
                       !kIsKeyCompareStringAdapted);
  }
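
  // For example (hypothetical comparator): a transparent comparator that
  // truncates doubles when comparing against an int lookup key makes several
  // distinct stored keys equivalent to a single lookup key:
  //
  //   struct TruncatingLess {
  //     using is_transparent = void;
  //     bool operator()(double a, double b) const { return a < b; }
  //     bool operator()(double a, int b) const { return int(a) < b; }
  //     bool operator()(int a, double b) const { return a < int(b); }
  //   };
  //
  // In a btree_set<double, TruncatingLess>, the stored keys 2.25 and 2.75 are
  // distinct, yet both are equivalent to the lookup key int{2}, so
  // can_have_multiple_equivalent_keys<int>() must be true here.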

  enum {
    kTargetNodeSize = TargetNodeSize,

    // Upper bound for the available space for slots. This is largest for leaf
    // nodes, which have overhead of at least a pointer + 4 bytes (for storing
    // 3 field_types and an enum).
    kNodeSlotSpace = TargetNodeSize - /*minimum overhead=*/(sizeof(void *) + 4),
  };

  // This is an integral type large enough to hold as many slots as will fit a
  // node of TargetNodeSize bytes.
  using node_count_type =
      absl::conditional_t<(kNodeSlotSpace / sizeof(slot_type) >
                           (std::numeric_limits<uint8_t>::max)()),
                          uint16_t, uint8_t>;  // NOLINT
};
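
// Worked example (assuming 64-bit pointers): with the default TargetNodeSize
// of 256 and 4-byte slots, kNodeSlotSpace = 256 - (8 + 4) = 244 and
// 244 / 4 = 61 <= 255, so node_count_type is uint8_t. A hypothetical
// 4096-byte target node with 4-byte slots would give (4096 - 12) / 4 = 1021
// candidate slots and select uint16_t instead.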

// An adapter class that converts a lower-bound compare into an upper-bound
// compare. Note: there is no need to make a version of this adapter specialized
// for key-compare-to functors because the upper-bound (the first value greater
// than the input) is never an exact match.
template <typename Compare>
struct upper_bound_adapter {
  explicit upper_bound_adapter(const Compare &c) : comp(c) {}
  template <typename K1, typename K2>
  bool operator()(const K1 &a, const K2 &b) const {
    // Returns true when a is not greater than b.
    return !compare_internal::compare_result_as_less_than(comp(b, a));
  }

 private:
  Compare comp;
};
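
// For example, with std::less<int> as the base comparator and node keys
// {1, 2, 2, 3}: a lower-bound search for k = 2 stops at index 1, the first
// key with !(key < 2). An upper-bound search uses the adapter, whose
// operator()(key, k) is !(k < key), i.e. "key <= k"; the first key failing
// that test is 3, so the search stops at index 3, just past the equal keys.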

enum class MatchKind : uint8_t { kEq, kNe };

template <typename V, bool IsCompareTo>
struct SearchResult {
  V value;
  MatchKind match;

  static constexpr bool HasMatch() { return true; }
  bool IsEq() const { return match == MatchKind::kEq; }
};

// When we don't use CompareTo, `match` is not present.
// This ensures that callers can't use it accidentally when it provides no
// useful information.
template <typename V>
struct SearchResult<V, false> {
  SearchResult() {}
  explicit SearchResult(V v) : value(v) {}
  SearchResult(V v, MatchKind /*match*/) : value(v) {}

  V value;

  static constexpr bool HasMatch() { return false; }
  static constexpr bool IsEq() { return false; }
};

// A node in the btree holding values. The same node type is used for both
// internal and leaf nodes in the btree, though the nodes are allocated in
// such a way that the children array is only valid in internal nodes.
template <typename Params>
class btree_node {
  using is_key_compare_to = typename Params::is_key_compare_to;
  using field_type = typename Params::node_count_type;
  using allocator_type = typename Params::allocator_type;
  using slot_type = typename Params::slot_type;
  using original_key_compare = typename Params::original_key_compare;

 public:
  using params_type = Params;
  using key_type = typename Params::key_type;
  using value_type = typename Params::value_type;
  using pointer = typename Params::pointer;
  using const_pointer = typename Params::const_pointer;
  using reference = typename Params::reference;
  using const_reference = typename Params::const_reference;
  using key_compare = typename Params::key_compare;
  using size_type = typename Params::size_type;
  using difference_type = typename Params::difference_type;

  // Btree decides whether to use linear node search as follows:
  //   - If the comparator expresses a preference, use that.
  //   - If the key expresses a preference, use that.
  //   - If the key is arithmetic and the comparator is std::less or
  //     std::greater, choose linear.
  //   - Otherwise, choose binary.
  // TODO(ezb): Might make sense to add condition(s) based on node-size.
  using use_linear_search = std::integral_constant<
      bool, has_linear_node_search_preference<original_key_compare>::value
                ? prefers_linear_node_search<original_key_compare>::value
            : has_linear_node_search_preference<key_type>::value
                ? prefers_linear_node_search<key_type>::value
                : std::is_arithmetic<key_type>::value &&
                      (std::is_same<std::less<key_type>,
                                    original_key_compare>::value ||
                       std::is_same<std::greater<key_type>,
                                    original_key_compare>::value)>;

  // This class is organized by absl::container_internal::Layout as if it had
  // the following structure:
  //   // A pointer to the node's parent.
  //   btree_node *parent;
  //
  //   // When ABSL_BTREE_ENABLE_GENERATIONS is defined, we also have a
  //   // generation integer in order to check that when iterators are
  //   // used, they haven't been invalidated already. Only the generation on
  //   // the root is used, but we have one on each node because whether a node
  //   // is root or not can change.
  //   uint32_t generation;
  //
  //   // The position of the node in the node's parent.
  //   field_type position;
  //   // The index of the first populated value in `values`.
  //   // TODO(ezb): right now, `start` is always 0. Update insertion/merge
  //   // logic to allow for floating storage within nodes.
  //   field_type start;
  //   // The index after the last populated value in `values`. Currently, this
  //   // is the same as the count of values.
  //   field_type finish;
  //   // The maximum number of values the node can hold. This is an integer in
  //   // [1, kNodeSlots] for root leaf nodes, kNodeSlots for non-root leaf
  //   // nodes, and kInternalNodeMaxCount (as a sentinel value) for internal
  //   // nodes (even though there are still kNodeSlots values in the node).
  //   // TODO(ezb): make max_count use only 4 bits and record log2(capacity)
  //   // to free extra bits for is_root, etc.
  //   field_type max_count;
  //
  //   // The array of values. The capacity is `max_count` for leaf nodes and
  //   // kNodeSlots for internal nodes. Only the values in
  //   // [start, finish) have been initialized and are valid.
  //   slot_type values[max_count];
  //
  //   // The array of child pointers. The keys in children[i] are all less
  //   // than key(i). The keys in children[i + 1] are all greater than key(i).
  //   // There are 0 children for leaf nodes and kNodeSlots + 1 children for
  //   // internal nodes.
  //   btree_node *children[kNodeSlots + 1];
  //
  // This class is only constructed by EmptyNodeType. Normally, pointers to the
  // layout above are allocated, cast to btree_node*, and de-allocated within
  // the btree implementation.
  ~btree_node() = default;
  btree_node(btree_node const &) = delete;
  btree_node &operator=(btree_node const &) = delete;

  // Public for EmptyNodeType.
  constexpr static size_type Alignment() {
    static_assert(LeafLayout(1).Alignment() == InternalLayout().Alignment(),
                  "Alignment of all nodes must be equal.");
    return InternalLayout().Alignment();
  }

 protected:
  btree_node() = default;

 private:
  using layout_type =
      absl::container_internal::Layout<btree_node *, uint32_t, field_type,
                                       slot_type, btree_node *>;
  constexpr static size_type SizeWithNSlots(size_type n) {
    return layout_type(
               /*parent*/ 1,
               /*generation*/ params_type::kEnableGenerations ? 1 : 0,
               /*position, start, finish, max_count*/ 4,
               /*slots*/ n,
               /*children*/ 0)
        .AllocSize();
  }
  // A lower bound for the overhead of fields other than slots in a leaf node.
  constexpr static size_type MinimumOverhead() {
    return SizeWithNSlots(1) - sizeof(slot_type);
  }

  // Compute how many values we can fit onto a leaf node taking into account
  // padding.
  constexpr static size_type NodeTargetSlots(const size_type begin,
                                             const size_type end) {
    return begin == end ? begin
           : SizeWithNSlots((begin + end) / 2 + 1) >
                   params_type::kTargetNodeSize
               ? NodeTargetSlots(begin, (begin + end) / 2)
               : NodeTargetSlots((begin + end) / 2 + 1, end);
  }

  constexpr static size_type kTargetNodeSize = params_type::kTargetNodeSize;
  constexpr static size_type kNodeTargetSlots =
      NodeTargetSlots(0, kTargetNodeSize);
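
  // Worked example (assuming 64-bit pointers, generations disabled, and
  // 4-byte slots such as int32_t): SizeWithNSlots(n) is 8 (parent pointer) +
  // 4 (position/start/finish/max_count) + 4 * n, so the binary search above
  // finds the largest n with 12 + 4 * n <= 256, giving kNodeTargetSlots == 61
  // for the default 256-byte target node size.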

  // We need a minimum of 3 slots per internal node in order to perform
  // splitting (1 value for the two nodes involved in the split and 1 value
  // propagated to the parent as the delimiter for the split). For performance
  // reasons, we don't allow 3 slots-per-node due to bad worst-case occupancy
  // of 1/3 (for a node, not a b-tree).
  constexpr static size_type kMinNodeSlots = 4;

  constexpr static size_type kNodeSlots =
      kNodeTargetSlots >= kMinNodeSlots ? kNodeTargetSlots : kMinNodeSlots;

  // The node is internal (i.e. is not a leaf node) if and only if `max_count`
  // has this value.
  constexpr static field_type kInternalNodeMaxCount = 0;

  // Leaves can have fewer than kNodeSlots values.
  constexpr static layout_type LeafLayout(
      const size_type slot_count = kNodeSlots) {
    return layout_type(
        /*parent*/ 1,
        /*generation*/ params_type::kEnableGenerations ? 1 : 0,
        /*position, start, finish, max_count*/ 4,
        /*slots*/ slot_count,
        /*children*/ 0);
  }
  constexpr static layout_type InternalLayout() {
    return layout_type(
        /*parent*/ 1,
        /*generation*/ params_type::kEnableGenerations ? 1 : 0,
        /*position, start, finish, max_count*/ 4,
        /*slots*/ kNodeSlots,
        /*children*/ kNodeSlots + 1);
  }
  constexpr static size_type LeafSize(const size_type slot_count = kNodeSlots) {
    return LeafLayout(slot_count).AllocSize();
  }
  constexpr static size_type InternalSize() {
    return InternalLayout().AllocSize();
  }

  // N is the index of the type in the Layout definition.
  // ElementType<N> is the Nth type in the Layout definition.
  template <size_type N>
  inline typename layout_type::template ElementType<N> *GetField() {
    // We assert that we don't read from values that aren't there.
    assert(N < 4 || is_internal());
    return InternalLayout().template Pointer<N>(reinterpret_cast<char *>(this));
  }
  template <size_type N>
  inline const typename layout_type::template ElementType<N> *GetField() const {
    assert(N < 4 || is_internal());
    return InternalLayout().template Pointer<N>(
        reinterpret_cast<const char *>(this));
  }
  void set_parent(btree_node *p) { *GetField<0>() = p; }
  field_type &mutable_finish() { return GetField<2>()[2]; }
  slot_type *slot(size_type i) { return &GetField<3>()[i]; }
  slot_type *start_slot() { return slot(start()); }
  slot_type *finish_slot() { return slot(finish()); }
  const slot_type *slot(size_type i) const { return &GetField<3>()[i]; }
  void set_position(field_type v) { GetField<2>()[0] = v; }
  void set_start(field_type v) { GetField<2>()[1] = v; }
  void set_finish(field_type v) { GetField<2>()[2] = v; }
  // This method is only called by the node init methods.
  void set_max_count(field_type v) { GetField<2>()[3] = v; }

 public:
  // Whether this is a leaf node or not. This value doesn't change after the
  // node is created.
  bool is_leaf() const { return GetField<2>()[3] != kInternalNodeMaxCount; }
  // Whether this is an internal node or not. This value doesn't change after
  // the node is created.
  bool is_internal() const { return !is_leaf(); }

  // Getter for the position of this node in its parent.
  field_type position() const { return GetField<2>()[0]; }

  // Getter for the offset of the first value in the `values` array.
  field_type start() const {
    // TODO(ezb): when floating storage is implemented, return GetField<2>()[1];
    assert(GetField<2>()[1] == 0);
    return 0;
  }

  // Getter for the offset after the last value in the `values` array.
  field_type finish() const { return GetField<2>()[2]; }

  // Getters for the number of values stored in this node.
  field_type count() const {
    assert(finish() >= start());
    return finish() - start();
  }
  field_type max_count() const {
    // Internal nodes have max_count == kInternalNodeMaxCount.
    // Leaf nodes have max_count in [1, kNodeSlots].
    const field_type max_count = GetField<2>()[3];
    return max_count == field_type{kInternalNodeMaxCount}
               ? field_type{kNodeSlots}
               : max_count;
  }

  // Getter for the parent of this node.
  btree_node *parent() const { return *GetField<0>(); }
  // Getter for whether the node is the root of the tree. The parent of the
  // root of the tree is the leftmost node in the tree which is guaranteed to
  // be a leaf.
  bool is_root() const { return parent()->is_leaf(); }
  void make_root() {
    assert(parent()->is_root());
    set_generation(parent()->generation());
    set_parent(parent()->parent());
  }

  // Gets the root node's generation integer, which is the one used by the
  // tree.
  uint32_t *get_root_generation() const {
    assert(params_type::kEnableGenerations);
    const btree_node *curr = this;
    for (; !curr->is_root(); curr = curr->parent()) continue;
    return const_cast<uint32_t *>(&curr->GetField<1>()[0]);
  }

  // Returns the generation for iterator validation.
  uint32_t generation() const {
    return params_type::kEnableGenerations ? *get_root_generation() : 0;
  }
  // Updates generation. Should only be called on a root node or during node
  // initialization.
  void set_generation(uint32_t generation) {
    if (params_type::kEnableGenerations) GetField<1>()[0] = generation;
  }
  // Updates the generation. We do this whenever the node is mutated.
  void next_generation() {
    if (params_type::kEnableGenerations) ++*get_root_generation();
  }

  // Getters for the key/value at position i in the node.
  const key_type &key(size_type i) const { return params_type::key(slot(i)); }
  reference value(size_type i) { return params_type::element(slot(i)); }
  const_reference value(size_type i) const {
    return params_type::element(slot(i));
  }

  // Getters/setter for the child at position i in the node.
  btree_node *child(field_type i) const { return GetField<4>()[i]; }
  btree_node *start_child() const { return child(start()); }
  btree_node *&mutable_child(field_type i) { return GetField<4>()[i]; }
  void clear_child(field_type i) {
    absl::container_internal::SanitizerPoisonObject(&mutable_child(i));
  }
  void set_child(field_type i, btree_node *c) {
    absl::container_internal::SanitizerUnpoisonObject(&mutable_child(i));
    mutable_child(i) = c;
    c->set_position(i);
  }
  void init_child(field_type i, btree_node *c) {
    set_child(i, c);
    c->set_parent(this);
  }

  // Returns the position of the first value whose key is not less than k.
  template <typename K>
  SearchResult<size_type, is_key_compare_to::value> lower_bound(
      const K &k, const key_compare &comp) const {
    return use_linear_search::value ? linear_search(k, comp)
                                    : binary_search(k, comp);
  }
  // Returns the position of the first value whose key is greater than k.
  template <typename K>
  size_type upper_bound(const K &k, const key_compare &comp) const {
    auto upper_compare = upper_bound_adapter<key_compare>(comp);
    return use_linear_search::value ? linear_search(k, upper_compare).value
                                    : binary_search(k, upper_compare).value;
  }

  template <typename K, typename Compare>
  SearchResult<size_type, btree_is_key_compare_to<Compare, key_type>::value>
  linear_search(const K &k, const Compare &comp) const {
    return linear_search_impl(k, start(), finish(), comp,
                              btree_is_key_compare_to<Compare, key_type>());
  }

  template <typename K, typename Compare>
  SearchResult<size_type, btree_is_key_compare_to<Compare, key_type>::value>
  binary_search(const K &k, const Compare &comp) const {
    return binary_search_impl(k, start(), finish(), comp,
                              btree_is_key_compare_to<Compare, key_type>());
  }

  // Returns the position of the first value whose key is not less than k using
  // linear search performed using plain compare.
  template <typename K, typename Compare>
  SearchResult<size_type, false> linear_search_impl(
      const K &k, size_type s, const size_type e, const Compare &comp,
      std::false_type /* IsCompareTo */) const {
    while (s < e) {
      if (!comp(key(s), k)) {
        break;
      }
      ++s;
    }
    return SearchResult<size_type, false>{s};
  }

  // Returns the position of the first value whose key is not less than k using
  // linear search performed using compare-to.
  template <typename K, typename Compare>
  SearchResult<size_type, true> linear_search_impl(
      const K &k, size_type s, const size_type e, const Compare &comp,
      std::true_type /* IsCompareTo */) const {
    while (s < e) {
      const absl::weak_ordering c = comp(key(s), k);
      if (c == 0) {
        return {s, MatchKind::kEq};
      } else if (c > 0) {
        break;
      }
      ++s;
    }
    return {s, MatchKind::kNe};
  }

  // Returns the position of the first value whose key is not less than k using
  // binary search performed using plain compare.
  template <typename K, typename Compare>
  SearchResult<size_type, false> binary_search_impl(
      const K &k, size_type s, size_type e, const Compare &comp,
      std::false_type /* IsCompareTo */) const {
    while (s != e) {
      const size_type mid = (s + e) >> 1;
      if (comp(key(mid), k)) {
        s = mid + 1;
      } else {
        e = mid;
      }
    }
    return SearchResult<size_type, false>{s};
  }

  // Returns the position of the first value whose key is not less than k using
  // binary search performed using compare-to.
  template <typename K, typename CompareTo>
  SearchResult<size_type, true> binary_search_impl(
      const K &k, size_type s, size_type e, const CompareTo &comp,
      std::true_type /* IsCompareTo */) const {
    if (params_type::template can_have_multiple_equivalent_keys<K>()) {
      MatchKind exact_match = MatchKind::kNe;
      while (s != e) {
        const size_type mid = (s + e) >> 1;
        const absl::weak_ordering c = comp(key(mid), k);
        if (c < 0) {
          s = mid + 1;
        } else {
          e = mid;
          if (c == 0) {
            // Need to return the first value whose key is not less than k,
            // which requires continuing the binary search if there could be
            // multiple equivalent keys.
            exact_match = MatchKind::kEq;
          }
        }
      }
      return {s, exact_match};
    } else {  // Can't have multiple equivalent keys.
      while (s != e) {
        const size_type mid = (s + e) >> 1;
        const absl::weak_ordering c = comp(key(mid), k);
        if (c < 0) {
          s = mid + 1;
        } else if (c > 0) {
          e = mid;
        } else {
          return {mid, MatchKind::kEq};
        }
      }
      return {s, MatchKind::kNe};
    }
  }
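
  // For example, with node keys {1, 2, 2, 2, 3} and k = 2: when duplicate
  // keys are possible, the first midpoint that compares equal cannot be
  // returned directly, because the first equivalent key may lie to its left
  // (here, at index 1); setting `e = mid` and remembering MatchKind::kEq
  // narrows the search to the leftmost equivalent key. When duplicates are
  // impossible, returning at the first equality is safe and saves
  // comparisons.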

  // Emplaces a value at position i, shifting all existing values and
  // children at positions >= i to the right by 1.
  template <typename... Args>
  void emplace_value(field_type i, allocator_type *alloc, Args &&...args);

  // Removes the values at positions [i, i + to_erase), shifting all existing
  // values and children after that range to the left by to_erase. Clears all
  // children between [i, i + to_erase).
  void remove_values(field_type i, field_type to_erase, allocator_type *alloc);

  // Rebalances a node with its right sibling.
  void rebalance_right_to_left(field_type to_move, btree_node *right,
                               allocator_type *alloc);
  void rebalance_left_to_right(field_type to_move, btree_node *right,
                               allocator_type *alloc);

  // Splits a node, moving a portion of the node's values to its right sibling.
  void split(int insert_position, btree_node *dest, allocator_type *alloc);

  // Merges a node with its right sibling, moving all of the values and the
  // delimiting key in the parent node onto itself, and deleting the src node.
  void merge(btree_node *src, allocator_type *alloc);

  // Node allocation/deletion routines.
  void init_leaf(field_type max_count, btree_node *parent) {
    set_generation(0);
    set_parent(parent);
    set_position(0);
    set_start(0);
    set_finish(0);
    set_max_count(max_count);
    absl::container_internal::SanitizerPoisonMemoryRegion(
        start_slot(), max_count * sizeof(slot_type));
  }
  void init_internal(btree_node *parent) {
    init_leaf(kNodeSlots, parent);
    // Set `max_count` to a sentinel value to indicate that this node is
    // internal.
    set_max_count(kInternalNodeMaxCount);
    absl::container_internal::SanitizerPoisonMemoryRegion(
        &mutable_child(start()), (kNodeSlots + 1) * sizeof(btree_node *));
  }

  static void deallocate(const size_type size, btree_node *node,
                         allocator_type *alloc) {
    absl::container_internal::SanitizerUnpoisonMemoryRegion(node, size);
    absl::container_internal::Deallocate<Alignment()>(alloc, node, size);
  }

  // Deletes a node and all of its children.
  static void clear_and_delete(btree_node *node, allocator_type *alloc);

 private:
  template <typename... Args>
  void value_init(const field_type i, allocator_type *alloc, Args &&...args) {
    next_generation();
    absl::container_internal::SanitizerUnpoisonObject(slot(i));
    params_type::construct(alloc, slot(i), std::forward<Args>(args)...);
  }
  void value_destroy(const field_type i, allocator_type *alloc) {
    next_generation();
    params_type::destroy(alloc, slot(i));
    absl::container_internal::SanitizerPoisonObject(slot(i));
  }
  void value_destroy_n(const field_type i, const field_type n,
                       allocator_type *alloc) {
    next_generation();
    for (slot_type *s = slot(i), *end = slot(i + n); s != end; ++s) {
      params_type::destroy(alloc, s);
      absl::container_internal::SanitizerPoisonObject(s);
    }
  }

  static void transfer(slot_type *dest, slot_type *src, allocator_type *alloc) {
    absl::container_internal::SanitizerUnpoisonObject(dest);
    params_type::transfer(alloc, dest, src);
    absl::container_internal::SanitizerPoisonObject(src);
  }

  // Transfers value from slot `src_i` in `src_node` to slot `dest_i` in `this`.
  void transfer(const size_type dest_i, const size_type src_i,
                btree_node *src_node, allocator_type *alloc) {
    next_generation();
    transfer(slot(dest_i), src_node->slot(src_i), alloc);
  }

  // Transfers `n` values starting at value `src_i` in `src_node` into the
  // values starting at value `dest_i` in `this`.
  void transfer_n(const size_type n, const size_type dest_i,
                  const size_type src_i, btree_node *src_node,
                  allocator_type *alloc) {
    next_generation();
    for (slot_type *src = src_node->slot(src_i), *end = src + n,
                   *dest = slot(dest_i);
         src != end; ++src, ++dest) {
      transfer(dest, src, alloc);
    }
  }

  // Same as above, except that we start at the end and work our way to the
  // beginning.
  void transfer_n_backward(const size_type n, const size_type dest_i,
                           const size_type src_i, btree_node *src_node,
                           allocator_type *alloc) {
    next_generation();
    for (slot_type *src = src_node->slot(src_i + n), *end = src - n,
                   *dest = slot(dest_i + n);
         src != end; --src, --dest) {
      // If we modified the loop index calculations above to avoid the -1s
      // here, it would result in UB in the computation of `end` (and possibly
      // `src` as well, if n == 0), since slot() is effectively an array index
      // and it is UB to compute the address of any out-of-bounds array element
      // except for one-past-the-end.
      transfer(dest - 1, src - 1, alloc);
    }
  }

  template <typename P>
  friend class btree;
  template <typename N, typename R, typename P>
  friend class btree_iterator;
  friend class BtreeNodePeer;
  friend struct btree_access;
};

template <typename Node>
bool AreNodesFromSameContainer(const Node *node_a, const Node *node_b) {
  // If either node is null, then give up on checking whether they're from the
  // same container. (If exactly one is null, then we'll trigger the
  // default-constructed assert in Equals.)
  if (node_a == nullptr || node_b == nullptr) return true;
  while (!node_a->is_root()) node_a = node_a->parent();
  while (!node_b->is_root()) node_b = node_b->parent();
  return node_a == node_b;
}

class btree_iterator_generation_info_enabled {
 public:
  explicit btree_iterator_generation_info_enabled(uint32_t g)
      : generation_(g) {}

  // Updates the generation. For use internally right before we return an
  // iterator to the user.
  template <typename Node>
  void update_generation(const Node *node) {
    if (node != nullptr) generation_ = node->generation();
  }
  uint32_t generation() const { return generation_; }

  template <typename Node>
  void assert_valid_generation(const Node *node) const {
    if (node != nullptr && node->generation() != generation_) {
      ABSL_INTERNAL_LOG(
          FATAL,
          "Attempting to use an invalidated iterator. The corresponding b-tree "
          "container has been mutated since this iterator was constructed.");
    }
  }

 private:
  // Used to check that the iterator hasn't been invalidated.
  uint32_t generation_;
};

class btree_iterator_generation_info_disabled {
 public:
  explicit btree_iterator_generation_info_disabled(uint32_t) {}
  void update_generation(const void *) {}
  uint32_t generation() const { return 0; }
  void assert_valid_generation(const void *) const {}
};

#ifdef ABSL_BTREE_ENABLE_GENERATIONS
using btree_iterator_generation_info = btree_iterator_generation_info_enabled;
#else
using btree_iterator_generation_info = btree_iterator_generation_info_disabled;
#endif

template <typename Node, typename Reference, typename Pointer>
class btree_iterator : private btree_iterator_generation_info {
  using field_type = typename Node::field_type;
  using key_type = typename Node::key_type;
  using size_type = typename Node::size_type;
  using params_type = typename Node::params_type;
  using is_map_container = typename params_type::is_map_container;

  using node_type = Node;
  using normal_node = typename std::remove_const<Node>::type;
  using const_node = const Node;
  using normal_pointer = typename params_type::pointer;
  using normal_reference = typename params_type::reference;
  using const_pointer = typename params_type::const_pointer;
  using const_reference = typename params_type::const_reference;
  using slot_type = typename params_type::slot_type;

  using iterator =
      btree_iterator<normal_node, normal_reference, normal_pointer>;
  using const_iterator =
      btree_iterator<const_node, const_reference, const_pointer>;

 public:
  // These aliases are public for std::iterator_traits.
  using difference_type = typename Node::difference_type;
  using value_type = typename params_type::value_type;
  using pointer = Pointer;
  using reference = Reference;
  using iterator_category = std::bidirectional_iterator_tag;

  btree_iterator() : btree_iterator(nullptr, -1) {}
  explicit btree_iterator(Node *n) : btree_iterator(n, n->start()) {}
  btree_iterator(Node *n, int p)
      : btree_iterator_generation_info(n != nullptr ? n->generation()
                                                    : ~uint32_t{}),
        node_(n),
        position_(p) {}

  // NOTE: this SFINAE allows for implicit conversions from iterator to
  // const_iterator, but it specifically avoids hiding the copy constructor so
  // that the trivial one will be used when possible.
  template <typename N, typename R, typename P,
            absl::enable_if_t<
                std::is_same<btree_iterator<N, R, P>, iterator>::value &&
                    std::is_same<btree_iterator, const_iterator>::value,
                int> = 0>
  btree_iterator(const btree_iterator<N, R, P> other)  // NOLINT
      : btree_iterator_generation_info(other),
        node_(other.node_),
        position_(other.position_) {}

  bool operator==(const iterator &other) const { return Equals(other); }
  bool operator==(const const_iterator &other) const { return Equals(other); }
  bool operator!=(const iterator &other) const { return !Equals(other); }
  bool operator!=(const const_iterator &other) const { return !Equals(other); }

  // Returns n such that n calls to ++other yields *this.
  // Precondition: n exists.
  difference_type operator-(const_iterator other) const {
    if (node_ == other.node_) {
      if (node_->is_leaf()) return position_ - other.position_;
      if (position_ == other.position_) return 0;
    }
    return distance_slow(other);
  }

  // Accessors for the key/value the iterator is pointing at.
  reference operator*() const {
    ABSL_HARDENING_ASSERT(node_ != nullptr);
    assert_valid_generation(node_);
    ABSL_HARDENING_ASSERT(position_ >= node_->start());
    if (position_ >= node_->finish()) {
      ABSL_HARDENING_ASSERT(!IsEndIterator() && "Dereferencing end() iterator");
      ABSL_HARDENING_ASSERT(position_ < node_->finish());
    }
    return node_->value(static_cast<field_type>(position_));
  }
  pointer operator->() const { return &operator*(); }

  btree_iterator &operator++() {
    increment();
    return *this;
  }
  btree_iterator &operator--() {
    decrement();
    return *this;
  }
  btree_iterator operator++(int) {
    btree_iterator tmp = *this;
    ++*this;
    return tmp;
  }
  btree_iterator operator--(int) {
    btree_iterator tmp = *this;
    --*this;
    return tmp;
  }

 private:
  friend iterator;
  friend const_iterator;
  template <typename Params>
  friend class btree;
  template <typename Tree>
  friend class btree_container;
  template <typename Tree>
  friend class btree_set_container;
  template <typename Tree>
  friend class btree_map_container;
  template <typename Tree>
  friend class btree_multiset_container;
  template <typename TreeType, typename CheckerType>
  friend class base_checker;
  friend struct btree_access;

  // This SFINAE allows explicit conversions from const_iterator to
  // iterator, but also avoids hiding the copy constructor.
  // NOTE: the const_cast is safe because this constructor is only called by
  // non-const methods and the container owns the nodes.
  template <typename N, typename R, typename P,
            absl::enable_if_t<
                std::is_same<btree_iterator<N, R, P>, const_iterator>::value &&
                    std::is_same<btree_iterator, iterator>::value,
                int> = 0>
  explicit btree_iterator(const btree_iterator<N, R, P> other)
      : btree_iterator_generation_info(other.generation()),
        node_(const_cast<node_type *>(other.node_)),
        position_(other.position_) {}

  bool Equals(const const_iterator other) const {
    ABSL_HARDENING_ASSERT(((node_ == nullptr && other.node_ == nullptr) ||
                           (node_ != nullptr && other.node_ != nullptr)) &&
                          "Comparing default-constructed iterator with "
                          "non-default-constructed iterator.");
    // Note: we use assert instead of ABSL_HARDENING_ASSERT here because this
    // changes the complexity of Equals from O(1) to O(log(N) + log(M)) where
    // N/M are sizes of the containers containing node_/other.node_.
    assert(AreNodesFromSameContainer(node_, other.node_) &&
           "Comparing iterators from different containers.");
    assert_valid_generation(node_);
    other.assert_valid_generation(other.node_);
    return node_ == other.node_ && position_ == other.position_;
  }

  bool IsEndIterator() const {
    if (position_ != node_->finish()) return false;
    node_type *node = node_;
    while (!node->is_root()) {
      if (node->position() != node->parent()->finish()) return false;
      node = node->parent();
    }
    return true;
  }

  // Returns n such that n calls to ++other yields *this.
  // Precondition: n exists && (this->node_ != other.node_ ||
  // !this->node_->is_leaf() || this->position_ != other.position_).
  difference_type distance_slow(const_iterator other) const;

  // Increment/decrement the iterator.
  void increment() {
    assert_valid_generation(node_);
    if (node_->is_leaf() && ++position_ < node_->finish()) {
      return;
    }
    increment_slow();
  }
  void increment_slow();

  void decrement() {
    assert_valid_generation(node_);
    if (node_->is_leaf() && --position_ >= node_->start()) {
      return;
    }
    decrement_slow();
  }
  void decrement_slow();

  const key_type &key() const {
    return node_->key(static_cast<size_type>(position_));
  }
  decltype(std::declval<Node *>()->slot(0)) slot() {
    return node_->slot(static_cast<size_type>(position_));
  }

  void update_generation() {
    btree_iterator_generation_info::update_generation(node_);
  }

  // The node in the tree the iterator is pointing at.
  Node *node_;
  // The position within the node of the tree the iterator is pointing at.
  // NOTE: this is an int rather than a field_type because iterators can point
  // to invalid positions (such as -1) in certain circumstances.
  int position_;
};
1277
1278
template <typename Params>
1279
class btree {
1280
  using node_type = btree_node<Params>;
1281
  using is_key_compare_to = typename Params::is_key_compare_to;
1282
  using field_type = typename node_type::field_type;
1283
1284
  // We use a static empty node for the root/leftmost/rightmost of empty btrees
1285
  // in order to avoid branching in begin()/end().
1286
  struct alignas(node_type::Alignment()) EmptyNodeType : node_type {
1287
    using field_type = typename node_type::field_type;
1288
    node_type *parent;
1289
#ifdef ABSL_BTREE_ENABLE_GENERATIONS
1290
    uint32_t generation = 0;
1291
#endif
1292
    field_type position = 0;
1293
    field_type start = 0;
1294
    field_type finish = 0;
1295
    // max_count must be != kInternalNodeMaxCount (so that this node is regarded
1296
    // as a leaf node). max_count() is never called when the tree is empty.
1297
    field_type max_count = node_type::kInternalNodeMaxCount + 1;
1298
1299
#ifdef _MSC_VER
1300
    // MSVC has constexpr code generation bugs here.
1301
    EmptyNodeType() : parent(this) {}
1302
#else
1303
    explicit constexpr EmptyNodeType(node_type *p) : parent(p) {}
1304
#endif
1305
  };
1306
1307
  static node_type *EmptyNode() {
1308
#ifdef _MSC_VER
1309
    static EmptyNodeType *empty_node = new EmptyNodeType;
1310
    // This assert fails on some other construction methods.
1311
    assert(empty_node->parent == empty_node);
1312
    return empty_node;
1313
#else
1314
    static constexpr EmptyNodeType empty_node(
1315
        const_cast<EmptyNodeType *>(&empty_node));
1316
    return const_cast<EmptyNodeType *>(&empty_node);
1317
#endif
1318
  }
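  // A consequence of the shared empty node (a sketch using the public wrapper
  // absl::btree_set<int>): an empty tree's root/leftmost/rightmost all alias
  // EmptyNode(), so begin() == end() holds with no branching:
  //
  //   absl::btree_set<int> s;
  //   assert(s.begin() == s.end());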
1319
1320
  enum : uint32_t {
1321
    kNodeSlots = node_type::kNodeSlots,
1322
    kMinNodeValues = kNodeSlots / 2,
1323
  };
1324
1325
  struct node_stats {
1326
    using size_type = typename Params::size_type;
1327
1328
    node_stats(size_type l, size_type i) : leaf_nodes(l), internal_nodes(i) {}
1329
1330
    node_stats &operator+=(const node_stats &other) {
1331
      leaf_nodes += other.leaf_nodes;
1332
      internal_nodes += other.internal_nodes;
1333
      return *this;
1334
    }
1335
1336
    size_type leaf_nodes;
1337
    size_type internal_nodes;
1338
  };
1339
1340
 public:
1341
  using key_type = typename Params::key_type;
1342
  using value_type = typename Params::value_type;
1343
  using size_type = typename Params::size_type;
1344
  using difference_type = typename Params::difference_type;
1345
  using key_compare = typename Params::key_compare;
1346
  using original_key_compare = typename Params::original_key_compare;
1347
  using value_compare = typename Params::value_compare;
1348
  using allocator_type = typename Params::allocator_type;
1349
  using reference = typename Params::reference;
1350
  using const_reference = typename Params::const_reference;
1351
  using pointer = typename Params::pointer;
1352
  using const_pointer = typename Params::const_pointer;
1353
  using iterator =
1354
      typename btree_iterator<node_type, reference, pointer>::iterator;
1355
  using const_iterator = typename iterator::const_iterator;
1356
  using reverse_iterator = std::reverse_iterator<iterator>;
1357
  using const_reverse_iterator = std::reverse_iterator<const_iterator>;
1358
  using node_handle_type = node_handle<Params, Params, allocator_type>;
1359
1360
  // Internal types made public for use by btree_container types.
1361
  using params_type = Params;
1362
  using slot_type = typename Params::slot_type;
1363
1364
 private:
1365
  // Copies or moves (depending on the template parameter) the values in
1366
  // other into this btree in their order in other. This btree must be empty
1367
  // before this method is called. This method is used in copy construction,
1368
  // copy assignment, and move assignment.
1369
  template <typename Btree>
1370
  void copy_or_move_values_in_order(Btree &other);
1371
1372
  // Validates that various assumptions/requirements are true at compile time.
1373
  constexpr static bool static_assert_validation();
1374
1375
 public:
1376
  btree(const key_compare &comp, const allocator_type &alloc)
1377
      : root_(EmptyNode()), rightmost_(comp, alloc, EmptyNode()), size_(0) {}
1378
1379
  btree(const btree &other) : btree(other, other.allocator()) {}
1380
  btree(const btree &other, const allocator_type &alloc)
1381
      : btree(other.key_comp(), alloc) {
1382
    copy_or_move_values_in_order(other);
1383
  }
1384
  btree(btree &&other) noexcept
1385
      : root_(absl::exchange(other.root_, EmptyNode())),
1386
        rightmost_(std::move(other.rightmost_)),
1387
        size_(absl::exchange(other.size_, 0u)) {
1388
    other.mutable_rightmost() = EmptyNode();
1389
  }
1390
  btree(btree &&other, const allocator_type &alloc)
1391
      : btree(other.key_comp(), alloc) {
1392
    if (alloc == other.allocator()) {
1393
      swap(other);
1394
    } else {
1395
      // Move values from `other` one at a time when allocators are different.
1396
      copy_or_move_values_in_order(other);
1397
    }
1398
  }
1399
1400
  ~btree() {
1401
    // Put static_asserts in destructor to avoid triggering them before the type
1402
    // is complete.
1403
    static_assert(static_assert_validation(), "This call must be elided.");
1404
    clear();
1405
  }
1406
1407
  // Assign the contents of other to *this.
1408
  btree &operator=(const btree &other);
1409
  btree &operator=(btree &&other) noexcept;
1410
1411
  iterator begin() { return iterator(leftmost()); }
1412
  const_iterator begin() const { return const_iterator(leftmost()); }
1413
  iterator end() { return iterator(rightmost(), rightmost()->finish()); }
1414
  const_iterator end() const {
1415
    return const_iterator(rightmost(), rightmost()->finish());
1416
  }
1417
  reverse_iterator rbegin() { return reverse_iterator(end()); }
1418
  const_reverse_iterator rbegin() const {
1419
    return const_reverse_iterator(end());
1420
  }
1421
  reverse_iterator rend() { return reverse_iterator(begin()); }
1422
  const_reverse_iterator rend() const {
1423
    return const_reverse_iterator(begin());
1424
  }
1425
1426
  // Finds the first element whose key is not less than `key`.
1427
  template <typename K>
1428
  iterator lower_bound(const K &key) {
1429
    return internal_end(internal_lower_bound(key).value);
1430
  }
1431
  template <typename K>
1432
  const_iterator lower_bound(const K &key) const {
1433
    return internal_end(internal_lower_bound(key).value);
1434
  }
1435
1436
  // Finds the first element whose key is not less than `key` and also returns
1437
  // whether that element is equal to `key`.
1438
  template <typename K>
1439
  std::pair<iterator, bool> lower_bound_equal(const K &key) const;
1440
1441
  // Finds the first element whose key is greater than `key`.
1442
  template <typename K>
1443
  iterator upper_bound(const K &key) {
1444
    return internal_end(internal_upper_bound(key));
1445
  }
1446
  template <typename K>
1447
  const_iterator upper_bound(const K &key) const {
1448
    return internal_end(internal_upper_bound(key));
1449
  }
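  // Bound semantics sketch (assumes absl::btree_set<int> s = {10, 20, 30}):
  //
  //   *s.lower_bound(20) == 20  // first element not less than the key
  //   *s.upper_bound(20) == 30  // first element greater than the key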
1450
1451
  // Finds the range of values which compare equal to key. The first member of
1452
  // the returned pair is equal to lower_bound(key). The second member of the
1453
  // pair is equal to upper_bound(key).
1454
  template <typename K>
1455
  std::pair<iterator, iterator> equal_range(const K &key);
1456
  template <typename K>
1457
  std::pair<const_iterator, const_iterator> equal_range(const K &key) const {
1458
    return const_cast<btree *>(this)->equal_range(key);
1459
  }
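  // Usage sketch via the public wrapper absl::btree_multimap (from
  // absl/container/btree_map.h):
  //
  //   absl::btree_multimap<int, char> m = {{1, 'a'}, {2, 'b'}, {2, 'c'}};
  //   auto range = m.equal_range(2);
  //   // std::distance(range.first, range.second) == 2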
1460
1461
  // Inserts a value into the btree only if it does not already exist. The
1462
  // boolean return value indicates whether insertion succeeded or failed.
1463
  // Requirement: if `key` already exists in the btree, does not consume `args`.
1464
  // Requirement: `key` is never referenced after consuming `args`.
1465
  template <typename K, typename... Args>
1466
  std::pair<iterator, bool> insert_unique(const K &key, Args &&...args);
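  // Contract sketch via the public wrapper (assumes absl::btree_set<int>):
  //
  //   absl::btree_set<int> s;
  //   auto r1 = s.insert(42);  // r1.second == true: the value was inserted.
  //   auto r2 = s.insert(42);  // r2.second == false: *r2.first == 42.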
1467
1468
  // Inserts with hint. Checks to see if the value should be placed immediately
1469
  // before `position` in the tree. If so, then the insertion will take
1470
  // amortized constant time. If not, the insertion will take amortized
1471
  // logarithmic time as if a call to insert_unique() were made.
1472
  // Requirement: if `key` already exists in the btree, does not consume `args`.
1473
  // Requirement: `key` is never referenced after consuming `args`.
1474
  template <typename K, typename... Args>
1475
  std::pair<iterator, bool> insert_hint_unique(iterator position, const K &key,
1476
                                               Args &&...args);
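  // Hint usage sketch (assumes the public absl::btree_set<int>): appending
  // already-sorted data with an end() hint keeps each insertion amortized
  // constant time:
  //
  //   absl::btree_set<int> s;
  //   for (int i = 0; i < 100; ++i) s.insert(s.end(), i);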
1477
1478
  // Insert a range of values into the btree.
1479
  // Note: the first overload avoids constructing a value_type if the key
1480
  // already exists in the btree.
1481
  template <typename InputIterator,
1482
            typename = decltype(std::declval<const key_compare &>()(
1483
                params_type::key(*std::declval<InputIterator>()),
1484
                std::declval<const key_type &>()))>
1485
  void insert_iterator_unique(InputIterator b, InputIterator e, int);
1486
  // We need the second overload for cases in which we need to construct a
1487
  // value_type in order to compare it with the keys already in the btree.
1488
  template <typename InputIterator>
1489
  void insert_iterator_unique(InputIterator b, InputIterator e, char);
1490
1491
  // Inserts a value into the btree.
1492
  template <typename ValueType>
1493
  iterator insert_multi(const key_type &key, ValueType &&v);
1494
1495
  // Inserts a value into the btree.
1496
  template <typename ValueType>
1497
  iterator insert_multi(ValueType &&v) {
1498
    return insert_multi(params_type::key(v), std::forward<ValueType>(v));
1499
  }
1500
1501
  // Insert with hint. Check to see if the value should be placed immediately
1502
  // before position in the tree. If it does, then the insertion will take
1503
  // amortized constant time. If not, the insertion will take amortized
1504
  // logarithmic time as if a call to insert_multi(v) were made.
1505
  template <typename ValueType>
1506
  iterator insert_hint_multi(iterator position, ValueType &&v);
1507
1508
  // Insert a range of values into the btree.
1509
  template <typename InputIterator>
1510
  void insert_iterator_multi(InputIterator b, InputIterator e);
1511
1512
  // Erases the specified iterator from the btree. The iterator must be valid
1513
  // (i.e. not equal to end()). Returns an iterator pointing to the element
1514
  // after the one that was erased (or end() if none exists).
1515
  // Requirement: does not read the value at `*iter`.
1516
  iterator erase(iterator iter);
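  // Erase-loop sketch using the returned iterator (assumes a hypothetical
  // absl::btree_map<int, int> `m`); erase() revalidates `it`, which the
  // mutation would otherwise invalidate:
  //
  //   for (auto it = m.begin(); it != m.end();) {
  //     if (it->second == 0) it = m.erase(it);
  //     else ++it;
  //   }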
1517
1518
  // Erases the range [begin, end). Returns the number of keys erased and an
1519
  // iterator pointing to the element after the last erased element.
1520
  std::pair<size_type, iterator> erase_range(iterator begin, iterator end);
1521
1522
  // Finds an element with key equivalent to `key` or returns `end()` if `key`
1523
  // is not present.
1524
  template <typename K>
1525
  iterator find(const K &key) {
1526
    return internal_end(internal_find(key));
1527
  }
1528
  template <typename K>
1529
  const_iterator find(const K &key) const {
1530
    return internal_end(internal_find(key));
1531
  }
1532
1533
  // Clear the btree, deleting all of the values it contains.
1534
  void clear();
1535
1536
  // Swaps the contents of `this` and `other`.
1537
  void swap(btree &other);
1538
1539
  const key_compare &key_comp() const noexcept {
1540
    return rightmost_.template get<0>();
1541
  }
1542
  template <typename K1, typename K2>
1543
  bool compare_keys(const K1 &a, const K2 &b) const {
1544
    return compare_internal::compare_result_as_less_than(key_comp()(a, b));
1545
  }
1546
1547
  value_compare value_comp() const {
1548
    return value_compare(original_key_compare(key_comp()));
1549
  }
1550
1551
  // Verifies the structure of the btree.
1552
  void verify() const;
1553
1554
  // Size routines.
1555
0
  size_type size() const { return size_; }
1556
  size_type max_size() const { return (std::numeric_limits<size_type>::max)(); }
1557
  bool empty() const { return size_ == 0; }
1558
1559
  // The height of the btree. An empty tree will have height 0.
1560
  size_type height() const {
1561
    size_type h = 0;
1562
    if (!empty()) {
1563
      // Count the length of the chain from the leftmost node up to the
1564
      // root. We actually count from the root back around to the level below
1565
      // the root, but the calculation is the same because of the circularity
1566
      // of that traversal.
1567
      const node_type *n = root();
1568
      do {
1569
        ++h;
1570
        n = n->parent();
1571
      } while (n != root());
1572
    }
1573
    return h;
1574
  }
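  // Worked trace (a sketch, not normative): for a two-level tree,
  // root()->parent() is the leftmost leaf and that leaf's parent() is the
  // root again, so the do/while takes two steps and h == 2.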
1575
1576
  // The number of internal, leaf and total nodes used by the btree.
1577
  size_type leaf_nodes() const { return internal_stats(root()).leaf_nodes; }
1578
  size_type internal_nodes() const {
1579
    return internal_stats(root()).internal_nodes;
1580
  }
1581
  size_type nodes() const {
1582
    node_stats stats = internal_stats(root());
1583
    return stats.leaf_nodes + stats.internal_nodes;
1584
  }
1585
1586
  // The total number of bytes used by the btree.
1587
  // TODO(b/169338300): update to support node_btree_*.
1588
  size_type bytes_used() const {
1589
    node_stats stats = internal_stats(root());
1590
    if (stats.leaf_nodes == 1 && stats.internal_nodes == 0) {
1591
      return sizeof(*this) + node_type::LeafSize(root()->max_count());
1592
    } else {
1593
      return sizeof(*this) + stats.leaf_nodes * node_type::LeafSize() +
1594
             stats.internal_nodes * node_type::InternalSize();
1595
    }
1596
  }
1597
1598
  // The average number of bytes used per value stored in the btree assuming
1599
  // random insertion order.
1600
  static double average_bytes_per_value() {
1601
    // The expected number of values per node with random insertion order is the
1602
    // average of the maximum and minimum numbers of values per node.
1603
    const double expected_values_per_node = (kNodeSlots + kMinNodeValues) / 2.0;
1604
    return node_type::LeafSize() / expected_values_per_node;
1605
  }
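  // Hypothetical arithmetic for the formula above: if LeafSize() were 256
  // bytes with kNodeSlots == 14 and kMinNodeValues == 7, the expected number
  // of values per node would be (14 + 7) / 2 = 10.5, i.e. about 24.4 bytes
  // per value.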
1606
1607
  // The fullness of the btree. Computed as the number of elements in the btree
1608
  // divided by the maximum number of elements a tree with the current number
1609
  // of nodes could hold. A value of 1 indicates perfect space
1610
  // utilization. Smaller values indicate space wastage.
1611
  // Returns 0 for empty trees.
1612
  double fullness() const {
1613
    if (empty()) return 0.0;
1614
    return static_cast<double>(size()) / (nodes() * kNodeSlots);
1615
  }
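  // Hypothetical example: 50 values spread over 10 nodes with kNodeSlots == 10
  // could have held 100 values, so fullness() == 0.5.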
1616
  // The overhead of the btree structure in bytes per node. Computed as the
1617
  // total number of bytes used by the btree minus the number of bytes used for
1618
  // storing elements divided by the number of elements.
1619
  // Returns 0 for empty trees.
1620
  double overhead() const {
1621
    if (empty()) return 0.0;
1622
    return (bytes_used() - size() * sizeof(value_type)) /
1623
           static_cast<double>(size());
1624
  }
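  // Hypothetical example: with bytes_used() == 1000, size() == 20, and
  // sizeof(value_type) == 8, overhead() == (1000 - 160) / 20.0 == 42 bytes
  // per stored value.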
1625
1626
  // The allocator used by the btree.
1627
  allocator_type get_allocator() const { return allocator(); }
1628
1629
 private:
1630
  friend struct btree_access;
1631
1632
  // Internal accessor routines.
1633
  node_type *root() { return root_; }
1634
  const node_type *root() const { return root_; }
1635
  node_type *&mutable_root() noexcept { return root_; }
1636
  node_type *rightmost() { return rightmost_.template get<2>(); }
1637
  const node_type *rightmost() const { return rightmost_.template get<2>(); }
1638
  node_type *&mutable_rightmost() noexcept {
1639
    return rightmost_.template get<2>();
1640
  }
1641
  key_compare *mutable_key_comp() noexcept {
1642
    return &rightmost_.template get<0>();
1643
  }
1644
1645
  // The leftmost node is stored as the parent of the root node.
1646
  node_type *leftmost() { return root()->parent(); }
1647
  const node_type *leftmost() const { return root()->parent(); }
1648
1649
  // Allocator routines.
1650
  allocator_type *mutable_allocator() noexcept {
1651
    return &rightmost_.template get<1>();
1652
  }
1653
  const allocator_type &allocator() const noexcept {
1654
    return rightmost_.template get<1>();
1655
  }
1656
1657
  // Allocates a correctly aligned node of at least size bytes using the
1658
  // allocator.
1659
  node_type *allocate(size_type size) {
1660
    return reinterpret_cast<node_type *>(
1661
        absl::container_internal::Allocate<node_type::Alignment()>(
1662
            mutable_allocator(), size));
1663
  }
1664
1665
  // Node creation/deletion routines.
1666
  node_type *new_internal_node(node_type *parent) {
1667
    node_type *n = allocate(node_type::InternalSize());
1668
    n->init_internal(parent);
1669
    return n;
1670
  }
1671
  node_type *new_leaf_node(node_type *parent) {
1672
    node_type *n = allocate(node_type::LeafSize());
1673
    n->init_leaf(kNodeSlots, parent);
1674
    return n;
1675
  }
1676
  node_type *new_leaf_root_node(field_type max_count) {
1677
    node_type *n = allocate(node_type::LeafSize(max_count));
1678
    n->init_leaf(max_count, /*parent=*/n);
1679
    return n;
1680
  }
1681
1682
  // Deletion helper routines.
1683
  iterator rebalance_after_delete(iterator iter);
1684
1685
  // Rebalances or splits the node iter points to.
1686
  void rebalance_or_split(iterator *iter);
1687
1688
  // Merges the values of left, right and the delimiting key on their parent
1689
  // onto left, removing the delimiting key and deleting right.
1690
  void merge_nodes(node_type *left, node_type *right);
1691
1692
  // Tries to merge node with its left or right sibling, and failing that,
1693
  // rebalance with its left or right sibling. Returns true if a merge
1694
  // occurred, at which point it is no longer valid to access node. Returns
1695
  // false if no merging took place.
1696
  bool try_merge_or_rebalance(iterator *iter);
1697
1698
  // Tries to shrink the height of the tree by 1.
1699
  void try_shrink();
1700
1701
  iterator internal_end(iterator iter) {
1702
    return iter.node_ != nullptr ? iter : end();
1703
  }
1704
  const_iterator internal_end(const_iterator iter) const {
1705
    return iter.node_ != nullptr ? iter : end();
1706
  }
1707
1708
  // Emplaces a value into the btree immediately before iter. Requires that
1709
  // key(v) <= iter.key() and (--iter).key() <= key(v).
1710
  template <typename... Args>
1711
  iterator internal_emplace(iterator iter, Args &&...args);
1712
1713
  // Returns an iterator pointing to the first value >= the value "iter" is
1714
  // pointing at. Note that "iter" might be pointing to an invalid location such
1715
  // as iter.position_ == iter.node_->finish(). This routine simply moves iter
1716
  // up in the tree to a valid location. Requires: iter.node_ is non-null.
1717
  template <typename IterType>
1718
  static IterType internal_last(IterType iter);
1719
1720
  // Returns an iterator pointing to the leaf position at which key would
1721
  // reside in the tree, unless there is an exact match - in which case, the
1722
  // result may not be on a leaf. When there's a three-way comparator, we can
1723
  // return whether there was an exact match. This allows the caller to avoid a
1724
  // subsequent comparison to determine if an exact match was made, which is
1725
  // important for keys with expensive comparison, such as strings.
1726
  template <typename K>
1727
  SearchResult<iterator, is_key_compare_to::value> internal_locate(
1728
      const K &key) const;
1729
1730
  // Internal routine which implements lower_bound().
1731
  template <typename K>
1732
  SearchResult<iterator, is_key_compare_to::value> internal_lower_bound(
1733
      const K &key) const;
1734
1735
  // Internal routine which implements upper_bound().
1736
  template <typename K>
1737
  iterator internal_upper_bound(const K &key) const;
1738
1739
  // Internal routine which implements find().
1740
  template <typename K>
1741
  iterator internal_find(const K &key) const;
1742
1743
  // Verifies the tree structure of node.
1744
  size_type internal_verify(const node_type *node, const key_type *lo,
1745
                            const key_type *hi) const;
1746
1747
  node_stats internal_stats(const node_type *node) const {
1748
    // The root can be a static empty node.
1749
    if (node == nullptr || (node == root() && empty())) {
1750
      return node_stats(0, 0);
1751
    }
1752
    if (node->is_leaf()) {
1753
      return node_stats(1, 0);
1754
    }
1755
    node_stats res(0, 1);
1756
    for (int i = node->start(); i <= node->finish(); ++i) {
1757
      res += internal_stats(node->child(i));
1758
    }
1759
    return res;
1760
  }
1761
1762
  node_type *root_;
1763
1764
  // A pointer to the rightmost node. Note that the leftmost node is stored as
1765
  // the root's parent. We use compressed tuple in order to save space because
1766
  // key_compare and allocator_type are usually empty.
1767
  absl::container_internal::CompressedTuple<key_compare, allocator_type,
1768
                                            node_type *>
1769
      rightmost_;
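  // Why the compressed tuple saves space (a sketch; `Less` is a hypothetical
  // stateless comparator): empty classes still occupy at least one byte as
  // data members, but CompressedTuple inherits from empty types, so on common
  // ABIs the whole tuple is pointer-sized:
  //
  //   struct Less { bool operator()(int a, int b) const { return a < b; } };
  //   static_assert(sizeof(absl::container_internal::CompressedTuple<
  //                     Less, std::allocator<int>, void *>) == sizeof(void *),
  //                 "empty members are compressed away");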
1770
1771
  // Number of values.
1772
  size_type size_;
1773
};
1774
1775
////
1776
// btree_node methods
1777
template <typename P>
1778
template <typename... Args>
1779
inline void btree_node<P>::emplace_value(const field_type i,
1780
                                         allocator_type *alloc,
1781
                                         Args &&...args) {
1782
  assert(i >= start());
1783
  assert(i <= finish());
1784
  // Shift old values to create space for new value and then construct it in
1785
  // place.
1786
  if (i < finish()) {
1787
    transfer_n_backward(finish() - i, /*dest_i=*/i + 1, /*src_i=*/i, this,
1788
                        alloc);
1789
  }
1790
  value_init(static_cast<field_type>(i), alloc, std::forward<Args>(args)...);
1791
  set_finish(finish() + 1);
1792
1793
  if (is_internal() && finish() > i + 1) {
1794
    for (field_type j = finish(); j > i + 1; --j) {
1795
      set_child(j, child(j - 1));
1796
    }
1797
    clear_child(i + 1);
1798
  }
1799
}
1800
1801
template <typename P>
1802
inline void btree_node<P>::remove_values(const field_type i,
1803
                                         const field_type to_erase,
1804
                                         allocator_type *alloc) {
1805
  // Transfer values after the removed range into their new places.
1806
  value_destroy_n(i, to_erase, alloc);
1807
  const field_type orig_finish = finish();
1808
  const field_type src_i = i + to_erase;
1809
  transfer_n(orig_finish - src_i, i, src_i, this, alloc);
1810
1811
  if (is_internal()) {
1812
    // Delete all children between begin and end.
1813
    for (field_type j = 0; j < to_erase; ++j) {
1814
      clear_and_delete(child(i + j + 1), alloc);
1815
    }
1816
    // Rotate children after end into new positions.
1817
    for (field_type j = i + to_erase + 1; j <= orig_finish; ++j) {
1818
      set_child(j - to_erase, child(j));
1819
      clear_child(j);
1820
    }
1821
  }
1822
  set_finish(orig_finish - to_erase);
1823
}
1824
1825
template <typename P>
1826
void btree_node<P>::rebalance_right_to_left(field_type to_move,
1827
                                            btree_node *right,
1828
                                            allocator_type *alloc) {
1829
  assert(parent() == right->parent());
1830
  assert(position() + 1 == right->position());
1831
  assert(right->count() >= count());
1832
  assert(to_move >= 1);
1833
  assert(to_move <= right->count());
1834
1835
  // 1) Move the delimiting value in the parent to the left node.
1836
  transfer(finish(), position(), parent(), alloc);
1837
1838
  // 2) Move the (to_move - 1) values from the right node to the left node.
1839
  transfer_n(to_move - 1, finish() + 1, right->start(), right, alloc);
1840
1841
  // 3) Move the new delimiting value to the parent from the right node.
1842
  parent()->transfer(position(), right->start() + to_move - 1, right, alloc);
1843
1844
  // 4) Shift the values in the right node to their correct positions.
1845
  right->transfer_n(right->count() - to_move, right->start(),
1846
                    right->start() + to_move, right, alloc);
1847
1848
  if (is_internal()) {
1849
    // Move the child pointers from the right to the left node.
1850
    for (field_type i = 0; i < to_move; ++i) {
1851
      init_child(finish() + i + 1, right->child(i));
1852
    }
1853
    for (field_type i = right->start(); i <= right->finish() - to_move; ++i) {
1854
      assert(i + to_move <= right->max_count());
1855
      right->init_child(i, right->child(i + to_move));
1856
      right->clear_child(i + to_move);
1857
    }
1858
  }
1859
1860
  // Fixup `finish` on the left and right nodes.
1861
  set_finish(finish() + to_move);
1862
  right->set_finish(right->finish() - to_move);
1863
}
1864
1865
template <typename P>
1866
void btree_node<P>::rebalance_left_to_right(field_type to_move,
1867
                                            btree_node *right,
1868
                                            allocator_type *alloc) {
1869
  assert(parent() == right->parent());
1870
  assert(position() + 1 == right->position());
1871
  assert(count() >= right->count());
1872
  assert(to_move >= 1);
1873
  assert(to_move <= count());
1874
1875
  // Values in the right node are shifted to the right to make room for the
1876
  // new to_move values. Then, the delimiting value in the parent and the
1877
  // other (to_move - 1) values in the left node are moved into the right node.
1878
  // Lastly, a new delimiting value is moved from the left node into the
1879
  // parent, and the remaining empty left node entries are destroyed.
1880
1881
  // 1) Shift existing values in the right node to their correct positions.
1882
  right->transfer_n_backward(right->count(), right->start() + to_move,
1883
                             right->start(), right, alloc);
1884
1885
  // 2) Move the delimiting value in the parent to the right node.
1886
  right->transfer(right->start() + to_move - 1, position(), parent(), alloc);
1887
1888
  // 3) Move the (to_move - 1) values from the left node to the right node.
1889
  right->transfer_n(to_move - 1, right->start(), finish() - (to_move - 1), this,
1890
                    alloc);
1891
1892
  // 4) Move the new delimiting value to the parent from the left node.
1893
  parent()->transfer(position(), finish() - to_move, this, alloc);
1894
1895
  if (is_internal()) {
1896
    // Move the child pointers from the left to the right node.
1897
    for (field_type i = right->finish() + 1; i > right->start(); --i) {
1898
      right->init_child(i - 1 + to_move, right->child(i - 1));
1899
      right->clear_child(i - 1);
1900
    }
1901
    for (field_type i = 1; i <= to_move; ++i) {
1902
      right->init_child(i - 1, child(finish() - to_move + i));
1903
      clear_child(finish() - to_move + i);
1904
    }
1905
  }
1906
1907
  // Fixup the counts on the left and right nodes.
1908
  set_finish(finish() - to_move);
1909
  right->set_finish(right->finish() + to_move);
1910
}
1911
1912
template <typename P>
1913
void btree_node<P>::split(const int insert_position, btree_node *dest,
1914
                          allocator_type *alloc) {
1915
  assert(dest->count() == 0);
1916
  assert(max_count() == kNodeSlots);
1917
1918
  // We bias the split based on the position being inserted. If we're
1919
  // inserting at the beginning of the left node then bias the split to put
1920
  // more values on the right node. If we're inserting at the end of the
1921
  // right node then bias the split to put more values on the left node.
1922
  if (insert_position == start()) {
1923
    dest->set_finish(dest->start() + finish() - 1);
1924
  } else if (insert_position == kNodeSlots) {
1925
    dest->set_finish(dest->start());
1926
  } else {
1927
    dest->set_finish(dest->start() + count() / 2);
1928
  }
1929
  set_finish(finish() - dest->count());
1930
  assert(count() >= 1);
1931
1932
  // Move values from the left sibling to the right sibling.
1933
  dest->transfer_n(dest->count(), dest->start(), finish(), this, alloc);
1934
1935
  // The split key is the largest value in the left sibling.
1936
  --mutable_finish();
1937
  parent()->emplace_value(position(), alloc, finish_slot());
1938
  value_destroy(finish(), alloc);
1939
  parent()->init_child(position() + 1, dest);
1940
1941
  if (is_internal()) {
1942
    for (field_type i = dest->start(), j = finish() + 1; i <= dest->finish();
1943
         ++i, ++j) {
1944
      assert(child(j) != nullptr);
1945
      dest->init_child(i, child(j));
1946
      clear_child(j);
1947
    }
1948
  }
1949
}
1950
1951
template <typename P>
1952
void btree_node<P>::merge(btree_node *src, allocator_type *alloc) {
1953
  assert(parent() == src->parent());
1954
  assert(position() + 1 == src->position());
1955
1956
  // Move the delimiting value to the left node.
1957
  value_init(finish(), alloc, parent()->slot(position()));
1958
1959
  // Move the values from the right to the left node.
1960
  transfer_n(src->count(), finish() + 1, src->start(), src, alloc);
1961
1962
  if (is_internal()) {
1963
    // Move the child pointers from the right to the left node.
1964
    for (field_type i = src->start(), j = finish() + 1; i <= src->finish();
1965
         ++i, ++j) {
1966
      init_child(j, src->child(i));
1967
      src->clear_child(i);
1968
    }
1969
  }
1970
1971
  // Fixup `finish` on the src and dest nodes.
1972
  set_finish(start() + 1 + count() + src->count());
1973
  src->set_finish(src->start());
1974
1975
  // Remove the value on the parent node and delete the src node.
1976
  parent()->remove_values(position(), /*to_erase=*/1, alloc);
1977
}
1978
1979
template <typename P>
1980
void btree_node<P>::clear_and_delete(btree_node *node, allocator_type *alloc) {
1981
  if (node->is_leaf()) {
1982
    node->value_destroy_n(node->start(), node->count(), alloc);
1983
    deallocate(LeafSize(node->max_count()), node, alloc);
1984
    return;
1985
  }
1986
  if (node->count() == 0) {
1987
    deallocate(InternalSize(), node, alloc);
1988
    return;
1989
  }
1990
1991
  // The parent of the root of the subtree we are deleting.
1992
  btree_node *delete_root_parent = node->parent();
1993
1994
  // Navigate to the leftmost leaf under node, and then delete upwards.
1995
  while (node->is_internal()) node = node->start_child();
1996
#ifdef ABSL_BTREE_ENABLE_GENERATIONS
1997
  // When generations are enabled, we delete the leftmost leaf last in case it's
1998
  // the parent of the root and we need to check whether it's a leaf before we
1999
  // can update the root's generation.
2000
  // TODO(ezb): if we change btree_node::is_root to check a bool inside the node
2001
  // instead of checking whether the parent is a leaf, we can remove this logic.
2002
  btree_node *leftmost_leaf = node;
2003
#endif
2004
  // Use `size_type` because `pos` needs to be able to hold `kNodeSlots+1`,
2005
  // which isn't guaranteed to be a valid `field_type`.
2006
  size_type pos = node->position();
2007
  btree_node *parent = node->parent();
2008
  for (;;) {
2009
    // In each iteration of the next loop, we delete one leaf node and go right.
2010
    assert(pos <= parent->finish());
2011
    do {
2012
      node = parent->child(static_cast<field_type>(pos));
2013
      if (node->is_internal()) {
2014
        // Navigate to the leftmost leaf under node.
2015
        while (node->is_internal()) node = node->start_child();
2016
        pos = node->position();
2017
        parent = node->parent();
2018
      }
2019
      node->value_destroy_n(node->start(), node->count(), alloc);
2020
#ifdef ABSL_BTREE_ENABLE_GENERATIONS
2021
      if (leftmost_leaf != node)
2022
#endif
2023
        deallocate(LeafSize(node->max_count()), node, alloc);
2024
      ++pos;
2025
    } while (pos <= parent->finish());
2026
2027
    // Once we've deleted all children of parent, delete parent and go up/right.
2028
    assert(pos > parent->finish());
2029
    do {
2030
      node = parent;
2031
      pos = node->position();
2032
      parent = node->parent();
2033
      node->value_destroy_n(node->start(), node->count(), alloc);
2034
      deallocate(InternalSize(), node, alloc);
2035
      if (parent == delete_root_parent) {
2036
#ifdef ABSL_BTREE_ENABLE_GENERATIONS
2037
        deallocate(LeafSize(leftmost_leaf->max_count()), leftmost_leaf, alloc);
2038
#endif
2039
        return;
2040
      }
2041
      ++pos;
2042
    } while (pos > parent->finish());
2043
  }
2044
}
2045
2046
////
2047
// btree_iterator methods
2048
2049
// Note: the implementation here is based on btree_node::clear_and_delete.
2050
template <typename N, typename R, typename P>
2051
auto btree_iterator<N, R, P>::distance_slow(const_iterator other) const
2052
    -> difference_type {
2053
  const_iterator begin = other;
2054
  const_iterator end = *this;
2055
  assert(begin.node_ != end.node_ || !begin.node_->is_leaf() ||
2056
         begin.position_ != end.position_);
2057
2058
  const node_type *node = begin.node_;
2059
  // We need to compensate for double counting if begin.node_ is a leaf node.
2060
  difference_type count = node->is_leaf() ? -begin.position_ : 0;
2061
2062
  // First navigate to the leftmost leaf node past begin.
2063
  if (node->is_internal()) {
2064
    ++count;
2065
    node = node->child(begin.position_ + 1);
2066
  }
2067
  while (node->is_internal()) node = node->start_child();
2068
2069
  // Use `size_type` because `pos` needs to be able to hold `kNodeSlots+1`,
2070
  // which isn't guaranteed to be a valid `field_type`.
2071
  size_type pos = node->position();
2072
  const node_type *parent = node->parent();
2073
  for (;;) {
2074
    // In each iteration of the next loop, we count one leaf node and go right.
2075
    assert(pos <= parent->finish());
2076
    do {
2077
      node = parent->child(static_cast<field_type>(pos));
2078
      if (node->is_internal()) {
2079
        // Navigate to the leftmost leaf under node.
2080
        while (node->is_internal()) node = node->start_child();
2081
        pos = node->position();
2082
        parent = node->parent();
2083
      }
2084
      if (node == end.node_) return count + end.position_;
2085
      if (parent == end.node_ && pos == static_cast<size_type>(end.position_))
2086
        return count + node->count();
2087
      // +1 is for the next internal node value.
2088
      count += node->count() + 1;
2089
      ++pos;
2090
    } while (pos <= parent->finish());
2091
2092
    // Once we've counted all children of parent, go up/right.
2093
    assert(pos > parent->finish());
2094
    do {
2095
      node = parent;
2096
      pos = node->position();
2097
      parent = node->parent();
2098
      // -1 because we counted the value at `end` and shouldn't have.
2099
      if (parent == end.node_ && pos == static_cast<size_type>(end.position_))
2100
        return count - 1;
2101
      ++pos;
2102
    } while (pos > parent->finish());
2103
  }
2104
}
2105
2106
template <typename N, typename R, typename P>
2107
void btree_iterator<N, R, P>::increment_slow() {
2108
  if (node_->is_leaf()) {
2109
    assert(position_ >= node_->finish());
2110
    btree_iterator save(*this);
2111
    while (position_ == node_->finish() && !node_->is_root()) {
2112
      assert(node_->parent()->child(node_->position()) == node_);
2113
      position_ = node_->position();
2114
      node_ = node_->parent();
2115
    }
2116
    // TODO(ezb): assert we aren't incrementing end() instead of handling.
2117
    if (position_ == node_->finish()) {
2118
      *this = save;
2119
    }
2120
  } else {
2121
    assert(position_ < node_->finish());
2122
    node_ = node_->child(static_cast<field_type>(position_ + 1));
2123
    while (node_->is_internal()) {
2124
      node_ = node_->start_child();
2125
    }
2126
    position_ = node_->start();
2127
  }
2128
}
2129
2130
template <typename N, typename R, typename P>
2131
void btree_iterator<N, R, P>::decrement_slow() {
2132
  if (node_->is_leaf()) {
2133
    assert(position_ <= -1);
2134
    btree_iterator save(*this);
2135
    while (position_ < node_->start() && !node_->is_root()) {
2136
      assert(node_->parent()->child(node_->position()) == node_);
2137
      position_ = node_->position() - 1;
2138
      node_ = node_->parent();
2139
    }
2140
    // TODO(ezb): assert we aren't decrementing begin() instead of handling.
2141
    if (position_ < node_->start()) {
2142
      *this = save;
2143
    }
2144
  } else {
2145
    assert(position_ >= node_->start());
2146
    node_ = node_->child(static_cast<field_type>(position_));
2147
    while (node_->is_internal()) {
2148
      node_ = node_->child(node_->finish());
2149
    }
2150
    position_ = node_->finish() - 1;
2151
  }
2152
}
2153
2154
////
2155
// btree methods
2156
template <typename P>
2157
template <typename Btree>
2158
void btree<P>::copy_or_move_values_in_order(Btree &other) {
2159
  static_assert(std::is_same<btree, Btree>::value ||
2160
                    std::is_same<const btree, Btree>::value,
2161
                "Btree type must be same or const.");
2162
  assert(empty());
2163
2164
  // We can avoid key comparisons because we know the order of the
2165
  // values is the same order we'll store them in.
2166
  auto iter = other.begin();
2167
  if (iter == other.end()) return;
2168
  insert_multi(iter.slot());
2169
  ++iter;
2170
  for (; iter != other.end(); ++iter) {
2171
    // If the btree is not empty, we can just insert the new value at the end
2172
    // of the tree.
2173
    internal_emplace(end(), iter.slot());
2174
  }
2175
}
2176
2177
template <typename P>
2178
constexpr bool btree<P>::static_assert_validation() {
2179
  static_assert(std::is_nothrow_copy_constructible<key_compare>::value,
2180
                "Key comparison must be nothrow copy constructible");
2181
  static_assert(std::is_nothrow_copy_constructible<allocator_type>::value,
2182
                "Allocator must be nothrow copy constructible");
2183
  static_assert(type_traits_internal::is_trivially_copyable<iterator>::value,
2184
                "iterator not trivially copyable.");
2185
2186
  // Note: We assert that kNodeSlots, which is computed from
2187
  // Params::kTargetNodeSize, fits in node_type::field_type.
2188
  static_assert(
2189
      kNodeSlots < (1 << (8 * sizeof(typename node_type::field_type))),
2190
      "target node size too large");
2191
2192
  // Verify that key_compare returns an absl::{weak,strong}_ordering or bool.
2193
  static_assert(
2194
      compare_has_valid_result_type<key_compare, key_type>(),
2195
      "key comparison function must return absl::{weak,strong}_ordering or "
2196
      "bool.");
2197
2198
  // Test the assumption made in setting kNodeSlotSpace.
2199
  static_assert(node_type::MinimumOverhead() >= sizeof(void *) + 4,
2200
                "node space assumption incorrect");
2201
2202
  return true;
2203
}
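// Example of a three-way comparator that satisfies the check above (a sketch;
// `ThreeWayLess` is hypothetical):
//
//   struct ThreeWayLess {
//     absl::weak_ordering operator()(int a, int b) const {
//       return a < b   ? absl::weak_ordering::less
//              : b < a ? absl::weak_ordering::greater
//                      : absl::weak_ordering::equivalent;
//     }
//   };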
2204
2205
template <typename P>
2206
template <typename K>
2207
auto btree<P>::lower_bound_equal(const K &key) const
2208
    -> std::pair<iterator, bool> {
2209
  const SearchResult<iterator, is_key_compare_to::value> res =
2210
      internal_lower_bound(key);
2211
  const iterator lower = iterator(internal_end(res.value));
2212
  const bool equal = res.HasMatch()
2213
                         ? res.IsEq()
2214
                         : lower != end() && !compare_keys(key, lower.key());
2215
  return {lower, equal};
2216
}
2217
2218
template <typename P>
2219
template <typename K>
2220
auto btree<P>::equal_range(const K &key) -> std::pair<iterator, iterator> {
2221
  const std::pair<iterator, bool> lower_and_equal = lower_bound_equal(key);
2222
  const iterator lower = lower_and_equal.first;
2223
  if (!lower_and_equal.second) {
2224
    return {lower, lower};
2225
  }
2226
2227
  const iterator next = std::next(lower);
2228
  if (!params_type::template can_have_multiple_equivalent_keys<K>()) {
2229
    // The next iterator after lower must point to a key greater than `key`.
2230
    // Note: if this assert fails, then it may indicate that the comparator does
2231
    // not meet the equivalence requirements for Compare
2232
    // (see https://en.cppreference.com/w/cpp/named_req/Compare).
2233
    assert(next == end() || compare_keys(key, next.key()));
2234
    return {lower, next};
2235
  }
2236
  // Try once more to avoid the call to upper_bound() if there's only one
2237
  // equivalent key. This should prevent all calls to upper_bound() in cases of
2238
  // unique-containers with heterogeneous comparators in which all comparison
2239
  // operators have the same equivalence classes.
2240
  if (next == end() || compare_keys(key, next.key())) return {lower, next};
2241
2242
  // In this case, we need to call upper_bound() to avoid worst case O(N)
2243
  // behavior if we were to iterate over equal keys.
2244
  return {lower, upper_bound(key)};
2245
}
2246
2247
template <typename P>
2248
template <typename K, typename... Args>
2249
auto btree<P>::insert_unique(const K &key, Args &&...args)
2250
    -> std::pair<iterator, bool> {
2251
  if (empty()) {
2252
    mutable_root() = mutable_rightmost() = new_leaf_root_node(1);
2253
  }
2254
2255
  SearchResult<iterator, is_key_compare_to::value> res = internal_locate(key);
2256
  iterator iter = res.value;
2257
2258
  if (res.HasMatch()) {
2259
    if (res.IsEq()) {
2260
      // The key already exists in the tree, do nothing.
2261
      return {iter, false};
2262
    }
2263
  } else {
2264
    iterator last = internal_last(iter);
2265
    if (last.node_ && !compare_keys(key, last.key())) {
2266
      // The key already exists in the tree, do nothing.
2267
      return {last, false};
2268
    }
2269
  }
2270
  return {internal_emplace(iter, std::forward<Args>(args)...), true};
2271
}
2272
2273
template <typename P>
2274
template <typename K, typename... Args>
2275
inline auto btree<P>::insert_hint_unique(iterator position, const K &key,
2276
                                         Args &&...args)
2277
    -> std::pair<iterator, bool> {
2278
  if (!empty()) {
2279
    if (position == end() || compare_keys(key, position.key())) {
2280
      if (position == begin() || compare_keys(std::prev(position).key(), key)) {
2281
        // prev.key() < key < position.key()
2282
        return {internal_emplace(position, std::forward<Args>(args)...), true};
2283
      }
2284
    } else if (compare_keys(position.key(), key)) {
2285
      ++position;
2286
      if (position == end() || compare_keys(key, position.key())) {
2287
        // {original `position`}.key() < key < {current `position`}.key()
2288
        return {internal_emplace(position, std::forward<Args>(args)...), true};
2289
      }
2290
    } else {
2291
      // position.key() == key
2292
      return {position, false};
2293
    }
2294
  }
2295
  return insert_unique(key, std::forward<Args>(args)...);
2296
}
2297
2298
template <typename P>
2299
template <typename InputIterator, typename>
2300
void btree<P>::insert_iterator_unique(InputIterator b, InputIterator e, int) {
2301
  for (; b != e; ++b) {
2302
    insert_hint_unique(end(), params_type::key(*b), *b);
2303
  }
2304
}
2305
2306
template <typename P>
2307
template <typename InputIterator>
2308
void btree<P>::insert_iterator_unique(InputIterator b, InputIterator e, char) {
2309
  for (; b != e; ++b) {
2310
    // Use a node handle to manage a temp slot.
2311
    auto node_handle =
2312
        CommonAccess::Construct<node_handle_type>(get_allocator(), *b);
2313
    slot_type *slot = CommonAccess::GetSlot(node_handle);
2314
    insert_hint_unique(end(), params_type::key(slot), slot);
2315
  }
2316
}
2317
2318
template <typename P>
2319
template <typename ValueType>
2320
auto btree<P>::insert_multi(const key_type &key, ValueType &&v) -> iterator {
2321
  if (empty()) {
2322
    mutable_root() = mutable_rightmost() = new_leaf_root_node(1);
2323
  }
2324
2325
  iterator iter = internal_upper_bound(key);
2326
  if (iter.node_ == nullptr) {
2327
    iter = end();
2328
  }
2329
  return internal_emplace(iter, std::forward<ValueType>(v));
2330
}
2331
2332
template <typename P>
2333
template <typename ValueType>
2334
auto btree<P>::insert_hint_multi(iterator position, ValueType &&v) -> iterator {
2335
  if (!empty()) {
2336
    const key_type &key = params_type::key(v);
2337
    if (position == end() || !compare_keys(position.key(), key)) {
2338
      if (position == begin() ||
2339
          !compare_keys(key, std::prev(position).key())) {
2340
        // prev.key() <= key <= position.key()
2341
        return internal_emplace(position, std::forward<ValueType>(v));
2342
      }
2343
    } else {
2344
      ++position;
2345
      if (position == end() || !compare_keys(position.key(), key)) {
2346
        // {original `position`}.key() < key < {current `position`}.key()
2347
        return internal_emplace(position, std::forward<ValueType>(v));
2348
      }
2349
    }
2350
  }
2351
  return insert_multi(std::forward<ValueType>(v));
2352
}
2353
2354
template <typename P>
2355
template <typename InputIterator>
2356
void btree<P>::insert_iterator_multi(InputIterator b, InputIterator e) {
2357
  for (; b != e; ++b) {
2358
    insert_hint_multi(end(), *b);
2359
  }
2360
}
2361
2362
template <typename P>
2363
auto btree<P>::operator=(const btree &other) -> btree & {
2364
  if (this != &other) {
2365
    clear();
2366
2367
    *mutable_key_comp() = other.key_comp();
2368
    if (absl::allocator_traits<
2369
            allocator_type>::propagate_on_container_copy_assignment::value) {
2370
      *mutable_allocator() = other.allocator();
2371
    }
2372
2373
    copy_or_move_values_in_order(other);
2374
  }
2375
  return *this;
2376
}
2377
2378
template <typename P>
2379
auto btree<P>::operator=(btree &&other) noexcept -> btree & {
2380
  if (this != &other) {
2381
    clear();
2382
2383
    using std::swap;
2384
    if (absl::allocator_traits<
2385
            allocator_type>::propagate_on_container_move_assignment::value) {
2386
      swap(root_, other.root_);
2387
      // Note: `rightmost_` also contains the allocator and the key comparator.
2388
      swap(rightmost_, other.rightmost_);
2389
      swap(size_, other.size_);
2390
    } else {
2391
      if (allocator() == other.allocator()) {
2392
        swap(mutable_root(), other.mutable_root());
2393
        swap(*mutable_key_comp(), *other.mutable_key_comp());
2394
        swap(mutable_rightmost(), other.mutable_rightmost());
2395
        swap(size_, other.size_);
2396
      } else {
2397
        // We aren't allowed to propagate the allocator and the allocator is
2398
        // different so we can't take over its memory. We must move each element
2399
        // individually. We need both `other` and `this` to have `other`'s key
2400
        // comparator while moving the values so we can't swap the key
2401
        // comparators.
2402
        *mutable_key_comp() = other.key_comp();
2403
        copy_or_move_values_in_order(other);
2404
      }
2405
    }
2406
  }
2407
  return *this;
2408
}
2409
2410
template <typename P>
2411
auto btree<P>::erase(iterator iter) -> iterator {
2412
  iter.node_->value_destroy(static_cast<field_type>(iter.position_),
2413
                            mutable_allocator());
2414
  iter.update_generation();
2415
2416
  const bool internal_delete = iter.node_->is_internal();
2417
  if (internal_delete) {
2418
    // Deletion of a value on an internal node. First, transfer the largest
2419
    // value from our left child here, then erase/rebalance from that position.
2420
    // We can get to the largest value from our left child by decrementing iter.
2421
    iterator internal_iter(iter);
2422
    --iter;
2423
    assert(iter.node_->is_leaf());
2424
    internal_iter.node_->transfer(
2425
        static_cast<size_type>(internal_iter.position_),
2426
        static_cast<size_type>(iter.position_), iter.node_,
2427
        mutable_allocator());
2428
  } else {
2429
    // Shift values after erased position in leaf. In the internal case, we
2430
    // don't need to do this because the leaf position is the end of the node.
2431
    const field_type transfer_from =
2432
        static_cast<field_type>(iter.position_ + 1);
2433
    const field_type num_to_transfer = iter.node_->finish() - transfer_from;
2434
    iter.node_->transfer_n(num_to_transfer,
2435
                           static_cast<size_type>(iter.position_),
2436
                           transfer_from, iter.node_, mutable_allocator());
2437
  }
2438
  // Update node finish and container size.
2439
  iter.node_->set_finish(iter.node_->finish() - 1);
2440
  --size_;
2441
2442
  // We want to return the next value after the one we just erased. If we
2443
  // erased from an internal node (internal_delete == true), then the next
2444
  // value is ++(++iter). If we erased from a leaf node (internal_delete ==
2445
  // false) then the next value is ++iter. Note that ++iter may point to an
2446
  // internal node and the value in the internal node may move to a leaf node
2447
  // (iter.node_) when rebalancing is performed at the leaf level.
2448
2449
  iterator res = rebalance_after_delete(iter);
2450
2451
  // If we erased from an internal node, advance the iterator.
2452
  if (internal_delete) {
2453
    ++res;
2454
  }
2455
  return res;
2456
}
2457
2458
template <typename P>
2459
auto btree<P>::rebalance_after_delete(iterator iter) -> iterator {
2460
  // Merge/rebalance as we walk back up the tree.
2461
  iterator res(iter);
2462
  bool first_iteration = true;
2463
  for (;;) {
2464
    if (iter.node_ == root()) {
2465
      try_shrink();
2466
      if (empty()) {
2467
        return end();
2468
      }
2469
      break;
2470
    }
2471
    if (iter.node_->count() >= kMinNodeValues) {
2472
      break;
2473
    }
2474
    bool merged = try_merge_or_rebalance(&iter);
2475
    // On the first iteration, we should update `res` with `iter` because `res`
2476
    // may have been invalidated.
2477
    if (first_iteration) {
2478
      res = iter;
2479
      first_iteration = false;
2480
    }
2481
    if (!merged) {
2482
      break;
2483
    }
2484
    iter.position_ = iter.node_->position();
2485
    iter.node_ = iter.node_->parent();
2486
  }
2487
  res.update_generation();
2488
2489
  // Adjust our return value. If we're pointing at the end of a node, advance
2490
  // the iterator.
2491
  if (res.position_ == res.node_->finish()) {
2492
    res.position_ = res.node_->finish() - 1;
2493
    ++res;
2494
  }
2495
2496
  return res;
2497
}
2498
2499
template <typename P>
2500
auto btree<P>::erase_range(iterator begin, iterator end)
2501
    -> std::pair<size_type, iterator> {
2502
  assert(end - begin >= 0);
  const size_type count = static_cast<size_type>(end - begin);
2503
2504
2505
  if (count == 0) {
2506
    return {0, begin};
2507
  }
2508
2509
  if (count == size_) {
2510
    clear();
2511
    return {count, this->end()};
2512
  }
2513
2514
  if (begin.node_ == end.node_) {
2515
    assert(end.position_ > begin.position_);
2516
    begin.node_->remove_values(
2517
        static_cast<field_type>(begin.position_),
2518
        static_cast<field_type>(end.position_ - begin.position_),
2519
        mutable_allocator());
2520
    size_ -= count;
2521
    return {count, rebalance_after_delete(begin)};
2522
  }
2523
2524
  const size_type target_size = size_ - count;
2525
  while (size_ > target_size) {
2526
    if (begin.node_->is_leaf()) {
2527
      const size_type remaining_to_erase = size_ - target_size;
2528
      const size_type remaining_in_node =
2529
          static_cast<size_type>(begin.node_->finish() - begin.position_);
2530
      const field_type to_erase = static_cast<field_type>(
2531
          (std::min)(remaining_to_erase, remaining_in_node));
2532
      begin.node_->remove_values(static_cast<field_type>(begin.position_),
2533
                                 to_erase, mutable_allocator());
2534
      size_ -= to_erase;
2535
      begin = rebalance_after_delete(begin);
2536
    } else {
2537
      begin = erase(begin);
2538
    }
2539
  }
2540
  begin.update_generation();
2541
  return {count, begin};
2542
}
2543
2544
template <typename P>
2545
void btree<P>::clear() {
2546
  if (!empty()) {
2547
    node_type::clear_and_delete(root(), mutable_allocator());
2548
  }
2549
  mutable_root() = mutable_rightmost() = EmptyNode();
2550
  size_ = 0;
2551
}
2552
2553
template <typename P>
2554
void btree<P>::swap(btree &other) {
2555
  using std::swap;
2556
  if (absl::allocator_traits<
2557
          allocator_type>::propagate_on_container_swap::value) {
2558
    // Note: `rightmost_` also contains the allocator and the key comparator.
2559
    swap(rightmost_, other.rightmost_);
2560
  } else {
2561
    // It's undefined behavior if the allocators are unequal here.
2562
    assert(allocator() == other.allocator());
2563
    swap(mutable_rightmost(), other.mutable_rightmost());
2564
    swap(*mutable_key_comp(), *other.mutable_key_comp());
2565
  }
2566
  swap(mutable_root(), other.mutable_root());
2567
  swap(size_, other.size_);
2568
}
2569
2570
template <typename P>
2571
void btree<P>::verify() const {
2572
  assert(root() != nullptr);
2573
  assert(leftmost() != nullptr);
2574
  assert(rightmost() != nullptr);
2575
  assert(empty() || size() == internal_verify(root(), nullptr, nullptr));
2576
  assert(leftmost() == (++const_iterator(root(), -1)).node_);
2577
  assert(rightmost() == (--const_iterator(root(), root()->finish())).node_);
2578
  assert(leftmost()->is_leaf());
2579
  assert(rightmost()->is_leaf());
2580
}
2581
2582
template <typename P>
2583
void btree<P>::rebalance_or_split(iterator *iter) {
2584
  node_type *&node = iter->node_;
2585
  int &insert_position = iter->position_;
2586
  assert(node->count() == node->max_count());
2587
  assert(kNodeSlots == node->max_count());
2588
2589
  // First try to make room on the node by rebalancing.
2590
  node_type *parent = node->parent();
2591
  if (node != root()) {
2592
    if (node->position() > parent->start()) {
2593
      // Try rebalancing with our left sibling.
2594
      node_type *left = parent->child(node->position() - 1);
2595
      assert(left->max_count() == kNodeSlots);
2596
      if (left->count() < kNodeSlots) {
2597
        // We bias rebalancing based on the position being inserted. If we're
2598
        // inserting at the end of the right node then we bias rebalancing to
2599
        // fill up the left node.
2600
        field_type to_move =
2601
            (kNodeSlots - left->count()) /
2602
            (1 + (static_cast<field_type>(insert_position) < kNodeSlots));
2603
        to_move = (std::max)(field_type{1}, to_move);
2604
2605
        if (static_cast<field_type>(insert_position) - to_move >=
2606
                node->start() ||
2607
            left->count() + to_move < kNodeSlots) {
2608
          left->rebalance_right_to_left(to_move, node, mutable_allocator());
2609
2610
          assert(node->max_count() - node->count() == to_move);
2611
          insert_position = static_cast<int>(
2612
              static_cast<field_type>(insert_position) - to_move);
2613
          if (insert_position < node->start()) {
2614
            insert_position = insert_position + left->count() + 1;
2615
            node = left;
2616
          }
2617
2618
          assert(node->count() < node->max_count());
2619
          return;
2620
        }
2621
      }
2622
    }
2623
2624
    if (node->position() < parent->finish()) {
2625
      // Try rebalancing with our right sibling.
2626
      node_type *right = parent->child(node->position() + 1);
2627
      assert(right->max_count() == kNodeSlots);
2628
      if (right->count() < kNodeSlots) {
2629
        // We bias rebalancing based on the position being inserted. If we're
2630
        // inserting at the beginning of the left node then we bias rebalancing
2631
        // to fill up the right node.
2632
        field_type to_move = (kNodeSlots - right->count()) /
2633
                             (1 + (insert_position > node->start()));
2634
        to_move = (std::max)(field_type{1}, to_move);
2635
2636
        if (static_cast<field_type>(insert_position) <=
2637
                node->finish() - to_move ||
2638
            right->count() + to_move < kNodeSlots) {
2639
          node->rebalance_left_to_right(to_move, right, mutable_allocator());
2640
2641
          if (insert_position > node->finish()) {
2642
            insert_position = insert_position - node->count() - 1;
2643
            node = right;
2644
          }
2645
2646
          assert(node->count() < node->max_count());
2647
          return;
2648
        }
2649
      }
2650
    }
2651
2652
    // Rebalancing failed; make sure there is room on the parent node for a new
2653
    // value.
2654
    assert(parent->max_count() == kNodeSlots);
2655
    if (parent->count() == kNodeSlots) {
2656
      iterator parent_iter(node->parent(), node->position());
2657
      rebalance_or_split(&parent_iter);
2658
    }
2659
  } else {
2660
    // Rebalancing not possible because this is the root node.
2661
    // Create a new root node and set the current root node as the child of the
2662
    // new root.
2663
    parent = new_internal_node(parent);
2664
    parent->set_generation(root()->generation());
2665
    parent->init_child(parent->start(), root());
2666
    mutable_root() = parent;
2667
    // If the former root was a leaf node, then it's now the rightmost node.
2668
    assert(parent->start_child()->is_internal() ||
2669
           parent->start_child() == rightmost());
2670
  }
2671
2672
  // Split the node.
2673
  node_type *split_node;
2674
  if (node->is_leaf()) {
2675
    split_node = new_leaf_node(parent);
2676
    node->split(insert_position, split_node, mutable_allocator());
2677
    if (rightmost() == node) mutable_rightmost() = split_node;
2678
  } else {
2679
    split_node = new_internal_node(parent);
2680
    node->split(insert_position, split_node, mutable_allocator());
2681
  }
2682
2683
  if (insert_position > node->finish()) {
2684
    insert_position = insert_position - node->count() - 1;
2685
    node = split_node;
2686
  }
2687
}
2688
2689
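
// Illustrative sketch (not part of Abseil): the `to_move` arithmetic above,
// extracted with plain ints standing in for field_type. Names are
// hypothetical. When the insertion lands inside the full node
// (insert_position < node_slots), the left sibling's free slots are split
// between the two nodes; when it lands one past the end, every free slot is
// shifted left so the new value goes into freshly vacated space.
inline int toy_rebalance_left_to_move(int node_slots, int left_count,
                                      int insert_position) {
  // e.g. node_slots = 8, left_count = 4, insert_position = 3:
  //   (8 - 4) / (1 + 1) == 2 values move to the left sibling.
  // e.g. insert_position = 8 (appending):
  //   (8 - 4) / (1 + 0) == 4 values move, vacating half of the full node.
  int to_move =
      (node_slots - left_count) / (1 + (insert_position < node_slots ? 1 : 0));
  // Always move at least one value, as the (std::max) clamp above does.
  return to_move < 1 ? 1 : to_move;
}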

template <typename P>
void btree<P>::merge_nodes(node_type *left, node_type *right) {
  left->merge(right, mutable_allocator());
  if (rightmost() == right) mutable_rightmost() = left;
}

template <typename P>
bool btree<P>::try_merge_or_rebalance(iterator *iter) {
  node_type *parent = iter->node_->parent();
  if (iter->node_->position() > parent->start()) {
    // Try merging with our left sibling.
    node_type *left = parent->child(iter->node_->position() - 1);
    assert(left->max_count() == kNodeSlots);
    if (1U + left->count() + iter->node_->count() <= kNodeSlots) {
      iter->position_ += 1 + left->count();
      merge_nodes(left, iter->node_);
      iter->node_ = left;
      return true;
    }
  }
  if (iter->node_->position() < parent->finish()) {
    // Try merging with our right sibling.
    node_type *right = parent->child(iter->node_->position() + 1);
    assert(right->max_count() == kNodeSlots);
    if (1U + iter->node_->count() + right->count() <= kNodeSlots) {
      merge_nodes(iter->node_, right);
      return true;
    }
    // Try rebalancing with our right sibling. We don't perform rebalancing if
    // we deleted the first element from iter->node_ and the node is not
    // empty. This is a small optimization for the common pattern of deleting
    // from the front of the tree.
    if (right->count() > kMinNodeValues &&
        (iter->node_->count() == 0 || iter->position_ > iter->node_->start())) {
      field_type to_move = (right->count() - iter->node_->count()) / 2;
      to_move =
          (std::min)(to_move, static_cast<field_type>(right->count() - 1));
      iter->node_->rebalance_right_to_left(to_move, right, mutable_allocator());
      return false;
    }
  }
  if (iter->node_->position() > parent->start()) {
    // Try rebalancing with our left sibling. We don't perform rebalancing if
    // we deleted the last element from iter->node_ and the node is not
    // empty. This is a small optimization for the common pattern of deleting
    // from the back of the tree.
    node_type *left = parent->child(iter->node_->position() - 1);
    if (left->count() > kMinNodeValues &&
        (iter->node_->count() == 0 ||
         iter->position_ < iter->node_->finish())) {
      field_type to_move = (left->count() - iter->node_->count()) / 2;
      to_move = (std::min)(to_move, static_cast<field_type>(left->count() - 1));
      left->rebalance_left_to_right(to_move, iter->node_, mutable_allocator());
      iter->position_ += to_move;
      return false;
    }
  }
  return false;
}
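
// Illustrative sketch (not part of Abseil): the merge feasibility test used
// above, with hypothetical plain-int inputs. Two siblings can only merge if
// both nodes' values plus the one delimiting value pulled down from the
// parent fit in a single node.
inline bool toy_can_merge_siblings(int left_count, int right_count,
                                   int node_slots) {
  // e.g. node_slots = 8: counts 3 and 4 can merge (3 + 4 + 1 <= 8),
  // while counts 4 and 4 cannot (4 + 4 + 1 > 8).
  return 1 + left_count + right_count <= node_slots;
}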

template <typename P>
void btree<P>::try_shrink() {
  node_type *orig_root = root();
  if (orig_root->count() > 0) {
    return;
  }
  // Deleted the last item on the root node, shrink the height of the tree.
  if (orig_root->is_leaf()) {
    assert(size() == 0);
    mutable_root() = mutable_rightmost() = EmptyNode();
  } else {
    node_type *child = orig_root->start_child();
    child->make_root();
    mutable_root() = child;
  }
  node_type::clear_and_delete(orig_root, mutable_allocator());
}

template <typename P>
template <typename IterType>
inline IterType btree<P>::internal_last(IterType iter) {
  assert(iter.node_ != nullptr);
  while (iter.position_ == iter.node_->finish()) {
    iter.position_ = iter.node_->position();
    iter.node_ = iter.node_->parent();
    if (iter.node_->is_leaf()) {
      iter.node_ = nullptr;
      break;
    }
  }
  iter.update_generation();
  return iter;
}
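
// Illustrative sketch (not part of Abseil): the climb performed by
// internal_last on a toy node shape. ToyNode is hypothetical and models only
// the fields the loop touches; in the real tree, climbing past the root is
// detected because the root's parent is itself a leaf node (the same property
// that is_root() relies on).
struct ToyNode {
  ToyNode *parent;
  int position;  // This node's index within its parent.
  int finish;    // One past the last value stored in this node.
  bool leaf;
};

// Climbs out of "one past the end" positions until it finds a node that
// actually holds a value at the position; {nullptr, 0} stands in for end().
inline std::pair<ToyNode *, int> toy_internal_last(ToyNode *node, int pos) {
  while (pos == node->finish) {
    pos = node->position;
    node = node->parent;
    if (node->leaf) return {nullptr, 0};
  }
  return {node, pos};
}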

template <typename P>
template <typename... Args>
inline auto btree<P>::internal_emplace(iterator iter, Args &&...args)
    -> iterator {
  if (iter.node_->is_internal()) {
    // We can't insert on an internal node. Instead, we'll insert after the
    // previous value which is guaranteed to be on a leaf node.
    --iter;
    ++iter.position_;
  }
  const field_type max_count = iter.node_->max_count();
  allocator_type *alloc = mutable_allocator();
  if (iter.node_->count() == max_count) {
    // Make room in the leaf for the new item.
    if (max_count < kNodeSlots) {
      // Insertion into the root where the root is smaller than the full node
      // size. Simply grow the size of the root node.
      assert(iter.node_ == root());
      iter.node_ = new_leaf_root_node(static_cast<field_type>(
          (std::min)(static_cast<int>(kNodeSlots), 2 * max_count)));
      // Transfer the values from the old root to the new root.
      node_type *old_root = root();
      node_type *new_root = iter.node_;
      new_root->transfer_n(old_root->count(), new_root->start(),
                           old_root->start(), old_root, alloc);
      new_root->set_finish(old_root->finish());
      old_root->set_finish(old_root->start());
      new_root->set_generation(old_root->generation());
      node_type::clear_and_delete(old_root, alloc);
      mutable_root() = mutable_rightmost() = new_root;
    } else {
      rebalance_or_split(&iter);
    }
  }
  iter.node_->emplace_value(static_cast<field_type>(iter.position_), alloc,
                            std::forward<Args>(args)...);
  ++size_;
  iter.update_generation();
  return iter;
}
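
// Illustrative sketch (not part of Abseil): the root-growth schedule used
// above. A small leaf root doubles in capacity on each overflow until it
// reaches the full node size; with a hypothetical kNodeSlots of 14 (the real
// value depends on the node layout), the capacities would run 1, 2, 4, 8, 14,
// after which rebalance_or_split takes over.
inline int toy_next_root_capacity(int max_count, int node_slots) {
  return (std::min)(node_slots, 2 * max_count);
}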

template <typename P>
template <typename K>
inline auto btree<P>::internal_locate(const K &key) const
    -> SearchResult<iterator, is_key_compare_to::value> {
  iterator iter(const_cast<node_type *>(root()));
  for (;;) {
    SearchResult<size_type, is_key_compare_to::value> res =
        iter.node_->lower_bound(key, key_comp());
    iter.position_ = static_cast<int>(res.value);
    if (res.IsEq()) {
      return {iter, MatchKind::kEq};
    }
    // Note: in the non-key-compare-to case, we don't need to walk all the way
    // down the tree if the keys are equal, but determining equality would
    // require doing an extra comparison on each node on the way down, and we
    // will need to go all the way to the leaf node in the expected case.
    if (iter.node_->is_leaf()) {
      break;
    }
    iter.node_ = iter.node_->child(static_cast<field_type>(iter.position_));
  }
  // Note: in the non-key-compare-to case, the key may actually be equivalent
  // here (and the MatchKind::kNe is ignored).
  return {iter, MatchKind::kNe};
}
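
// Illustrative sketch (not part of Abseil): a minimal three-way comparator of
// the kind that enables the MatchKind::kEq early exit above. Comparators
// returning an absl::weak_ordering (compare.h is already included by this
// header) let the per-node search report equality directly, so the descent
// can stop at an internal node; a plain less-than comparator cannot
// distinguish "equal" from "greater" without an extra comparison, so the
// search runs all the way to a leaf.
struct ToyCompareTo {
  absl::weak_ordering operator()(int a, int b) const {
    if (a < b) return absl::weak_ordering::less;
    if (b < a) return absl::weak_ordering::greater;
    return absl::weak_ordering::equivalent;
  }
};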

template <typename P>
template <typename K>
auto btree<P>::internal_lower_bound(const K &key) const
    -> SearchResult<iterator, is_key_compare_to::value> {
  if (!params_type::template can_have_multiple_equivalent_keys<K>()) {
    SearchResult<iterator, is_key_compare_to::value> ret = internal_locate(key);
    ret.value = internal_last(ret.value);
    return ret;
  }
  iterator iter(const_cast<node_type *>(root()));
  SearchResult<size_type, is_key_compare_to::value> res;
  bool seen_eq = false;
  for (;;) {
    res = iter.node_->lower_bound(key, key_comp());
    iter.position_ = static_cast<int>(res.value);
    if (iter.node_->is_leaf()) {
      break;
    }
    seen_eq = seen_eq || res.IsEq();
    iter.node_ = iter.node_->child(static_cast<field_type>(iter.position_));
  }
  if (res.IsEq()) return {iter, MatchKind::kEq};
  return {internal_last(iter), seen_eq ? MatchKind::kEq : MatchKind::kNe};
}
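
// Illustrative sketch (not part of Abseil): the contract implemented above,
// shown with std::lower_bound on a flat sorted range. When equivalent keys
// are permitted, lower_bound must return the first of the equal run, which is
// why the tree walk keeps descending and tracks seen_eq instead of stopping
// at the first match the way internal_locate may.
inline const int *toy_first_equivalent(const int *first, const int *last,
                                       int key) {
  // For the range {1, 2, 2, 2, 3} and key == 2, this points at the first 2.
  return std::lower_bound(first, last, key);
}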

template <typename P>
template <typename K>
auto btree<P>::internal_upper_bound(const K &key) const -> iterator {
  iterator iter(const_cast<node_type *>(root()));
  for (;;) {
    iter.position_ = static_cast<int>(iter.node_->upper_bound(key, key_comp()));
    if (iter.node_->is_leaf()) {
      break;
    }
    iter.node_ = iter.node_->child(static_cast<field_type>(iter.position_));
  }
  return internal_last(iter);
}

template <typename P>
template <typename K>
auto btree<P>::internal_find(const K &key) const -> iterator {
  SearchResult<iterator, is_key_compare_to::value> res = internal_locate(key);
  if (res.HasMatch()) {
    if (res.IsEq()) {
      return res.value;
    }
  } else {
    const iterator iter = internal_last(res.value);
    if (iter.node_ != nullptr && !compare_keys(key, iter.key())) {
      return iter;
    }
  }
  return {nullptr, 0};
}
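
// Illustrative sketch (not part of Abseil): the final equivalence test used
// in the non-compare-to branch above. With only a less-than comparator, two
// keys are equivalent iff neither is less than the other; the lower-bound
// search already established !(iter.key() < key), so the single
// compare_keys(key, iter.key()) call completes the equivalence check.
inline bool toy_equivalent_after_lower_bound(int key, int found_key) {
  // !(found_key < key) is guaranteed by the lower-bound search.
  return !(key < found_key);
}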

template <typename P>
typename btree<P>::size_type btree<P>::internal_verify(
    const node_type *node, const key_type *lo, const key_type *hi) const {
  assert(node->count() > 0);
  assert(node->count() <= node->max_count());
  if (lo) {
    assert(!compare_keys(node->key(node->start()), *lo));
  }
  if (hi) {
    assert(!compare_keys(*hi, node->key(node->finish() - 1)));
  }
  for (int i = node->start() + 1; i < node->finish(); ++i) {
    assert(!compare_keys(node->key(i), node->key(i - 1)));
  }
  size_type count = node->count();
  if (node->is_internal()) {
    for (field_type i = node->start(); i <= node->finish(); ++i) {
      assert(node->child(i) != nullptr);
      assert(node->child(i)->parent() == node);
      assert(node->child(i)->position() == i);
      count += internal_verify(node->child(i),
                               i == node->start() ? lo : &node->key(i - 1),
                               i == node->finish() ? hi : &node->key(i));
    }
  }
  return count;
}

struct btree_access {
  template <typename BtreeContainer, typename Pred>
  static auto erase_if(BtreeContainer &container, Pred pred) ->
      typename BtreeContainer::size_type {
    const auto initial_size = container.size();
    auto &tree = container.tree_;
    auto *alloc = tree.mutable_allocator();
    for (auto it = container.begin(); it != container.end();) {
      if (!pred(*it)) {
        ++it;
        continue;
      }
      auto *node = it.node_;
      if (node->is_internal()) {
        // Handle internal nodes normally.
        it = container.erase(it);
        continue;
      }
      // If this is a leaf node, then we do all the erases from this node
      // at once before doing rebalancing.

      // The current position to transfer slots to.
      int to_pos = it.position_;
      node->value_destroy(it.position_, alloc);
      while (++it.position_ < node->finish()) {
        it.update_generation();
        if (pred(*it)) {
          node->value_destroy(it.position_, alloc);
        } else {
          node->transfer(node->slot(to_pos++), node->slot(it.position_), alloc);
        }
      }
      const int num_deleted = node->finish() - to_pos;
      tree.size_ -= num_deleted;
      node->set_finish(to_pos);
      it.position_ = to_pos;
      it = tree.rebalance_after_delete(it);
    }
    return initial_size - container.size();
  }
};
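
// Illustrative usage (not part of this header): btree_access::erase_if backs
// the erase_if() overloads declared for the b-tree containers, e.g. assuming
// absl/container/btree_set.h:
//
//   absl::btree_set<int> s = {1, 2, 3, 4, 5, 6};
//   absl::erase_if(s, [](int v) { return v % 2 == 0; });  // removes 2, 4, 6
//
// Erasing through this entry point lets runs of matches on one leaf be
// destroyed in a single pass, with rebalancing deferred until the node has
// been compacted.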

#undef ABSL_BTREE_ENABLE_GENERATIONS

}  // namespace container_internal
ABSL_NAMESPACE_END
}  // namespace absl

#endif  // ABSL_CONTAINER_INTERNAL_BTREE_H_