Coverage Report

Created: 2025-12-31 06:30

/src/abseil-cpp/absl/container/internal/container_memory.h
Line| Count|Source
   1|      |// Copyright 2018 The Abseil Authors.
   2|      |//
   3|      |// Licensed under the Apache License, Version 2.0 (the "License");
   4|      |// you may not use this file except in compliance with the License.
   5|      |// You may obtain a copy of the License at
   6|      |//
   7|      |//      https://www.apache.org/licenses/LICENSE-2.0
   8|      |//
   9|      |// Unless required by applicable law or agreed to in writing, software
  10|      |// distributed under the License is distributed on an "AS IS" BASIS,
  11|      |// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12|      |// See the License for the specific language governing permissions and
  13|      |// limitations under the License.
  14|      |
  15|      |#ifndef ABSL_CONTAINER_INTERNAL_CONTAINER_MEMORY_H_
  16|      |#define ABSL_CONTAINER_INTERNAL_CONTAINER_MEMORY_H_
  17|      |
  18|      |#include <cassert>
  19|      |#include <cstddef>
  20|      |#include <cstdint>
  21|      |#include <cstring>
  22|      |#include <memory>
  23|      |#include <new>
  24|      |#include <tuple>
  25|      |#include <type_traits>
  26|      |#include <utility>
  27|      |
  28|      |#include "absl/base/config.h"
  29|      |#include "absl/hash/hash.h"
  30|      |#include "absl/memory/memory.h"
  31|      |#include "absl/meta/type_traits.h"
  32|      |#include "absl/utility/utility.h"
  33|      |
  34|      |#ifdef ABSL_HAVE_ADDRESS_SANITIZER
  35|      |#include <sanitizer/asan_interface.h>
  36|      |#endif
  37|      |
  38|      |#ifdef ABSL_HAVE_MEMORY_SANITIZER
  39|      |#include <sanitizer/msan_interface.h>
  40|      |#endif
  41|      |
  42|      |namespace absl {
  43|      |ABSL_NAMESPACE_BEGIN
  44|      |namespace container_internal {
  45|      |
  46|      |template <size_t Alignment>
  47|      |struct alignas(Alignment) AlignedType {
  48|      |  // When alignment is sufficient for the allocated memory to store pointers,
  49|      |  // include a pointer member so that swisstable backing arrays end up in the
  50|      |  // pointer-containing partition for heap partitioning.
  51|      |  std::conditional_t<(Alignment < alignof(void*)), char, void*> pointer;
  52|      |};
  53|      |
  54|      |// Allocates at least n bytes aligned to the specified alignment.
  55|      |// Alignment must be a power of 2. It must be positive.
  56|      |//
  57|      |// Note that many allocators don't honor alignment requirements above a certain
  58|      |// threshold (usually either alignof(std::max_align_t) or alignof(void*)).
  59|      |// Allocate() doesn't apply alignment corrections. If the underlying allocator
  60|      |// returns an insufficiently aligned pointer, that's what you are going to get.
  61|      |template <size_t Alignment, class Alloc>
  62|  249k|void* Allocate(Alloc* alloc, size_t n) {
  63|  249k|  static_assert(Alignment > 0, "");
  64|  249k|  assert(n && "n must be positive");
  65|  249k|  using M = AlignedType<Alignment>;
  66|  249k|  using A = typename absl::allocator_traits<Alloc>::template rebind_alloc<M>;
  67|  249k|  using AT = typename absl::allocator_traits<Alloc>::template rebind_traits<M>;
  68|      |  // On macOS, "mem_alloc" is a #define with one argument defined in
  69|      |  // rpc/types.h, so we can't name the variable "mem_alloc" and initialize it
  70|      |  // with the "foo(bar)" syntax.
  71|  249k|  A my_mem_alloc(*alloc);
  72|  249k|  void* p = AT::allocate(my_mem_alloc, (n + sizeof(M) - 1) / sizeof(M));
  73|  249k|  assert(reinterpret_cast<uintptr_t>(p) % Alignment == 0 &&
  74|  249k|         "allocator does not respect alignment");
  75|  249k|  return p;
  76|  249k|}
  77|      |
  78|      |// Returns true if the destruction of the value with the given Allocator will
  79|      |// be trivial.
  80|      |template <class Allocator, class ValueType>
  81|      |constexpr auto IsDestructionTrivial() {
  82|      |  constexpr bool result =
  83|      |      std::is_trivially_destructible<ValueType>::value &&
  84|      |      std::is_same<typename absl::allocator_traits<
  85|      |                       Allocator>::template rebind_alloc<char>,
  86|      |                   std::allocator<char>>::value;
  87|      |  return std::integral_constant<bool, result>();
  88|      |}
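
For reference, a minimal sketch of what the trait reports for a trivial value type with the default allocator (illustrative; not part of the source file):

    // Both the value type and the rebound allocator are trivial here, so the
    // returned integral_constant is std::true_type.
    static_assert(
        decltype(absl::container_internal::IsDestructionTrivial<
                 std::allocator<int>, int>())::value,
        "int destruction via std::allocator is trivial");
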
  89|      |
  90|      |// The pointer must have been previously obtained by calling
  91|      |// Allocate<Alignment>(alloc, n).
  92|      |template <size_t Alignment, class Alloc>
  93|  249k|void Deallocate(Alloc* alloc, void* p, size_t n) {
  94|  249k|  static_assert(Alignment > 0, "");
  95|  249k|  assert(n && "n must be positive");
  96|  249k|  using M = AlignedType<Alignment>;
  97|  249k|  using A = typename absl::allocator_traits<Alloc>::template rebind_alloc<M>;
  98|  249k|  using AT = typename absl::allocator_traits<Alloc>::template rebind_traits<M>;
  99|      |  // On macOS, "mem_alloc" is a #define with one argument defined in
 100|      |  // rpc/types.h, so we can't name the variable "mem_alloc" and initialize it
 101|      |  // with the "foo(bar)" syntax.
 102|  249k|  A my_mem_alloc(*alloc);
 103|  249k|  AT::deallocate(my_mem_alloc, static_cast<M*>(p),
 104|  249k|                 (n + sizeof(M) - 1) / sizeof(M));
 105|  249k|}
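
A minimal usage sketch (illustrative, not part of the file): the same Alignment, allocator, and byte count must be used for the matching Deallocate() call.

    std::allocator<char> alloc;
    constexpr size_t kAlign = alignof(std::max_align_t);
    // At least 100 bytes, aligned to kAlign (subject to the allocator caveat above).
    void* p = absl::container_internal::Allocate<kAlign>(&alloc, 100);
    // ... use the buffer ...
    absl::container_internal::Deallocate<kAlign>(&alloc, p, 100);
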
 106|      |
 107|      |namespace memory_internal {
 108|      |
 109|      |// Constructs T into uninitialized storage pointed to by `ptr` using the args
 110|      |// specified in the tuple.
 111|      |template <class Alloc, class T, class Tuple, size_t... I>
 112|      |void ConstructFromTupleImpl(Alloc* alloc, T* ptr, Tuple&& t,
 113|      |                            absl::index_sequence<I...>) {
 114|      |  absl::allocator_traits<Alloc>::construct(
 115|      |      *alloc, ptr, std::get<I>(std::forward<Tuple>(t))...);
 116|      |}
 117|      |
 118|      |template <class T, class F>
 119|      |struct WithConstructedImplF {
 120|      |  template <class... Args>
 121|      |  decltype(std::declval<F>()(std::declval<T>())) operator()(
 122|      |      Args&&... args) const {
 123|      |    return std::forward<F>(f)(T(std::forward<Args>(args)...));
 124|      |  }
 125|      |  F&& f;
 126|      |};
 127|      |
 128|      |template <class T, class Tuple, size_t... Is, class F>
 129|      |decltype(std::declval<F>()(std::declval<T>())) WithConstructedImpl(
 130|      |    Tuple&& t, absl::index_sequence<Is...>, F&& f) {
 131|      |  return WithConstructedImplF<T, F>{std::forward<F>(f)}(
 132|      |      std::get<Is>(std::forward<Tuple>(t))...);
 133|      |}
 134|      |
 135|      |template <class T, size_t... Is>
 136|      |auto TupleRefImpl(T&& t, absl::index_sequence<Is...>)
 137|      |    -> decltype(std::forward_as_tuple(std::get<Is>(std::forward<T>(t))...)) {
 138|      |  return std::forward_as_tuple(std::get<Is>(std::forward<T>(t))...);
 139|      |}
 140|      |
 141|      |// Returns a tuple of references to the elements of the input tuple. T must be a
 142|      |// tuple.
 143|      |template <class T>
 144|      |auto TupleRef(T&& t) -> decltype(TupleRefImpl(
 145|      |    std::forward<T>(t),
 146|      |    absl::make_index_sequence<
 147|      |        std::tuple_size<typename std::decay<T>::type>::value>())) {
 148|      |  return TupleRefImpl(
 149|      |      std::forward<T>(t),
 150|      |      absl::make_index_sequence<
 151|      |          std::tuple_size<typename std::decay<T>::type>::value>());
 152|      |}
 153|      |
 154|      |template <class F, class K, class V>
 155|      |decltype(std::declval<F>()(std::declval<const K&>(), std::piecewise_construct,
 156|      |                           std::declval<std::tuple<K>>(), std::declval<V>()))
 157|      |DecomposePairImpl(F&& f, std::pair<std::tuple<K>, V> p) {
 158|      |  const auto& key = std::get<0>(p.first);
 159|      |  return std::forward<F>(f)(key, std::piecewise_construct, std::move(p.first),
 160|      |                            std::move(p.second));
 161|      |}
 162|      |
 163|      |}  // namespace memory_internal
 164|      |
 165|      |// Constructs T into uninitialized storage pointed to by `ptr` using the args
 166|      |// specified in the tuple.
 167|      |template <class Alloc, class T, class Tuple>
 168|      |void ConstructFromTuple(Alloc* alloc, T* ptr, Tuple&& t) {
 169|      |  memory_internal::ConstructFromTupleImpl(
 170|      |      alloc, ptr, std::forward<Tuple>(t),
 171|      |      absl::make_index_sequence<
 172|      |          std::tuple_size<typename std::decay<Tuple>::type>::value>());
 173|      |}
 174|      |
 175|      |// Constructs T using the args specified in the tuple and calls F with the
 176|      |// constructed value.
 177|      |template <class T, class Tuple, class F>
 178|      |decltype(std::declval<F>()(std::declval<T>())) WithConstructed(Tuple&& t,
 179|      |                                                               F&& f) {
 180|      |  return memory_internal::WithConstructedImpl<T>(
 181|      |      std::forward<Tuple>(t),
 182|      |      absl::make_index_sequence<
 183|      |          std::tuple_size<typename std::decay<Tuple>::type>::value>(),
 184|      |      std::forward<F>(f));
 185|      |}
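
A usage sketch of both helpers (illustrative, not part of the file); the buffer, allocator, and argument tuples here are stand-ins:

    std::allocator<std::string> alloc;
    alignas(std::string) unsigned char buf[sizeof(std::string)];
    auto* p = reinterpret_cast<std::string*>(buf);
    // Constructs std::string(3, 'x') == "xxx" in place via the allocator.
    absl::container_internal::ConstructFromTuple(&alloc, p,
                                                 std::forward_as_tuple(3, 'x'));
    std::destroy_at(p);
    // WithConstructed builds a temporary std::string from the tuple and hands it
    // to the callback.
    size_t len = absl::container_internal::WithConstructed<std::string>(
        std::forward_as_tuple("hello"),
        [](std::string s) { return s.size(); });  // len == 5
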
 186|      |
 187|      |// Given arguments of an std::pair's constructor, PairArgs() returns a pair of
 188|      |// tuples with references to the passed arguments. The tuples contain
 189|      |// constructor arguments for the first and the second elements of the pair.
 190|      |//
 191|      |// The following two snippets are equivalent.
 192|      |//
 193|      |// 1. std::pair<F, S> p(args...);
 194|      |//
 195|      |// 2. auto a = PairArgs(args...);
 196|      |//    std::pair<F, S> p(std::piecewise_construct,
 197|      |//                      std::move(a.first), std::move(a.second));
 198|     0|inline std::pair<std::tuple<>, std::tuple<>> PairArgs() { return {}; }
 199|      |template <class F, class S>
 200|      |std::pair<std::tuple<F&&>, std::tuple<S&&>> PairArgs(F&& f, S&& s) {
 201|      |  return {std::piecewise_construct, std::forward_as_tuple(std::forward<F>(f)),
 202|      |          std::forward_as_tuple(std::forward<S>(s))};
 203|      |}
 204|      |template <class F, class S>
 205|      |std::pair<std::tuple<const F&>, std::tuple<const S&>> PairArgs(
 206|      |    const std::pair<F, S>& p) {
 207|      |  return PairArgs(p.first, p.second);
 208|      |}
 209|      |template <class F, class S>
 210|      |std::pair<std::tuple<F&&>, std::tuple<S&&>> PairArgs(std::pair<F, S>&& p) {
 211|      |  return PairArgs(std::forward<F>(p.first), std::forward<S>(p.second));
 212|      |}
 213|      |template <class F, class S>
 214|      |auto PairArgs(std::piecewise_construct_t, F&& f, S&& s)
 215|      |    -> decltype(std::make_pair(memory_internal::TupleRef(std::forward<F>(f)),
 216|      |                               memory_internal::TupleRef(std::forward<S>(s)))) {
 217|      |  return std::make_pair(memory_internal::TupleRef(std::forward<F>(f)),
 218|      |                        memory_internal::TupleRef(std::forward<S>(s)));
 219|      |}
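
A sketch of the equivalence described above (illustrative, not part of the file); with lvalue arguments the returned tuples hold lvalue references, so they stay valid across statements:

    int id = 1;
    std::string name = "one";
    auto a = absl::container_internal::PairArgs(id, name);
    // Equivalent to constructing p directly from (id, name).
    std::pair<int, std::string> p(std::piecewise_construct, std::move(a.first),
                                  std::move(a.second));
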
 220|      |
 221|      |// A helper function for implementing apply() in map policies.
 222|      |template <class F, class... Args>
 223|      |auto DecomposePair(F&& f, Args&&... args)
 224|      |    -> decltype(memory_internal::DecomposePairImpl(
 225|      |        std::forward<F>(f), PairArgs(std::forward<Args>(args)...))) {
 226|      |  return memory_internal::DecomposePairImpl(
 227|      |      std::forward<F>(f), PairArgs(std::forward<Args>(args)...));
 228|      |}
 229|      |
 230|      |// A helper function for implementing apply() in set policies.
 231|      |template <class F, class Arg>
 232|      |decltype(std::declval<F>()(std::declval<const Arg&>(), std::declval<Arg>()))
 233|      |DecomposeValue(F&& f, Arg&& arg) {
 234|      |  const auto& key = arg;
 235|      |  return std::forward<F>(f)(key, std::forward<Arg>(arg));
 236|      |}
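
A sketch of the calling convention these helpers give a policy's apply() (illustrative; the generic lambdas are stand-ins for the real callbacks):

    // DecomposePair: the callback receives the key first, then piecewise
    // construction arguments for the whole pair.
    int key = absl::container_internal::DecomposePair(
        [](const auto& k, auto&&...) { return k; }, 42, std::string("x"));  // 42
    // DecomposeValue: the callback receives the key and then the value itself.
    bool same = absl::container_internal::DecomposeValue(
        [](const std::string& k, auto&&...) { return k == "x"; },
        std::string("x"));  // true
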
 237|      |
 238|      |// Helper functions for asan and msan.
 239|  595k|inline void SanitizerPoisonMemoryRegion(const void* m, size_t s) {
 240|      |#ifdef ABSL_HAVE_ADDRESS_SANITIZER
 241|      |  ASAN_POISON_MEMORY_REGION(m, s);
 242|      |#endif
 243|      |#ifdef ABSL_HAVE_MEMORY_SANITIZER
 244|      |  __msan_poison(m, s);
 245|      |#endif
 246|  595k|  (void)m;
 247|  595k|  (void)s;
 248|  595k|}
 249|      |
 250| 10.4M|inline void SanitizerUnpoisonMemoryRegion(const void* m, size_t s) {
 251|      |#ifdef ABSL_HAVE_ADDRESS_SANITIZER
 252|      |  ASAN_UNPOISON_MEMORY_REGION(m, s);
 253|      |#endif
 254|      |#ifdef ABSL_HAVE_MEMORY_SANITIZER
 255|      |  __msan_unpoison(m, s);
 256|      |#endif
 257| 10.4M|  (void)m;
 258| 10.4M|  (void)s;
 259| 10.4M|}
 260|      |
 261|      |template <typename T>
 262|      |inline void SanitizerPoisonObject(const T* object) {
 263|      |  SanitizerPoisonMemoryRegion(object, sizeof(T));
 264|      |}
 265|      |
 266|      |template <typename T>
 267|      |inline void SanitizerUnpoisonObject(const T* object) {
 268|      |  SanitizerUnpoisonMemoryRegion(object, sizeof(T));
 269|      |}
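
A brief sketch (illustrative, not part of the file); both calls compile to nothing unless the corresponding sanitizer is enabled:

    struct Node { int v; };
    Node* n = new Node{1};
    // Mark the object's bytes as off-limits, e.g. for an unused container slot.
    absl::container_internal::SanitizerPoisonObject(n);
    // Must be unpoisoned before the memory is read or written again.
    absl::container_internal::SanitizerUnpoisonObject(n);
    delete n;
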
 270|      |
 271|      |namespace memory_internal {
 272|      |
 273|      |// If Pair is a standard-layout type, OffsetOf<Pair>::kFirst and
 274|      |// OffsetOf<Pair>::kSecond are equivalent to offsetof(Pair, first) and
 275|      |// offsetof(Pair, second) respectively. Otherwise they are -1.
 276|      |//
 277|      |// The purpose of OffsetOf is to avoid calling offsetof() on a
 278|      |// non-standard-layout type, which is non-portable.
 279|      |template <class Pair, class = std::true_type>
 280|      |struct OffsetOf {
 281|      |  static constexpr size_t kFirst = static_cast<size_t>(-1);
 282|      |  static constexpr size_t kSecond = static_cast<size_t>(-1);
 283|      |};
 284|      |
 285|      |template <class Pair>
 286|      |struct OffsetOf<Pair, typename std::is_standard_layout<Pair>::type> {
 287|      |  static constexpr size_t kFirst = offsetof(Pair, first);
 288|      |  static constexpr size_t kSecond = offsetof(Pair, second);
 289|      |};
 290|      |
 291|      |template <class K, class V>
 292|      |struct IsLayoutCompatible {
 293|      | private:
 294|      |  struct Pair {
 295|      |    K first;
 296|      |    V second;
 297|      |  };
 298|      |
 299|      |  // Is P layout-compatible with Pair?
 300|      |  template <class P>
 301|      |  static constexpr bool LayoutCompatible() {
 302|      |    return std::is_standard_layout<P>() && sizeof(P) == sizeof(Pair) &&
 303|      |           alignof(P) == alignof(Pair) &&
 304|      |           memory_internal::OffsetOf<P>::kFirst ==
 305|      |               memory_internal::OffsetOf<Pair>::kFirst &&
 306|      |           memory_internal::OffsetOf<P>::kSecond ==
 307|      |               memory_internal::OffsetOf<Pair>::kSecond;
 308|      |  }
 309|      |
 310|      | public:
 311|      |  // Whether pair<const K, V> and pair<K, V> are layout-compatible. If they are,
 312|      |  // then it is safe to store them in a union and read from either.
 313|      |  static constexpr bool value = std::is_standard_layout<K>() &&
 314|      |                                std::is_standard_layout<Pair>() &&
 315|      |                                memory_internal::OffsetOf<Pair>::kFirst == 0 &&
 316|      |                                LayoutCompatible<std::pair<K, V>>() &&
 317|      |                                LayoutCompatible<std::pair<const K, V>>();
 318|      |};
 319|      |
 320|      |}  // namespace memory_internal
 321|      |
 322|      |// The internal storage type for key-value containers like flat_hash_map.
 323|      |//
 324|      |// It is convenient for the value_type of a flat_hash_map<K, V> to be
 325|      |// pair<const K, V>; the "const K" prevents accidental modification of the key
 326|      |// when dealing with the reference returned from find() and similar methods.
 327|      |// However, this creates other problems; we want to be able to emplace(K, V)
 328|      |// efficiently with move operations, and similarly be able to move a
 329|      |// pair<K, V> in insert().
 330|      |//
 331|      |// The solution is this union, which aliases the const and non-const versions
 332|      |// of the pair. This also allows flat_hash_map<const K, V> to work, even though
 333|      |// that has the same efficiency issues with move in emplace() and insert() -
 334|      |// but people do it anyway.
 335|      |//
 336|      |// If kMutableKeys is false, only the value member can be accessed.
 337|      |//
 338|      |// If kMutableKeys is true, the key can be accessed through all slots, while
 339|      |// value and mutable_value must be accessed only via INITIALIZED slots. Slots
 340|      |// are created and destroyed via mutable_value so that the key can be moved later.
 341|      |//
 342|      |// Accessing one of the union fields while the other is active is safe as
 343|      |// long as they are layout-compatible, which is guaranteed by the definition of
 344|      |// kMutableKeys. For C++11, the relevant section of the standard is
 345|      |// https://timsong-cpp.github.io/cppwp/n3337/class.mem#19 (9.2.19)
 346|      |template <class K, class V>
 347|      |union map_slot_type {
 348|      |  map_slot_type() {}
 349|      |  ~map_slot_type() = delete;
 350|      |  using value_type = std::pair<const K, V>;
 351|      |  using mutable_value_type =
 352|      |      std::pair<absl::remove_const_t<K>, absl::remove_const_t<V>>;
 353|      |
 354|      |  value_type value;
 355|      |  mutable_value_type mutable_value;
 356|      |  absl::remove_const_t<K> key;
 357|      |};
 358|      |
 359|      |template <class K, class V>
 360|      |struct map_slot_policy {
 361|      |  using slot_type = map_slot_type<K, V>;
 362|      |  using value_type = std::pair<const K, V>;
 363|      |  using mutable_value_type =
 364|      |      std::pair<absl::remove_const_t<K>, absl::remove_const_t<V>>;
 365|      |
 366|      | private:
 367|      |  static void emplace(slot_type* slot) {
 368|      |    // The construction of the union doesn't do anything at runtime, but it
 369|      |    // allows us to access its members without violating aliasing rules.
 370|      |    new (slot) slot_type;
 371|      |  }
 372|      |  // If pair<const K, V> and pair<K, V> are layout-compatible, we can accept one
 373|      |  // or the other via slot_type. We are also free to access the key via
 374|      |  // slot_type::key in this case.
 375|      |  using kMutableKeys = memory_internal::IsLayoutCompatible<K, V>;
 376|      |
 377|      | public:
 378|      |  static value_type& element(slot_type* slot) { return slot->value; }
 379|      |  static const value_type& element(const slot_type* slot) {
 380|      |    return slot->value;
 381|      |  }
 382|      |
 383|      |  static K& mutable_key(slot_type* slot) {
 384|      |    // Still check for kMutableKeys so that we can avoid calling std::launder
 385|      |    // unless necessary because it can interfere with optimizations.
 386|      |    return kMutableKeys::value ? slot->key
 387|      |                               : *std::launder(const_cast<K*>(
 388|      |                                     std::addressof(slot->value.first)));
 389|      |  }
 390|      |
 391|      |  static const K& key(const slot_type* slot) {
 392|      |    return kMutableKeys::value ? slot->key : slot->value.first;
 393|      |  }
 394|      |
 395|      |  template <class Allocator, class... Args>
 396|      |  static void construct(Allocator* alloc, slot_type* slot, Args&&... args) {
 397|      |    emplace(slot);
 398|      |    if (kMutableKeys::value) {
 399|      |      absl::allocator_traits<Allocator>::construct(*alloc, &slot->mutable_value,
 400|      |                                                   std::forward<Args>(args)...);
 401|      |    } else {
 402|      |      absl::allocator_traits<Allocator>::construct(*alloc, &slot->value,
 403|      |                                                   std::forward<Args>(args)...);
 404|      |    }
 405|      |  }
 406|      |
 407|      |  // Construct this slot by moving from another slot.
 408|      |  template <class Allocator>
 409|      |  static void construct(Allocator* alloc, slot_type* slot, slot_type* other) {
 410|      |    emplace(slot);
 411|      |    if (kMutableKeys::value) {
 412|      |      absl::allocator_traits<Allocator>::construct(
 413|      |          *alloc, &slot->mutable_value, std::move(other->mutable_value));
 414|      |    } else {
 415|      |      absl::allocator_traits<Allocator>::construct(*alloc, &slot->value,
 416|      |                                                   std::move(other->value));
 417|      |    }
 418|      |  }
 419|      |
 420|      |  // Construct this slot by copying from another slot.
 421|      |  template <class Allocator>
 422|      |  static void construct(Allocator* alloc, slot_type* slot,
 423|      |                        const slot_type* other) {
 424|      |    emplace(slot);
 425|      |    absl::allocator_traits<Allocator>::construct(*alloc, &slot->value,
 426|      |                                                 other->value);
 427|      |  }
 428|      |
 429|      |  template <class Allocator>
 430|      |  static auto destroy(Allocator* alloc, slot_type* slot) {
 431|      |    if (kMutableKeys::value) {
 432|      |      absl::allocator_traits<Allocator>::destroy(*alloc, &slot->mutable_value);
 433|      |    } else {
 434|      |      absl::allocator_traits<Allocator>::destroy(*alloc, &slot->value);
 435|      |    }
 436|      |    return IsDestructionTrivial<Allocator, value_type>();
 437|      |  }
 438|      |
 439|      |  template <class Allocator>
 440|      |  static auto transfer(Allocator* alloc, slot_type* new_slot,
 441|      |                       slot_type* old_slot) {
 442|      |    // This should really just be
 443|      |    // typename absl::is_trivially_relocatable<value_type>::type()
 444|      |    // but std::pair is not trivially copyable in C++23 in some standard
 445|      |    // library versions.
 446|      |    // See https://github.com/llvm/llvm-project/pull/95444 for instance.
 447|      |    auto is_relocatable = typename std::conjunction<
 448|      |        absl::is_trivially_relocatable<typename value_type::first_type>,
 449|      |        absl::is_trivially_relocatable<typename value_type::second_type>>::
 450|      |        type();
 451|      |
 452|      |    emplace(new_slot);
 453|      |    if (is_relocatable) {
 454|      |      // TODO(b/247130232,b/251814870): remove casts after fixing warnings.
 455|      |      std::memcpy(static_cast<void*>(std::launder(&new_slot->value)),
 456|      |                  static_cast<const void*>(&old_slot->value),
 457|      |                  sizeof(value_type));
 458|      |      return is_relocatable;
 459|      |    }
 460|      |
 461|      |    if (kMutableKeys::value) {
 462|      |      absl::allocator_traits<Allocator>::construct(
 463|      |          *alloc, &new_slot->mutable_value, std::move(old_slot->mutable_value));
 464|      |    } else {
 465|      |      absl::allocator_traits<Allocator>::construct(*alloc, &new_slot->value,
 466|      |                                                   std::move(old_slot->value));
 467|      |    }
 468|      |    destroy(alloc, old_slot);
 469|      |    return is_relocatable;
 470|      |  }
 471|      |};
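
A usage sketch (illustrative, not part of the file) showing how a hash-map policy drives one slot through its lifecycle; the raw buffer here stands in for an entry of a backing array:

    using Policy = absl::container_internal::map_slot_policy<std::string, int>;
    using Slot = Policy::slot_type;
    std::allocator<char> alloc;
    alignas(Slot) unsigned char storage[sizeof(Slot)];
    Slot* slot = reinterpret_cast<Slot*>(storage);
    // Constructs through mutable_value when the layouts are compatible, so the
    // key stays movable; otherwise through the pair<const K, V> member.
    Policy::construct(&alloc, slot, std::make_pair(std::string("key"), 1));
    const std::string& k = Policy::key(slot);   // "key"
    int& v = Policy::element(slot).second;      // 1
    Policy::destroy(&alloc, slot);
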
 472|      |
 473|      |// Suppress erroneous uninitialized memory errors on GCC. For example, GCC
 474|      |// thinks that the call to slot_array() in find_or_prepare_insert() is reading
 475|      |// uninitialized memory, but slot_array() is only called there when the table
 476|      |// is non-empty, and that memory is initialized whenever the table is non-empty.
 477|      |#if !defined(__clang__) && defined(__GNUC__)
 478|      |#define ABSL_SWISSTABLE_IGNORE_UNINITIALIZED(x)                    \
 479|      |  _Pragma("GCC diagnostic push")                                   \
 480|      |      _Pragma("GCC diagnostic ignored \"-Wmaybe-uninitialized\"")  \
 481|      |          _Pragma("GCC diagnostic ignored \"-Wuninitialized\"") x; \
 482|      |  _Pragma("GCC diagnostic pop")
 483|      |#define ABSL_SWISSTABLE_IGNORE_UNINITIALIZED_RETURN(x) \
 484|      |  ABSL_SWISSTABLE_IGNORE_UNINITIALIZED(return x)
 485|      |#else
 486|      |#define ABSL_SWISSTABLE_IGNORE_UNINITIALIZED(x) x
 487|  112M|#define ABSL_SWISSTABLE_IGNORE_UNINITIALIZED_RETURN(x) return x
 488|      |#endif
 489|      |
 490|      |// Variadic-argument hash function that ignores all arguments after the key.
 491|      |// Useful for use with policy traits.
 492|      |template <class Hash, bool kIsDefault>
 493|      |struct HashElement {
 494|      |  HashElement(const Hash& h, size_t s) : hash(h), seed(s) {}
 495|      |
 496|      |  template <class K, class... Args>
 497|      |  size_t operator()(const K& key, Args&&...) const {
 498|      |    if constexpr (kIsDefault) {
 499|      |      // TODO(b/384509507): resolve `no header providing
 500|      |      // "absl::hash_internal::SupportsHashWithSeed" is directly included`.
 501|      |      // Maybe we should make "internal/hash.h" be a separate library.
 502|      |      return absl::hash_internal::HashWithSeed().hash(hash, key, seed);
 503|      |    }
 504|      |    // NOLINTNEXTLINE(clang-diagnostic-sign-conversion)
 505|      |    return hash(key) ^ seed;
 506|      |  }
 507|      |  const Hash& hash;
 508|      |  size_t seed;
 509|      |};
 510|      |
 511|      |// Hash function for a specific key that takes only the seed as an argument.
 512|      |template <class Hash, class Key, bool kIsDefault>
 513|      |struct HashKey {
 514|      |  HashKey(const Hash& h, const Key& k) : hash(h), key(k) {}
 515|      |
 516|      |  size_t operator()(size_t seed) const {
 517|      |    return HashElement<Hash, kIsDefault>{hash, seed}(key);
 518|      |  }
 519|      |  const Hash& hash;
 520|      |  const Key& key;
 521|      |};
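
A brief sketch (illustrative, not part of the file) with a stand-in hasher; with kIsDefault == false both functors reduce to hash(key) ^ seed:

    struct StrHash {
      size_t operator()(const std::string& s) const { return s.size(); }  // toy hash
    };
    StrHash h;
    std::string key = "abc";
    // Extra arguments after the key are ignored.
    size_t a =
        absl::container_internal::HashElement<StrHash, false>{h, /*seed=*/7}(key, 1, 2.0);
    // HashKey pre-binds the key and is invoked with only the seed.
    size_t b = absl::container_internal::HashKey<StrHash, std::string, false>{h, key}(7);
    // a == b == (h(key) ^ 7)
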
 522|      |
 523|      |// Variadic-argument equality function that ignores all arguments after the key.
 524|      |// Useful for use with policy traits.
 525|      |template <class K1, class KeyEqual>
 526|      |struct EqualElement {
 527|      |  template <class K2, class... Args>
 528|      |  bool operator()(const K2& lhs, Args&&...) const {
 529|      |    ABSL_SWISSTABLE_IGNORE_UNINITIALIZED_RETURN(eq(lhs, rhs));
 530|      |  }
 531|      |  const K1& rhs;
 532|      |  const KeyEqual& eq;
 533|      |};
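
A brief sketch (illustrative, not part of the file); the stored element sits on the right-hand side of the comparison and trailing arguments are dropped:

    std::equal_to<std::string> eq;
    std::string stored = "abc";
    absl::container_internal::EqualElement<std::string, std::equal_to<std::string>>
        pred{stored, eq};
    bool match = pred(std::string("abc"), /*ignored=*/1, 2.0);  // true
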
 534|      |
 535|      |// Type-erased function for computing the hash of a slot.
 536|      |using HashSlotFn = size_t (*)(const void* hash_fn, void* slot, size_t seed);
 537|      |
 538|      |// Type-erased function to apply `Fn` to the data inside the `slot`.
 539|      |// The data is expected to have type `T`.
 540|      |template <class Fn, class T, bool kIsDefault>
 541|      |size_t TypeErasedApplyToSlotFn(const void* fn, void* slot, size_t seed) {
 542|      |  const auto* f = static_cast<const Fn*>(fn);
 543|      |  return HashElement<Fn, kIsDefault>{*f, seed}(*static_cast<const T*>(slot));
 544|      |}
 545|      |
 546|      |// Type-erased function to apply `Fn` to the data inside `*slot_ptr`.
 547|      |// The data is expected to have type `T`.
 548|      |template <class Fn, class T, bool kIsDefault>
 549|      |size_t TypeErasedDerefAndApplyToSlotFn(const void* fn, void* slot_ptr,
 550|      |                                       size_t seed) {
 551|      |  const auto* f = static_cast<const Fn*>(fn);
 552|      |  const T* slot = *static_cast<const T**>(slot_ptr);
 553|      |  return HashElement<Fn, kIsDefault>{*f, seed}(*slot);
 554|      |}
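
A sketch (illustrative, not part of the file) of how an instantiation is stored behind the type-erased HashSlotFn pointer; StrHash is the same stand-in hasher as above:

    struct StrHash {
      size_t operator()(const std::string& s) const { return s.size(); }
    };
    StrHash h;
    std::string slot = "abc";
    absl::container_internal::HashSlotFn fn =
        &absl::container_internal::TypeErasedApplyToSlotFn<StrHash, std::string, false>;
    size_t hv = fn(&h, &slot, /*seed=*/0);  // == h(slot) ^ 0
    // The Deref variant is for slots that hold a pointer to the element:
    std::string* indirect = &slot;
    size_t hv2 = absl::container_internal::TypeErasedDerefAndApplyToSlotFn<
        StrHash, std::string, false>(&h, &indirect, /*seed=*/0);
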
 555|      |
 556|      |}  // namespace container_internal
 557|      |ABSL_NAMESPACE_END
 558|      |}  // namespace absl
 559|      |
 560|      |#endif  // ABSL_CONTAINER_INTERNAL_CONTAINER_MEMORY_H_