Coverage Report

Created: 2025-04-27 06:20

/src/LPM/external.protobuf/include/google/protobuf/port.h

Execution counts recorded by the report are preserved below as trailing "// hits: N" comments; lines without such a comment had no recorded count.

// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc.  All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd

// A common header that is included across all protobuf headers.  We do our best
// to avoid #defining any macros here; instead we generally put macros in
// port_def.inc and port_undef.inc so they are not visible from outside of
// protobuf.

#ifndef GOOGLE_PROTOBUF_PORT_H__
#define GOOGLE_PROTOBUF_PORT_H__

#include <atomic>
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <new>
#include <string>
#include <type_traits>
#include <typeinfo>

#include "absl/base/config.h"
#include "absl/base/prefetch.h"
#include "absl/meta/type_traits.h"
#include "absl/strings/string_view.h"
#include "absl/types/optional.h"

// must be last
#include "google/protobuf/port_def.inc"

namespace google {
namespace protobuf {

class MessageLite;

namespace internal {

struct MessageTraitsImpl;

template <typename T>
inline PROTOBUF_ALWAYS_INLINE void StrongPointer(T* var) {
#if defined(__GNUC__)
  asm("" : : "r"(var));
#else
  auto volatile unused = var;
  (void)&unused;  // Use address to avoid an extra load of "unused".
#endif
}

#if defined(__x86_64__) && defined(__linux__) && !defined(__APPLE__) && \
    !defined(__ANDROID__) && defined(__clang__) && __clang_major__ >= 19
// Optimized implementation for clang where we can generate a relocation without
// adding runtime instructions.
template <typename T, T ptr>
inline PROTOBUF_ALWAYS_INLINE void StrongPointer() {
  // This injects a relocation in the code path without having to run code, but
  // we can only do it with a newer clang.
  asm(".reloc ., BFD_RELOC_NONE, %p0" ::"Ws"(ptr));
}

template <typename T, typename TraitsImpl = MessageTraitsImpl>
inline PROTOBUF_ALWAYS_INLINE void StrongReferenceToType() {
  static constexpr auto ptr =
      decltype(TraitsImpl::template value<T>)::StrongPointer();
  // This is identical to the implementation of StrongPointer() above, but it
  // has to be explicitly inlined here or else Clang 19 will raise an error in
  // some configurations.
  asm(".reloc ., BFD_RELOC_NONE, %p0" ::"Ws"(ptr));
}
#else   // .reloc
// Portable fallback. It usually generates a single LEA instruction or
// equivalent.
template <typename T, T ptr>
inline PROTOBUF_ALWAYS_INLINE void StrongPointer() {
  StrongPointer(ptr);
}

template <typename T, typename TraitsImpl = MessageTraitsImpl>
inline PROTOBUF_ALWAYS_INLINE void StrongReferenceToType() {
  return StrongPointer(
      decltype(TraitsImpl::template value<T>)::StrongPointer());
}
#endif  // .reloc
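
// Editor's illustrative sketch, not part of port.h: StrongPointer() forces
// the compiler to materialize a reference to `var`, so the linker keeps the
// referenced symbol (e.g. a registration object) alive while costing at most
// an address computation at runtime. `example_registry_entry` is a
// hypothetical name used only for illustration.
inline void ExampleKeepSymbolAlive(int* example_registry_entry) {
  StrongPointer(example_registry_entry);  // reference survives optimization
}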

// See comments on `AllocateAtLeast` for information on size returning new.
struct SizedPtr {
  void* p;
  size_t n;
};

// Debug hook allowing setting up test scenarios for AllocateAtLeast usage.
using AllocateAtLeastHookFn = SizedPtr (*)(size_t, void*);

// `AllocateAtLeastHook` API
constexpr bool HaveAllocateAtLeastHook();
void SetAllocateAtLeastHook(AllocateAtLeastHookFn fn, void* context = nullptr);

#if !defined(NDEBUG) && defined(ABSL_HAVE_THREAD_LOCAL) && \
    defined(__cpp_inline_variables)

// Hook data for the current thread. These vars must not be accessed directly;
// use the `HaveAllocateAtLeastHook()` and `SetAllocateAtLeastHook()` API
// instead.
inline thread_local AllocateAtLeastHookFn allocate_at_least_hook = nullptr;
inline thread_local void* allocate_at_least_hook_context = nullptr;

constexpr bool HaveAllocateAtLeastHook() { return true; }
inline void SetAllocateAtLeastHook(AllocateAtLeastHookFn fn, void* context) {
  allocate_at_least_hook = fn;
  allocate_at_least_hook_context = context;
}

#else  // !NDEBUG && ABSL_HAVE_THREAD_LOCAL && __cpp_inline_variables

constexpr bool HaveAllocateAtLeastHook() { return false; }  // hits: 0
inline void SetAllocateAtLeastHook(AllocateAtLeastHookFn fn, void* context) {}  // hits: 0

#endif  // !NDEBUG && ABSL_HAVE_THREAD_LOCAL && __cpp_inline_variables
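
// Editor's illustrative sketch, not part of port.h: in debug builds a test
// can observe requested allocation sizes by installing a thread-local hook.
// `ExampleRecordingHook` is a hypothetical name.
inline SizedPtr ExampleRecordingHook(size_t size, void* /*context*/) {
  // A real test would record `size` here before allocating.
  return {::operator new(size), size};
}
// Usage, guarded because release builds compile the hook away:
//   if (HaveAllocateAtLeastHook()) SetAllocateAtLeastHook(&ExampleRecordingHook);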

// Allocates at least `size` bytes. This function follows the C++ language
// proposal from D0901R10 (http://wg21.link/D0901R10) and will be implemented
// in terms of the new operator new semantics when available. The allocated
// memory should be released by a call to `SizedDelete` or `::operator delete`.
inline SizedPtr AllocateAtLeast(size_t size) {  // hits: 0
#if !defined(NDEBUG) && defined(ABSL_HAVE_THREAD_LOCAL) && \
    defined(__cpp_inline_variables)
  if (allocate_at_least_hook != nullptr) {
    return allocate_at_least_hook(size, allocate_at_least_hook_context);
  }
#endif  // !NDEBUG && ABSL_HAVE_THREAD_LOCAL && __cpp_inline_variables
  return {::operator new(size), size};
}

inline void SizedDelete(void* p, size_t size) {  // hits: 0
#if defined(__cpp_sized_deallocation)
  ::operator delete(p, size);
#else
  // Avoid -Wunused-parameter
  (void)size;
  ::operator delete(p);
#endif
}

inline void SizedArrayDelete(void* p, size_t size) {  // hits: 0
#if defined(__cpp_sized_deallocation)
  ::operator delete[](p, size);
#else
  // Avoid -Wunused-parameter
  (void)size;
  ::operator delete[](p);
#endif
}
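
// Editor's illustrative sketch, not part of port.h: AllocateAtLeast() may
// return more usable bytes than requested; release with the returned size.
inline void ExampleAllocateRoundTrip() {
  SizedPtr mem = AllocateAtLeast(100);  // mem.n >= 100
  // ... construct into mem.p, using up to mem.n bytes ...
  SizedDelete(mem.p, mem.n);  // pass the allocated size, not the request
}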

// Tag type used to invoke the constinit constructor overload of classes
// such as ArenaStringPtr and MapFieldBase. Such constructors are internal
// implementation details of the library.
struct ConstantInitialized {
  explicit ConstantInitialized() = default;
};

// Tag type used to invoke the arena constructor overload of classes such
// as ExtensionSet and MapFieldLite in aggregate initialization. These
// classes typically don't have move/copy constructors, which rules out
// explicit initialization in pre-C++17.
struct ArenaInitialized {
  explicit ArenaInitialized() = default;
};

template <typename To, typename From>
void AssertDownCast(From* from) {
  static_assert(std::is_base_of<From, To>::value, "illegal DownCast");

#if defined(__cpp_concepts)
  // Check that this function is not used to downcast message types.
  // For those we should use {Down,Dynamic}CastTo{Message,Generated}.
  static_assert(!requires {
    std::derived_from<std::remove_pointer_t<To>,
                      typename std::remove_pointer_t<To>::MessageLite>;
  });
#endif

#if PROTOBUF_RTTI
  // RTTI: debug mode only!
  assert(from == nullptr || dynamic_cast<To*>(from) != nullptr);
#endif
}

template <typename To, typename From>
inline To DownCast(From* f) {
  AssertDownCast<std::remove_pointer_t<To>>(f);
  return static_cast<To>(f);
}

template <typename ToRef, typename From>
inline ToRef DownCast(From& f) {
  AssertDownCast<std::remove_reference_t<ToRef>>(&f);
  return static_cast<ToRef>(f);
}
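
// Editor's illustrative sketch with hypothetical types, not part of port.h:
// DownCast is a static_cast that, in debug builds with RTTI, also asserts
// that the dynamic type really matches.
struct ExampleBase { virtual ~ExampleBase() = default; };
struct ExampleDerived : ExampleBase {};
inline ExampleDerived* ExampleCheckedCast(ExampleBase* b) {
  return DownCast<ExampleDerived*>(b);  // asserts dynamic type in debug+RTTI
}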

// Looks up the name of `T` via RTTI, if RTTI is available.
template <typename T>
inline absl::optional<absl::string_view> RttiTypeName() {
#if PROTOBUF_RTTI
  return typeid(T).name();
#else
  return absl::nullopt;
#endif
}
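
// Editor's illustrative sketch, not part of port.h: callers must handle the
// no-RTTI case, e.g. by substituting a placeholder.
inline absl::string_view ExampleTypeNameOrUnknown() {
  return RttiTypeName<int>().value_or("<type name unavailable>");
}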

// Helpers for identifying our supported types.
template <typename T>
struct is_supported_integral_type
    : absl::disjunction<std::is_same<T, int32_t>, std::is_same<T, uint32_t>,
                        std::is_same<T, int64_t>, std::is_same<T, uint64_t>,
                        std::is_same<T, bool>> {};

template <typename T>
struct is_supported_floating_point_type
    : absl::disjunction<std::is_same<T, float>, std::is_same<T, double>> {};

template <typename T>
struct is_supported_string_type
    : absl::disjunction<std::is_same<T, std::string>> {};

template <typename T>
struct is_supported_scalar_type
    : absl::disjunction<is_supported_integral_type<T>,
                        is_supported_floating_point_type<T>,
                        is_supported_string_type<T>> {};
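
// Editor's illustrative check, not part of port.h: the traits whitelist the
// exact scalar types protobuf fields may use; similar-width types don't count.
static_assert(is_supported_scalar_type<int32_t>::value,
              "int32_t is a supported field type");
static_assert(!is_supported_scalar_type<int16_t>::value,
              "int16_t is not a protobuf field type");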

template <typename T>
struct is_supported_message_type
    : absl::disjunction<std::is_base_of<MessageLite, T>> {
  static constexpr auto force_complete_type = sizeof(T);
};

// To prevent sharing cache lines between threads
#ifdef __cpp_aligned_new
enum { kCacheAlignment = 64 };
#else
enum { kCacheAlignment = alignof(max_align_t) };  // do the best we can
#endif
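
// Editor's illustrative sketch, not part of port.h: pad frequently written
// per-thread data to a cache-line boundary to avoid false sharing.
struct alignas(kCacheAlignment) ExamplePerThreadCounter {
  std::atomic<int64_t> value{0};
};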

// The maximum byte alignment we support.
enum { kMaxMessageAlignment = 8 };

// Returns true if debug hardening for clearing oneof message on arenas is
// enabled.
inline constexpr bool DebugHardenClearOneofMessageOnArena() {  // hits: 0
#ifdef NDEBUG
  return false;
#else
  return true;
#endif
}

constexpr bool PerformDebugChecks() {  // hits: 0
#if defined(NDEBUG) && !defined(PROTOBUF_ASAN) && !defined(PROTOBUF_MSAN) && \
    !defined(PROTOBUF_TSAN)
  return false;
#else
  return true;
#endif
}

// Force copy the default string to a string field so that non-optimized builds
// have harder-to-rely-on address stability.
constexpr bool DebugHardenForceCopyDefaultString() {  // hits: 0
  return false;
}

constexpr bool DebugHardenForceCopyInRelease() {  // hits: 0
  return false;
}

constexpr bool DebugHardenForceCopyInSwap() {  // hits: 0
  return false;
}

constexpr bool DebugHardenForceCopyInMove() {  // hits: 0
  return false;
}

constexpr bool DebugHardenForceAllocationOnConstruction() {  // hits: 0
  return false;
}

constexpr bool DebugHardenFuzzMessageSpaceUsedLong() {  // hits: 0
  return false;
}

// Returns true if pointers are 8B aligned, leaving the least significant
// 3 bits available.
inline constexpr bool PtrIsAtLeast8BAligned() { return alignof(void*) >= 8; }  // hits: 0

inline constexpr bool IsLazyParsingSupported() {  // hits: 0
  // We need 3 bits for pointer tagging in lazy parsing.
  return PtrIsAtLeast8BAligned();
}
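
// Editor's illustrative sketch, not part of port.h: with 8-byte alignment the
// low three pointer bits are zero, so lazy parsing can store a small tag
// there. `ExampleTagPointer` is a hypothetical helper.
inline std::uintptr_t ExampleTagPointer(void* p, std::uintptr_t tag) {
  assert(tag < 8 && PtrIsAtLeast8BAligned());
  return reinterpret_cast<std::uintptr_t>(p) | tag;
}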

// Prefetch 5 64-byte cache lines starting from 7 cache lines ahead. The
// constants are somewhat arbitrary and pretty aggressive, but were chosen to
// give better benchmark results: e.g. this is ~20% faster, a single cache-line
// prefetch is ~12% faster, and increasing or decreasing the distance makes
// results 2-4% worse. Important note: prefetch doesn't require a valid
// address, so it is OK to prefetch past the end of the message/valid memory;
// however, we do this inside an inline asm block, since computing the invalid
// pointer is potential UB. Only insert the prefetch once per function.
inline PROTOBUF_ALWAYS_INLINE void Prefetch5LinesFrom7Lines(const void* ptr) {  // hits: 0
  PROTOBUF_PREFETCH_WITH_OFFSET(ptr, 448);
  PROTOBUF_PREFETCH_WITH_OFFSET(ptr, 512);
  PROTOBUF_PREFETCH_WITH_OFFSET(ptr, 576);
  PROTOBUF_PREFETCH_WITH_OFFSET(ptr, 640);
  PROTOBUF_PREFETCH_WITH_OFFSET(ptr, 704);
}

// Prefetch 5 64-byte cache lines starting from 1 cache line ahead.
inline PROTOBUF_ALWAYS_INLINE void Prefetch5LinesFrom1Line(const void* ptr) {  // hits: 0
  PROTOBUF_PREFETCH_WITH_OFFSET(ptr, 64);
  PROTOBUF_PREFETCH_WITH_OFFSET(ptr, 128);
  PROTOBUF_PREFETCH_WITH_OFFSET(ptr, 192);
  PROTOBUF_PREFETCH_WITH_OFFSET(ptr, 256);
  PROTOBUF_PREFETCH_WITH_OFFSET(ptr, 320);
}

#if defined(NDEBUG) && ABSL_HAVE_BUILTIN(__builtin_unreachable)
[[noreturn]] ABSL_ATTRIBUTE_COLD PROTOBUF_ALWAYS_INLINE inline void
Unreachable() {  // hits: 0
  __builtin_unreachable();
}
#elif ABSL_HAVE_BUILTIN(__builtin_FILE) && ABSL_HAVE_BUILTIN(__builtin_LINE)
[[noreturn]] ABSL_ATTRIBUTE_COLD inline void Unreachable(
    const char* file = __builtin_FILE(), int line = __builtin_LINE()) {
  protobuf_assumption_failed("Unreachable", file, line);
}
#else
[[noreturn]] ABSL_ATTRIBUTE_COLD inline void Unreachable() {
  protobuf_assumption_failed("Unreachable", "", 0);
}
#endif
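
// Editor's illustrative sketch, not part of port.h: marking impossible
// branches lets release builds drop them and debug builds fail loudly.
inline int ExampleDispatch(int kind) {
  switch (kind) {
    case 0: return 10;
    case 1: return 20;
    default: Unreachable();  // callers guarantee kind is 0 or 1
  }
}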

#ifdef PROTOBUF_TSAN
// TODO: it would be preferable to use __tsan_external_read/
// __tsan_external_write, but they can cause dlopen issues.
template <typename T>
inline PROTOBUF_ALWAYS_INLINE void TSanRead(const T* impl) {
  char protobuf_tsan_dummy =
      *reinterpret_cast<const char*>(&impl->_tsan_detect_race);
  asm volatile("" : "+r"(protobuf_tsan_dummy));
}

// We currently use a dedicated member for TSan checking so the value of this
// member is not important. We can unconditionally write to it without affecting
// correctness of the rest of the class.
template <typename T>
inline PROTOBUF_ALWAYS_INLINE void TSanWrite(T* impl) {
  *reinterpret_cast<char*>(&impl->_tsan_detect_race) = 0;
}
#else
inline PROTOBUF_ALWAYS_INLINE void TSanRead(const void*) {}   // hits: 204
inline PROTOBUF_ALWAYS_INLINE void TSanWrite(const void*) {}  // hits: 102
#endif

// This trampoline allows calling from codegen without needing a #include to
// absl. It simplifies IWYU and deps.
inline void PrefetchToLocalCache(const void* ptr) {  // hits: 1.32k
  absl::PrefetchToLocalCache(ptr);
}

template <typename T>
constexpr T* Launder(T* p) {  // hits: 0
#if defined(__cpp_lib_launder) && __cpp_lib_launder >= 201606L
  return std::launder(p);
#elif ABSL_HAVE_BUILTIN(__builtin_launder)
  return __builtin_launder(p);
#else
  return p;
#endif
}
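
// Editor's illustrative sketch, not part of port.h: after re-creating an
// object in existing storage with placement new, Launder() yields a pointer
// the compiler must treat as referring to the new object.
inline int ExampleLaunderUse(void* storage) {
  int* fresh = Launder(::new (storage) int{42});
  return *fresh;
}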

#if defined(PROTOBUF_CUSTOM_VTABLE)
constexpr bool EnableCustomNew() { return true; }
template <typename T>
constexpr bool EnableCustomNewFor() {
  return true;
}
#elif ABSL_HAVE_BUILTIN(__is_bitwise_cloneable)
constexpr bool EnableCustomNew() { return true; }
template <typename T>
constexpr bool EnableCustomNewFor() {
  return __is_bitwise_cloneable(T);
}
#else
constexpr bool EnableCustomNew() { return false; }  // hits: 0
template <typename T>
constexpr bool EnableCustomNewFor() {  // hits: 0
  return false;
}
#endif

constexpr bool IsOss() { return true; }  // hits: 0

// Counter library for debugging internal protobuf logic.
// It allows instrumenting code that has different options (e.g. fast vs. slow
// path) to get visibility into how much we are hitting each path.
// When compiled with -DPROTOBUF_INTERNAL_ENABLE_DEBUG_COUNTERS, the counters
// register an atexit handler to dump the table. Otherwise, they are a noop and
// have no runtime cost.
//
// Usage:
//
// if (do_fast) {
//   PROTOBUF_DEBUG_COUNTER("Foo.Fast").Inc();
//   ...
// } else {
//   PROTOBUF_DEBUG_COUNTER("Foo.Slow").Inc();
//   ...
// }
class PROTOBUF_EXPORT RealDebugCounter {
 public:
  explicit RealDebugCounter(absl::string_view name) { Register(name); }  // hits: 0
  // Lossy increment.
  void Inc() { counter_.store(value() + 1, std::memory_order_relaxed); }  // hits: 0
  size_t value() const { return counter_.load(std::memory_order_relaxed); }  // hits: 0

 private:
  void Register(absl::string_view name);
  std::atomic<size_t> counter_{};
};

// When the feature is not enabled, the type is a noop.
class NoopDebugCounter {
 public:
  explicit constexpr NoopDebugCounter() = default;
  constexpr void Inc() {}  // hits: 0
};

}  // namespace internal
}  // namespace protobuf
}  // namespace google

#include "google/protobuf/port_undef.inc"

#endif  // GOOGLE_PROTOBUF_PORT_H__