Coverage Report

Created: 2024-07-27 06:53

/src/LPM/external.protobuf/include/google/protobuf/port.h
Line
Count
Source (jump to first uncovered line)
1
// Protocol Buffers - Google's data interchange format
2
// Copyright 2008 Google Inc.  All rights reserved.
3
//
4
// Use of this source code is governed by a BSD-style
5
// license that can be found in the LICENSE file or at
6
// https://developers.google.com/open-source/licenses/bsd
7
8
// A common header that is included across all protobuf headers.  We do our best
9
// to avoid #defining any macros here; instead we generally put macros in
10
// port_def.inc and port_undef.inc so they are not visible from outside of
11
// protobuf.
12
13
#ifndef GOOGLE_PROTOBUF_PORT_H__
14
#define GOOGLE_PROTOBUF_PORT_H__
15
16
#include <cassert>
17
#include <cstddef>
18
#include <cstdint>
19
#include <new>
20
#include <string>
21
#include <type_traits>
22
#include <typeinfo>
23
24
25
#include "absl/base/config.h"
26
#include "absl/base/prefetch.h"
27
#include "absl/meta/type_traits.h"
28
#include "absl/strings/string_view.h"
29
#include "absl/types/optional.h"
30
31
// must be last
32
#include "google/protobuf/port_def.inc"
33
34
35
namespace google {
36
namespace protobuf {
37
38
class MessageLite;
39
40
namespace internal {
41
42
template <typename T>
43
4.51k
inline PROTOBUF_ALWAYS_INLINE void StrongPointer(T* var) {
44
4.51k
#if defined(__GNUC__)
45
4.51k
  asm("" : : "r"(var));
46
#else
47
  auto volatile unused = var;
48
  (void)&unused;  // Use address to avoid an extra load of "unused".
49
#endif
50
4.51k
}
51
52
// Similar to the overload above, but optimized for constant inputs.
53
template <typename T, T ptr>
54
4.51k
inline PROTOBUF_ALWAYS_INLINE void StrongPointer() {
55
#if defined(__x86_64__) && defined(__linux__) && !defined(__APPLE__) &&     \
56
    !defined(__ANDROID__) && defined(__clang__) && __clang_major__ >= 19 && \
57
    !defined(PROTOBUF_INTERNAL_TEMPORARY_STRONG_POINTER_OPT_OUT)
58
  // This injects a relocation in the code path without having to run code, but
59
  // we can only do it with a newer clang.
60
  asm(".reloc ., BFD_RELOC_NONE, %p0" ::"Ws"(ptr));
61
#else
62
4.51k
  StrongPointer(ptr);
63
4.51k
#endif
64
4.51k
}
65
66
template <typename T>
67
4.51k
inline PROTOBUF_ALWAYS_INLINE void StrongReferenceToType() {
68
4.51k
  constexpr auto ptr = T::template GetStrongPointerForType<T>();
69
4.51k
#if defined(__cpp_nontype_template_args) && \
70
4.51k
    __cpp_nontype_template_args >= 201411L
71
  // We can only use `ptr` as a template parameter since C++17
72
4.51k
  return StrongPointer<decltype(ptr), ptr>();
73
#else
74
  return StrongPointer(ptr);
75
#endif
76
4.51k
}
77
78
79
// See comments on `AllocateAtLeast` for information on size returning new.
80
struct SizedPtr {
81
  void* p;
82
  size_t n;
83
};
84
85
// Debug hook allowing setting up test scenarios for AllocateAtLeast usage.
86
using AllocateAtLeastHookFn = SizedPtr (*)(size_t, void*);
87
88
// `AllocAtLeastHook` API
89
constexpr bool HaveAllocateAtLeastHook();
90
void SetAllocateAtLeastHook(AllocateAtLeastHookFn fn, void* context = nullptr);
91
92
#if !defined(NDEBUG) && defined(ABSL_HAVE_THREAD_LOCAL) && \
93
    defined(__cpp_inline_variables)
94
95
// Hook data for current thread. These vars must not be accessed directly, use
96
// the 'HaveAllocateAtLeastHook()` and `SetAllocateAtLeastHook()` API instead.
97
inline thread_local AllocateAtLeastHookFn allocate_at_least_hook = nullptr;
98
inline thread_local void* allocate_at_least_hook_context = nullptr;
99
100
constexpr bool HaveAllocateAtLeastHook() { return true; }
101
inline void SetAllocateAtLeastHook(AllocateAtLeastHookFn fn, void* context) {
102
  allocate_at_least_hook = fn;
103
  allocate_at_least_hook_context = context;
104
}
105
106
#else  // !NDEBUG && ABSL_HAVE_THREAD_LOCAL && __cpp_inline_variables
107
108
0
constexpr bool HaveAllocateAtLeastHook() { return false; }
109
0
inline void SetAllocateAtLeastHook(AllocateAtLeastHookFn fn, void* context) {}
110
111
#endif  // !NDEBUG && ABSL_HAVE_THREAD_LOCAL && __cpp_inline_variables
112
113
// Allocates at least `size` bytes. This function follows the c++ language
114
// proposal from D0901R10 (http://wg21.link/D0901R10) and will be implemented
115
// in terms of the new operator new semantics when available. The allocated
116
// memory should be released by a call to `SizedDelete` or `::operator delete`.
117
0
inline SizedPtr AllocateAtLeast(size_t size) {
118
0
#if !defined(NDEBUG) && defined(ABSL_HAVE_THREAD_LOCAL) && \
119
0
    defined(__cpp_inline_variables)
120
0
  if (allocate_at_least_hook != nullptr) {
121
0
    return allocate_at_least_hook(size, allocate_at_least_hook_context);
122
0
  }
123
0
#endif  // !NDEBUG && ABSL_HAVE_THREAD_LOCAL && __cpp_inline_variables
124
0
  return {::operator new(size), size};
125
0
}
126
127
0
inline void SizedDelete(void* p, size_t size) {
128
0
#if defined(__cpp_sized_deallocation)
129
0
  ::operator delete(p, size);
130
0
#else
131
0
  // Avoid -Wunused-parameter
132
0
  (void)size;
133
0
  ::operator delete(p);
134
0
#endif
135
0
}
136
0
inline void SizedArrayDelete(void* p, size_t size) {
137
0
#if defined(__cpp_sized_deallocation)
138
0
  ::operator delete[](p, size);
139
0
#else
140
0
  // Avoid -Wunused-parameter
141
0
  (void)size;
142
0
  ::operator delete[](p);
143
0
#endif
144
0
}
145
146
// Tag type used to invoke the constinit constructor overload of classes
147
// such as ArenaStringPtr and MapFieldBase. Such constructors are internal
148
// implementation details of the library.
149
struct ConstantInitialized {
150
  explicit ConstantInitialized() = default;
151
};
152
153
// Tag type used to invoke the arena constructor overload of classes such
154
// as ExtensionSet and MapFieldLite in aggregate initialization. These
155
// classes typically don't have move/copy constructors, which rules out
156
// explicit initialization in pre-C++17.
157
struct ArenaInitialized {
158
  explicit ArenaInitialized() = default;
159
};
160
161
template <typename To, typename From>
162
inline To DownCast(From* f) {
163
  static_assert(
164
      std::is_base_of<From, typename std::remove_pointer<To>::type>::value,
165
      "illegal DownCast");
166
167
#if PROTOBUF_RTTI
168
  // RTTI: debug mode only!
169
  assert(f == nullptr || dynamic_cast<To>(f) != nullptr);
170
#endif
171
  return static_cast<To>(f);
172
}
173
174
template <typename ToRef, typename From>
175
inline ToRef DownCast(From& f) {
176
  using To = typename std::remove_reference<ToRef>::type;
177
  static_assert(std::is_base_of<From, To>::value, "illegal DownCast");
178
179
#if PROTOBUF_RTTI
180
  // RTTI: debug mode only!
181
  assert(dynamic_cast<To*>(&f) != nullptr);
182
#endif
183
  return *static_cast<To*>(&f);
184
}
185
186
// Looks up the name of `T` via RTTI, if RTTI is available.
187
template <typename T>
188
inline absl::optional<absl::string_view> RttiTypeName() {
189
#if PROTOBUF_RTTI
190
  return typeid(T).name();
191
#else
192
  return absl::nullopt;
193
#endif
194
}
195
196
// Helpers for identifying our supported types.
197
template <typename T>
198
struct is_supported_integral_type
199
    : absl::disjunction<std::is_same<T, int32_t>, std::is_same<T, uint32_t>,
200
                        std::is_same<T, int64_t>, std::is_same<T, uint64_t>,
201
                        std::is_same<T, bool>> {};
202
203
template <typename T>
204
struct is_supported_floating_point_type
205
    : absl::disjunction<std::is_same<T, float>, std::is_same<T, double>> {};
206
207
template <typename T>
208
struct is_supported_string_type
209
    : absl::disjunction<std::is_same<T, std::string>> {};
210
211
template <typename T>
212
struct is_supported_scalar_type
213
    : absl::disjunction<is_supported_integral_type<T>,
214
                        is_supported_floating_point_type<T>,
215
                        is_supported_string_type<T>> {};
216
217
template <typename T>
218
struct is_supported_message_type
219
    : absl::disjunction<std::is_base_of<MessageLite, T>> {
220
  static constexpr auto force_complete_type = sizeof(T);
221
};
222
223
// To prevent sharing cache lines between threads
224
#ifdef __cpp_aligned_new
225
enum { kCacheAlignment = 64 };
226
#else
227
enum { kCacheAlignment = alignof(max_align_t) };  // do the best we can
228
#endif
229
230
// The maximum byte alignment we support.
231
enum { kMaxMessageAlignment = 8 };
232
233
// Returns true if debug string hardening is required
234
0
inline constexpr bool DebugHardenStringValues() {
235
0
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
236
0
  return true;
237
0
#else
238
0
  return false;
239
0
#endif
240
0
}
241
242
// Returns true if debug hardening for clearing oneof message on arenas is
243
// enabled.
244
0
inline constexpr bool DebugHardenClearOneofMessageOnArena() {
245
0
#ifdef NDEBUG
246
0
  return false;
247
0
#else
248
0
  return true;
249
0
#endif
250
0
}
251
252
// Returns true if pointers are 8B aligned, leaving least significant 3 bits
253
// available.
254
0
inline constexpr bool PtrIsAtLeast8BAligned() { return alignof(void*) >= 8; }
255
256
// Prefetch 5 64-byte cache line starting from 7 cache-lines ahead.
257
// Constants are somewhat arbitrary and pretty aggressive, but were
258
// chosen to give a better benchmark results. E.g. this is ~20%
259
// faster, single cache line prefetch is ~12% faster, increasing
260
// decreasing distance makes results 2-4% worse. Important note,
261
// prefetch doesn't require a valid address, so it is ok to prefetch
262
// past the end of message/valid memory, however we are doing this
263
// inside inline asm block, since computing the invalid pointer
264
// is a potential UB. Only insert prefetch once per function.
265
0
inline PROTOBUF_ALWAYS_INLINE void Prefetch5LinesFrom7Lines(const void* ptr) {
266
0
  PROTOBUF_PREFETCH_WITH_OFFSET(ptr, 448);
267
0
  PROTOBUF_PREFETCH_WITH_OFFSET(ptr, 512);
268
0
  PROTOBUF_PREFETCH_WITH_OFFSET(ptr, 576);
269
0
  PROTOBUF_PREFETCH_WITH_OFFSET(ptr, 640);
270
0
  PROTOBUF_PREFETCH_WITH_OFFSET(ptr, 704);
271
0
}
272
273
#if defined(NDEBUG) && ABSL_HAVE_BUILTIN(__builtin_unreachable)
274
[[noreturn]] ABSL_ATTRIBUTE_COLD PROTOBUF_ALWAYS_INLINE inline void
275
0
Unreachable() {
276
0
  __builtin_unreachable();
277
0
}
278
#elif ABSL_HAVE_BUILTIN(__builtin_FILE) && ABSL_HAVE_BUILTIN(__builtin_LINE)
279
[[noreturn]] ABSL_ATTRIBUTE_COLD inline void Unreachable(
280
    const char* file = __builtin_FILE(), int line = __builtin_LINE()) {
281
  protobuf_assumption_failed("Unreachable", file, line);
282
}
283
#else
284
[[noreturn]] ABSL_ATTRIBUTE_COLD inline void Unreachable() {
285
  protobuf_assumption_failed("Unreachable", "", 0);
286
}
287
#endif
288
289
#ifdef PROTOBUF_TSAN
290
// TODO: it would be preferable to use __tsan_external_read/
291
// __tsan_external_write, but they can cause dlopen issues.
292
template <typename T>
293
inline PROTOBUF_ALWAYS_INLINE void TSanRead(const T* impl) {
294
  char protobuf_tsan_dummy =
295
      *reinterpret_cast<const char*>(&impl->_tsan_detect_race);
296
  asm volatile("" : "+r"(protobuf_tsan_dummy));
297
}
298
299
// We currently use a dedicated member for TSan checking so the value of this
300
// member is not important. We can unconditionally write to it without affecting
301
// correctness of the rest of the class.
302
template <typename T>
303
inline PROTOBUF_ALWAYS_INLINE void TSanWrite(T* impl) {
304
  *reinterpret_cast<char*>(&impl->_tsan_detect_race) = 0;
305
}
306
#else
307
9.24M
inline PROTOBUF_ALWAYS_INLINE void TSanRead(const void*) {}
308
23.6k
inline PROTOBUF_ALWAYS_INLINE void TSanWrite(const void*) {}
309
#endif
310
311
// This trampoline allows calling from codegen without needing a #include to
312
// absl. It simplifies IWYU and deps.
313
52.0M
inline void PrefetchToLocalCache(const void* ptr) {
314
52.0M
  absl::PrefetchToLocalCache(ptr);
315
52.0M
}
316
317
}  // namespace internal
318
}  // namespace protobuf
319
}  // namespace google
320
321
#include "google/protobuf/port_undef.inc"
322
323
#endif  // GOOGLE_PROTOBUF_PORT_H__