Coverage Report

Created: 2025-12-12 07:27

/src/hermes/include/hermes/VM/AlignedHeapSegment.h
  Line|  Count|Source
     1|       |/*
     2|       | * Copyright (c) Meta Platforms, Inc. and affiliates.
     3|       | *
     4|       | * This source code is licensed under the MIT license found in the
     5|       | * LICENSE file in the root directory of this source tree.
     6|       | */
     7|       |
     8|       |#ifndef HERMES_VM_SEGMENT_H
     9|       |#define HERMES_VM_SEGMENT_H
    10|       |
    11|       |#include "hermes/Support/OSCompat.h"
    12|       |#include "hermes/VM/AdviseUnused.h"
    13|       |#include "hermes/VM/AlignedStorage.h"
    14|       |#include "hermes/VM/AllocResult.h"
    15|       |#include "hermes/VM/AllocSource.h"
    16|       |#include "hermes/VM/CardTableNC.h"
    17|       |#include "hermes/VM/GCBase.h"
    18|       |#include "hermes/VM/GCCell.h"
    19|       |#include "hermes/VM/HeapAlign.h"
    20|       |#include "hermes/VM/MarkBitArrayNC.h"
    21|       |#include "hermes/VM/PointerBase.h"
    22|       |#include "hermes/VM/SegmentInfo.h"
    23|       |
    24|       |#include "llvh/Support/MathExtras.h"
    25|       |
    26|       |#include <cstdint>
    27|       |#include <vector>
    28|       |
    29|       |namespace hermes {
    30|       |namespace vm {
    31|       |
    32|       |// In this class:
    33|       |// TODO (T25527350): Debug Dump
    34|       |// TODO (T25527350): Heap Moving
    35|       |
    36|       |/// An \c AlignedHeapSegment is a contiguous chunk of memory aligned to its own
    37|       |/// storage size (which is a fixed power of two number of bytes).  The storage
    38|       |/// is further split up according to the diagram below:
    39|       |///
    40|       |/// +----------------------------------------+
    41|       |/// | (1) Card Table                         |
    42|       |/// +----------------------------------------+
    43|       |/// | (2) Mark Bit Array                     |
    44|       |/// +----------------------------------------+
    45|       |/// | (3) Allocation Space                   |
    46|       |/// |                                        |
    47|       |/// | ...                                    |
    48|       |/// |                                        |
    49|       |/// | (End)                                  |
    50|       |/// +----------------------------------------+
    51|       |///
    52|       |/// The tables in (1) and (2) cover the contiguous allocation space (3)
    53|       |/// into which GCCells are bump allocated.
    54|       |class AlignedHeapSegment {
    55|       | public:
    56|       |  explicit AlignedHeapSegment(AlignedStorage storage);
    57|       |
    58|       |  /// Construct a null AlignedHeapSegment (one that does not own memory).
    59|    113|  AlignedHeapSegment() = default;
    60|    791|  AlignedHeapSegment(AlignedHeapSegment &&) = default;
    61|    226|  AlignedHeapSegment &operator=(AlignedHeapSegment &&) = default;
    62|       |
    63|       |  ~AlignedHeapSegment();
    64|       |
    65|       |  /// Contents of the memory region managed by this segment.
    66|       |  class Contents {
    67|       |    friend class AlignedHeapSegment;
    68|       |
    69|       |    /// Note that because of the Contents object, the first few bytes of the
    70|       |    /// card table are unused; we instead use them to store a small SegmentInfo
    71|       |    /// struct.
    72|       |    CardTable cardTable_;
    73|       |
    74|       |    MarkBitArrayNC markBitArray_;
    75|       |
    76|       |    static constexpr size_t kMetadataSize =
    77|       |        sizeof(cardTable_) + sizeof(markBitArray_);
    78|       |    /// Padding to ensure that the guard page is aligned to a page boundary.
    79|       |    static constexpr size_t kGuardPagePadding =
    80|       |        llvh::alignTo<pagesize::kExpectedPageSize>(kMetadataSize) -
    81|       |        kMetadataSize;
    82|       |
    83|       |    /// Memory made inaccessible through protectGuardPage, for security and
    84|       |    /// earlier detection of corruption. Padded to contain at least one full
    85|       |    /// aligned page.
    86|       |    char paddedGuardPage_[pagesize::kExpectedPageSize + kGuardPagePadding];
    87|       |
    88|       |    static constexpr size_t kMetadataAndGuardSize =
    89|       |        kMetadataSize + sizeof(paddedGuardPage_);
    90|       |
    91|       |    /// The first byte of the allocation region, which extends past the "end" of
    92|       |    /// the struct, to the end of the memory region that contains it.
    93|       |    char allocRegion_[1];
    94|       |
    95|       |   public:
    96|       |    /// Set the protection mode of paddedGuardPage_ (if system page size allows
    97|       |    /// it).
    98|       |    void protectGuardPage(oscompat::ProtectMode mode);
    99|       |  };
   100|       |
   101|       |  static_assert(
   102|       |      offsetof(Contents, paddedGuardPage_) == Contents::kMetadataSize,
   103|       |      "Should not need padding after metadata.");
   104|       |
   105|       |  /// The total space at the start of the CardTable taken up by the metadata and
   106|       |  /// guard page in the Contents struct.
   107|       |  static constexpr size_t kCardTableUnusedPrefixBytes =
   108|       |      Contents::kMetadataAndGuardSize / CardTable::kHeapBytesPerCardByte;
   109|       |  static_assert(
   110|       |      sizeof(SegmentInfo) < kCardTableUnusedPrefixBytes,
   111|       |      "SegmentInfo does not fit in available unused CardTable space.");
   112|       |
   113|       |  /// The offset of the allocatable region from the beginning of a segment.
   114|       |  static constexpr size_t offsetOfAllocRegion{offsetof(Contents, allocRegion_)};
   115|       |
   116|       |  static_assert(
   117|       |      isSizeHeapAligned(offsetOfAllocRegion),
   118|       |      "Allocation region must start at a heap aligned offset");
   119|       |
   120|       |  static_assert(
   121|       |      (offsetof(Contents, paddedGuardPage_) + Contents::kGuardPagePadding) %
   122|       |              pagesize::kExpectedPageSize ==
   123|       |          0,
   124|       |      "Guard page must be aligned to likely page size");
   125|       |
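
The static_asserts above encode the guard-page arithmetic: kGuardPagePadding rounds kMetadataSize up to the next page boundary, so the protected page inside paddedGuardPage_ starts exactly at a multiple of the page size. A standalone sketch of that arithmetic, with an assumed 4 KiB page and an invented metadata size; alignTo here is a hand-written stand-in for llvh::alignTo:

    #include <cstddef>

    constexpr size_t kPageSize = 4096;      // assumed page size
    constexpr size_t kMetadataSize = 2560;  // invented: card table + mark bits

    // Round x up to the next multiple of n (what llvh::alignTo<n>(x) does).
    constexpr size_t alignTo(size_t x, size_t n) {
      return (x + n - 1) / n * n;
    }

    constexpr size_t kGuardPagePadding =
        alignTo(kMetadataSize, kPageSize) - kMetadataSize;

    // Mirrors the header's assert: the guard page starts on a page boundary.
    static_assert((kMetadataSize + kGuardPagePadding) % kPageSize == 0,
                  "guard page begins on a page boundary");
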
   126|       |  class HeapCellIterator : public llvh::iterator_facade_base<
   127|       |                               HeapCellIterator,
   128|       |                               std::forward_iterator_tag,
   129|       |                               GCCell *> {
   130|       |   public:
   131|  1.06k|    HeapCellIterator(GCCell *cell) : cell_(cell) {}
   132|       |
   133|  5.44M|    bool operator==(const HeapCellIterator &R) const {
   134|  5.44M|      return cell_ == R.cell_;
   135|  5.44M|    }
   136|       |
   137|  5.44M|    GCCell *const &operator*() const {
   138|  5.44M|      return cell_;
   139|  5.44M|    }
   140|       |
   141|  5.44M|    HeapCellIterator &operator++() {
   142|  5.44M|      cell_ = cell_->nextCell();
   143|  5.44M|      return *this;
   144|  5.44M|    }
   145|       |
   146|       |   private:
   147|       |    GCCell *cell_{nullptr};
   148|       |  };
   149|       |
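
HeapCellIterator advances by asking each GCCell for the cell that follows it (nextCell()), so a walk from start() to level() visits every allocated cell in address order. A hedged sketch of consuming the cells() range declared further down, assuming GCCell exposes a getAllocatedSize() accessor:

    #include "hermes/VM/AlignedHeapSegment.h"

    using namespace hermes::vm;

    // Sum the sizes of all cells bump-allocated so far in `seg`; for a fully
    // walked segment this should agree with seg.used().
    size_t bytesInCells(AlignedHeapSegment &seg) {
      size_t total = 0;
      for (GCCell *cell : seg.cells())
        total += cell->getAllocatedSize();
      return total;
    }
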
   150|       |  /// Attempt an allocation of the given size in the segment.  If there is
   151|       |  /// sufficient space, casts the space as a GCCell, and returns an uninitialized
   152|       |  /// pointer to that cell (with success = true).  If there is not sufficient
   153|       |  /// space, returns {nullptr, false}.
   154|       |  inline AllocResult alloc(uint32_t size);
   155|       |
   156|       |  /// Given the \p lowLim of some valid AlignedStorage's memory region, returns
   157|       |  /// a pointer to the AlignedHeapSegment::Contents laid out in that storage,
   158|       |  /// assuming it exists.
   159|       |  inline static Contents *contents(void *lowLim);
   160|       |  inline static const Contents *contents(const void *lowLim);
   161|       |
   162|       |  /// Given a \p ptr into the memory region of some valid AlignedStorage \c s,
   163|       |  /// returns a pointer to the CardTable covering the segment containing the
   164|       |  /// pointer.
   165|       |  ///
   166|       |  /// \pre There exists a currently alive heap that claims to contain \c ptr.
   167|       |  inline static CardTable *cardTableCovering(const void *ptr);
   168|       |
   169|       |  /// Given a \p ptr into the memory region of some valid AlignedStorage \c s,
   170|       |  /// returns a pointer to the MarkBitArrayNC covering the segment containing
   171|       |  /// the pointer.
   172|       |  ///
   173|       |  /// \pre There exists a currently alive heap that claims to contain \c ptr.
   174|       |  inline static MarkBitArrayNC *markBitArrayCovering(const void *ptr);
   175|       |
   176|       |  /// Mark the given \p cell.  Assumes the given address is a valid heap object.
   177|       |  inline static void setCellMarkBit(const GCCell *cell);
   178|       |
   179|       |  /// Return whether the given \p cell is marked.  Assumes the given address is
   180|       |  /// a valid heap object.
   181|       |  inline static bool getCellMarkBit(const GCCell *cell);
   182|       |
   183|       |  /// The largest size the allocation region of an aligned heap segment could
   184|       |  /// be.
   185|       |  inline static constexpr size_t maxSize();
   186|       |
   187|       |  /// The size of the allocation region in this aligned heap segment. (Static
   188|       |  /// override of \c AlignedStorage::size()).
   189|       |  inline size_t size() const;
   190|       |
   191|       |  /// The number of bytes in the segment that are currently allocated.
   192|       |  inline size_t used() const;
   193|       |
   194|       |  /// The number of bytes in the segment that are available for allocation.
   195|       |  inline size_t available() const;
   196|       |
   197|       |  /// Returns the address that is the lower bound of the segment.
   198|       |  /// \post The returned pointer is guaranteed to be aligned to a segment
   199|       |  ///   boundary.
   200|       |  inline char *lowLim() const;
   201|       |
   202|       |  /// Returns the address that is the upper bound of the segment.
   203|       |  inline char *hiLim() const;
   204|       |
   205|       |  /// Returns the address at which the first allocation in this segment would
   206|       |  /// occur.
   207|       |  /// Disable UB sanitization because 'this' may be null during the tests.
   208|       |  inline char *start() const LLVM_NO_SANITIZE("undefined");
   209|       |
   210|       |  /// Returns the first address after the region in which allocations can occur,
   211|       |  /// taking external memory credits into account (they decrease the effective
   212|       |  /// end).
   213|       |  inline char *effectiveEnd() const;
   214|       |
   215|       |  /// The external memory charge of the generation owning this segment may have
   216|       |  /// changed; set the effective end of the segment to the given \p
   217|       |  /// effectiveEnd, which is required to be a valid effective end for the
   218|       |  /// segment.
   219|       |  void setEffectiveEnd(char *effectiveEnd);
   220|       |
   221|       |  /// Clear any external memory charge for this segment.  This has the effect of
   222|       |  /// equating the effective end to the real end.
   223|       |  void clearExternalMemoryCharge();
   224|       |
   225|       |  /// Returns the first address after the region in which allocations may occur,
   226|       |  /// ignoring external memory credits.
   227|       |  inline char *end() const;
   228|       |
   229|       |  /// Returns the address at which the next allocation, if any, will occur.
   230|       |  inline char *level() const;
   231|       |
   232|       |  /// Returns an iterator range corresponding to the cells in this segment.
   233|       |  inline llvh::iterator_range<HeapCellIterator> cells();
   234|       |
   235|       |  /// Returns whether \p a and \p b are contained in the same
   236|       |  /// AlignedHeapSegment.
   237|       |  inline static bool containedInSame(const void *a, const void *b);
   238|       |
   239|       |  /// Return a reference to the card table covering the memory region managed by
   240|       |  /// this segment.
   241|       |  /// Disable sanitization because 'this' may be null in the tests.
   242|       |  inline CardTable &cardTable() const LLVM_NO_SANITIZE("null");
   243|       |
   244|       |  /// Return a reference to the mark bit array covering the memory region
   245|       |  /// managed by this segment.
   246|       |  inline MarkBitArrayNC &markBitArray() const;
   247|       |
   248|       |  explicit inline operator bool() const;
   249|       |
   250|       |  /// \return \c true if and only if \p ptr is within the memory range owned by
   251|       |  ///     this \c AlignedHeapSegment.
   252|       |  inline bool contains(const void *ptr) const;
   253|       |
   254|       |  /// Mark a set of pages as unused.
   255|       |  ///
   256|       |  /// \pre start and end must be aligned to the page boundary.
   257|       |  void markUnused(char *start, char *end);
   258|       |
   259|       |  /// Clear allocations after \p level in this segment.
   260|       |  ///
   261|       |  /// \p MU Indicate whether the newly freed pages should be returned to the OS.
   262|       |  ///
   263|       |  /// \post this->level() == level.
   264|       |  template <AdviseUnused MU = AdviseUnused::No>
   265|       |  void setLevel(char *level);
   266|       |
   267|       |  /// Clear allocations in this segment.
   268|       |  ///
   269|       |  /// \p MU Indicate whether the newly freed pages should be returned to the OS.
   270|       |  ///
   271|       |  /// \post this->level() == this->start();
   272|       |  template <AdviseUnused MU = AdviseUnused::No>
   273|       |  void resetLevel();
   274|       |
   275|       |#ifndef NDEBUG
   276|       |  /// Returns true iff \p lvl could refer to a level within this segment.
   277|       |  bool dbgContainsLevel(const void *lvl) const;
   278|       |
   279|       |  /// Returns true iff \p p is located within a valid section of the segment,
   280|       |  /// and not at dead memory.
   281|       |  bool validPointer(const void *p) const;
   282|       |
   283|       |  /// Set the contents of the segment to a dead value.
   284|       |  void clear();
   285|       |  /// Set the given range [start, end) to a dead value.
   286|       |  static void clear(char *start, char *end);
   287|       |  /// Checks that dead values are present in the [start, end) range.
   288|       |  static void checkUnwritten(char *start, char *end);
   289|       |#endif
   290|       |
   291|       | protected:
   292|       |  /// Return a pointer to the contents of the memory region managed by this
   293|       |  /// segment.
   294|       |  inline Contents *contents() const;
   295|       |
   296|       |  AlignedStorage storage_;
   297|       |
   298|       |  char *level_{start()};
   299|       |
   300|       |  /// The upper limit of the space that we can currently allocate into;
   301|       |  /// this may be decreased when externally allocated memory is credited to
   302|       |  /// the generation owning this space.
   303|       |  char *effectiveEnd_{end()};
   304|       |};
   305|       |
   306|  3.02M|AllocResult AlignedHeapSegment::alloc(uint32_t size) {
   307|  3.02M|  assert(lowLim() != nullptr && "Cannot allocate in a null segment");
   308|  3.02M|  assert(size >= sizeof(GCCell) && "cell must be at least as large as GCCell");
   309|  3.02M|  assert(isSizeHeapAligned(size) && "size must be heap aligned");
   310|       |
   311|  3.02M|  char *cellPtr; // Initialized in the if below.
   312|       |
   313|       |  // On 64-bit systems, we know that we can't allocate a size large enough to
   314|       |  // cause a pointer value to overflow. This is not true on 32-bit systems. This
   315|       |  // test should be decided statically.
   316|       |  // TODO: Is there a portable way of expressing this in the preprocessor?
   317|  3.02M|  if (sizeof(void *) == 8) {
   318|       |    // Calculate the new level_ once.
   319|  3.02M|    char *newLevel = level_ + size;
   320|  3.02M|    if (LLVM_UNLIKELY(newLevel > effectiveEnd())) {
   321|     50|      return {nullptr, false};
   322|     50|    }
   323|  3.02M|    cellPtr = level_;
   324|  3.02M|    level_ = newLevel;
   325|  3.02M|  } else {
   326|      0|    if (LLVM_UNLIKELY(available() < size)) {
   327|      0|      return {nullptr, false};
   328|      0|    }
   329|      0|    cellPtr = level_;
   330|      0|    level_ += size;
   331|      0|  }
   332|       |
   333|  3.02M|  __asan_unpoison_memory_region(cellPtr, size);
   334|  3.02M|#ifndef NDEBUG
   335|  3.02M|  checkUnwritten(cellPtr, cellPtr + size);
   336|  3.02M|#endif
   337|       |
   338|  3.02M|  auto *cell = reinterpret_cast<GCCell *>(cellPtr);
   339|  3.02M|  return {cell, true};
   340|  3.02M|}
   341|       |
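
alloc() is the bump-pointer fast path: on 64-bit targets, one addition and one compare against effectiveEnd(); on 32-bit targets the available() < size form avoids computing level_ + size, which could wrap around the address space. A usage sketch, assuming AllocResult carries the {pointer, success} pair in the order the brace-initialized returns above suggest:

    #include "hermes/VM/AlignedHeapSegment.h"

    using namespace hermes::vm;

    // Try to carve one cell out of `seg`. Per the asserts in alloc(), `size`
    // must already be heap aligned and at least sizeof(GCCell).
    GCCell *tryAllocCell(AlignedHeapSegment &seg, uint32_t size) {
      AllocResult res = seg.alloc(size);
      if (!res.success)
        return nullptr; // full: caller would collect or pick another segment
      // The returned memory is uninitialized; construct the object in place.
      return static_cast<GCCell *>(res.ptr);
    }
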
   342|       |/*static*/
   343|   341k|MarkBitArrayNC *AlignedHeapSegment::markBitArrayCovering(const void *ptr) {
   344|   341k|  return &contents(AlignedStorage::start(ptr))->markBitArray_;
   345|   341k|}
   346|       |
   347|       |/*static*/
   348|   260k|void AlignedHeapSegment::setCellMarkBit(const GCCell *cell) {
   349|   260k|  MarkBitArrayNC *markBits = markBitArrayCovering(cell);
   350|   260k|  size_t ind = markBits->addressToIndex(cell);
   351|   260k|  markBits->mark(ind);
   352|   260k|}
   353|       |
   354|       |/*static*/
   355|  80.9k|bool AlignedHeapSegment::getCellMarkBit(const GCCell *cell) {
   356|  80.9k|  MarkBitArrayNC *markBits = markBitArrayCovering(cell);
   357|  80.9k|  size_t ind = markBits->addressToIndex(cell);
   358|  80.9k|  return markBits->at(ind);
   359|  80.9k|}
   360|       |
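
The ...Covering() helpers work because a segment is aligned to its own power-of-two storage size: masking the low bits off any interior pointer recovers the segment base, where the Contents metadata lives. A sketch of what AlignedStorage::start(ptr) presumably computes, with an assumed 4 MiB storage size:

    #include <cstdint>

    constexpr uintptr_t kStorageSize = 4u << 20; // assumed power-of-two size

    // Clear the low log2(kStorageSize) bits: every interior pointer of a
    // segment maps back to the same base address, so the card table and mark
    // bit array are reachable from the pointer alone, with no lookup table.
    inline char *segmentStart(const void *ptr) {
      return reinterpret_cast<char *>(
          reinterpret_cast<uintptr_t>(ptr) & ~(kStorageSize - 1));
    }
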
   361|       |/* static */ AlignedHeapSegment::Contents *AlignedHeapSegment::contents(
   362|  2.19M|    void *lowLim) {
   363|  2.19M|  return reinterpret_cast<Contents *>(lowLim);
   364|  2.19M|}
   365|       |
   366|       |/* static */ const AlignedHeapSegment::Contents *AlignedHeapSegment::contents(
   367|      0|    const void *lowLim) {
   368|      0|  return reinterpret_cast<const Contents *>(lowLim);
   369|      0|}
   370|       |
   371|  1.84M|/* static */ CardTable *AlignedHeapSegment::cardTableCovering(const void *ptr) {
   372|  1.84M|  return &AlignedHeapSegment::contents(AlignedStorage::start(ptr))->cardTable_;
   373|  1.84M|}
   374|       |
   375|   603k|/* static */ constexpr size_t AlignedHeapSegment::maxSize() {
   376|   603k|  return AlignedStorage::size() - offsetof(Contents, allocRegion_);
   377|   603k|}
   378|       |
   379|      0|size_t AlignedHeapSegment::size() const {
   380|      0|  return end() - start();
   381|      0|}
   382|       |
   383|    217|size_t AlignedHeapSegment::used() const {
   384|    217|  return level() - start();
   385|    217|}
   386|       |
   387|  1.29k|size_t AlignedHeapSegment::available() const {
   388|  1.29k|  return effectiveEnd() - level();
   389|  1.29k|}
   390|       |
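
size(), used(), and available() split the allocation region at level_: everything below it is used, everything between it and effectiveEnd_ is available. The two parts only sum to size() while no external memory charge has pulled effectiveEnd_ below end(). A sketch of that invariant, for a hypothetical segment with no external charge:

    #include <cassert>
    #include "hermes/VM/AlignedHeapSegment.h"

    using namespace hermes::vm;

    void checkPartition(AlignedHeapSegment &seg) {
      // Holds only while effectiveEnd() == end(); crediting external memory
      // via setEffectiveEnd() shrinks available() without changing used().
      assert(seg.used() + seg.available() == seg.size());
    }
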
   391|  31.1M|char *AlignedHeapSegment::lowLim() const {
   392|  31.1M|  return storage_.lowLim();
   393|  31.1M|}
   394|       |
   395|    226|char *AlignedHeapSegment::hiLim() const {
   396|    226|  return storage_.hiLim();
   397|    226|}
   398|       |
   399|  5.10k|char *AlignedHeapSegment::start() const {
   400|  5.10k|  return contents()->allocRegion_;
   401|  5.10k|}
   402|       |
   403|  3.02M|char *AlignedHeapSegment::effectiveEnd() const {
   404|  3.02M|  return effectiveEnd_;
   405|  3.02M|}
   406|       |
   407|  2.12k|char *AlignedHeapSegment::end() const {
   408|  2.12k|  return start() + maxSize();
   409|  2.12k|}
   410|       |
   411|  3.59k|char *AlignedHeapSegment::level() const {
   412|  3.59k|  return level_;
   413|  3.59k|}
   414|       |
   415|       |llvh::iterator_range<AlignedHeapSegment::HeapCellIterator>
   416|    532|AlignedHeapSegment::cells() {
   417|    532|  return {
   418|    532|      HeapCellIterator(reinterpret_cast<GCCell *>(start())),
   419|    532|      HeapCellIterator(reinterpret_cast<GCCell *>(level()))};
   420|    532|}
   421|       |
   422|       |/* static */
   423|      0|bool AlignedHeapSegment::containedInSame(const void *a, const void *b) {
   424|      0|  return AlignedStorage::containedInSame(a, b);
   425|      0|}
   426|       |
   427|    293|CardTable &AlignedHeapSegment::cardTable() const {
   428|    293|  return contents()->cardTable_;
   429|    293|}
   430|       |
   431|    435|MarkBitArrayNC &AlignedHeapSegment::markBitArray() const {
   432|    435|  return contents()->markBitArray_;
   433|    435|}
   434|       |
   435|  6.73k|AlignedHeapSegment::Contents *AlignedHeapSegment::contents() const {
   436|  6.73k|  return contents(lowLim());
   437|  6.73k|}
   438|       |
   439|    558|AlignedHeapSegment::operator bool() const {
   440|    558|  return static_cast<bool>(storage_);
   441|    558|}
   442|       |
   443|  7.18M|bool AlignedHeapSegment::contains(const void *ptr) const {
   444|  7.18M|  return storage_.contains(ptr);
   445|  7.18M|}
   446|       |
   447|       |} // namespace vm
   448|       |} // namespace hermes
   449|       |
   450|       |#endif // HERMES_VM_SEGMENT_H
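
Taken together, the mark-bit helpers let a collector flip and test a cell's bit from the cell pointer alone, with the segment metadata located by the masking shown earlier. A final hedged sketch (obtaining a live GCCell requires a running GC and storage provider, so one is simply assumed here):

    #include <cassert>
    #include "hermes/VM/AlignedHeapSegment.h"

    using namespace hermes::vm;

    void markAndCheck(const GCCell *cell) {
      AlignedHeapSegment::setCellMarkBit(cell);
      assert(AlignedHeapSegment::getCellMarkBit(cell) && "bit was just set");
    }
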