LCOV - code coverage report
Current view: top level - src/heap - mark-compact.h (source / functions)
Test: app.info
Date: 2019-04-17

              Hit   Total   Coverage
Lines:         81     103     78.6 %
Functions:     22      39     56.4 %

          Line data    Source code
       1             : // Copyright 2012 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #ifndef V8_HEAP_MARK_COMPACT_H_
       6             : #define V8_HEAP_MARK_COMPACT_H_
       7             : 
       8             : #include <atomic>
       9             : #include <vector>
      10             : 
      11             : #include "src/heap/concurrent-marking.h"
      12             : #include "src/heap/marking.h"
      13             : #include "src/heap/objects-visiting.h"
      14             : #include "src/heap/spaces.h"
      15             : #include "src/heap/sweeper.h"
      16             : #include "src/heap/worklist.h"
      17             : #include "src/objects/heap-object.h"   // For Worklist<HeapObject, ...>
      18             : #include "src/objects/js-weak-refs.h"  // For Worklist<WeakCell, ...>
      19             : 
      20             : namespace v8 {
      21             : namespace internal {
      22             : 
      23             : // Forward declarations.
      24             : class EvacuationJobTraits;
      25             : class HeapObjectVisitor;
      26             : class ItemParallelJob;
      27             : class MigrationObserver;
      28             : class RecordMigratedSlotVisitor;
      29             : class UpdatingItem;
      30             : class YoungGenerationMarkingVisitor;
      31             : 
      32             : template <typename ConcreteState, AccessMode access_mode>
      33             : class MarkingStateBase {
      34             :  public:
      35             :   V8_INLINE MarkBit MarkBitFrom(HeapObject obj) {
      36     1972740 :     return MarkBitFrom(MemoryChunk::FromHeapObject(obj), obj->ptr());
      37             :   }
      38             : 
      39             :   // {addr} may be tagged or aligned.
      40             :   V8_INLINE MarkBit MarkBitFrom(MemoryChunk* p, Address addr) {
      41     1972740 :     return static_cast<ConcreteState*>(this)->bitmap(p)->MarkBitFromIndex(
      42    11004857 :         p->AddressToMarkbitIndex(addr));
      43             :   }
      44             : 
      45           0 :   Marking::ObjectColor Color(HeapObject obj) {
      46           0 :     return Marking::Color(MarkBitFrom(obj));
      47             :   }
      48             : 
      49             :   V8_INLINE bool IsImpossible(HeapObject obj) {
      50             :     return Marking::IsImpossible<access_mode>(MarkBitFrom(obj));
      51             :   }
      52             : 
      53             :   V8_INLINE bool IsBlack(HeapObject obj) {
      54             :     return Marking::IsBlack<access_mode>(MarkBitFrom(obj));
      55             :   }
      56             : 
      57             :   V8_INLINE bool IsWhite(HeapObject obj) {
      58             :     return Marking::IsWhite<access_mode>(MarkBitFrom(obj));
      59             :   }
      60             : 
      61             :   V8_INLINE bool IsGrey(HeapObject obj) {
      62             :     return Marking::IsGrey<access_mode>(MarkBitFrom(obj));
      63             :   }
      64             : 
      65             :   V8_INLINE bool IsBlackOrGrey(HeapObject obj) {
      66             :     return Marking::IsBlackOrGrey<access_mode>(MarkBitFrom(obj));
      67             :   }
      68             : 
      69             :   V8_INLINE bool WhiteToGrey(HeapObject obj);
      70             :   V8_INLINE bool WhiteToBlack(HeapObject obj);
      71             :   V8_INLINE bool GreyToBlack(HeapObject obj);
      72             : 
      73           0 :   void ClearLiveness(MemoryChunk* chunk) {
      74             :     static_cast<ConcreteState*>(this)->bitmap(chunk)->Clear();
      75             :     static_cast<ConcreteState*>(this)->SetLiveBytes(chunk, 0);
      76           0 :   }
      77             : };
      78             : 
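Editor's sketch (not part of the original header): MarkingStateBase is a CRTP base class, so a concrete marking state only supplies bitmap() and the live-byte accessors; the color predicates above (IsBlack, IsWhite, GreyToBlack, ...) are inherited and resolve bitmap() statically through the ConcreteState parameter, avoiding virtual dispatch on the marking fast path. A minimal concrete state, modeled on MajorNonAtomicMarkingState further below, might look like:

    class SketchMarkingState final
        : public MarkingStateBase<SketchMarkingState, AccessMode::NON_ATOMIC> {
     public:
      ConcurrentBitmap<AccessMode::NON_ATOMIC>* bitmap(
          const MemoryChunk* chunk) const {
        return chunk->marking_bitmap<AccessMode::NON_ATOMIC>();
      }
      void IncrementLiveBytes(MemoryChunk* chunk, intptr_t by) {
        chunk->live_byte_count_ += by;
      }
      void SetLiveBytes(MemoryChunk* chunk, intptr_t value) {
        chunk->live_byte_count_ = value;
      }
    };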
      79             : class MarkBitCellIterator {
      80             :  public:
      81     1125395 :   MarkBitCellIterator(MemoryChunk* chunk, Bitmap* bitmap) : chunk_(chunk) {
      82             :     last_cell_index_ =
      83     1125395 :         Bitmap::IndexToCell(chunk_->AddressToMarkbitIndex(chunk_->area_end()));
      84     1125395 :     cell_base_ = chunk_->address();
      85             :     cell_index_ =
      86     1125395 :         Bitmap::IndexToCell(chunk_->AddressToMarkbitIndex(cell_base_));
      87     1125395 :     cells_ = bitmap->cells();
      88             :   }
      89             : 
      90             :   inline bool Done() { return cell_index_ >= last_cell_index_; }
      91             : 
      92             :   inline bool HasNext() { return cell_index_ < last_cell_index_ - 1; }
      93             : 
      94             :   inline MarkBit::CellType* CurrentCell() {
      95             :     DCHECK_EQ(cell_index_, Bitmap::IndexToCell(Bitmap::CellAlignIndex(
      96             :                                chunk_->AddressToMarkbitIndex(cell_base_))));
      97   377970826 :     return &cells_[cell_index_];
      98             :   }
      99             : 
     100             :   inline Address CurrentCellBase() {
     101             :     DCHECK_EQ(cell_index_, Bitmap::IndexToCell(Bitmap::CellAlignIndex(
     102             :                                chunk_->AddressToMarkbitIndex(cell_base_))));
     103             :     return cell_base_;
     104             :   }
     105             : 
     106             :   V8_WARN_UNUSED_RESULT inline bool Advance() {
     107   287605780 :     cell_base_ += Bitmap::kBitsPerCell * kTaggedSize;
     108   287605780 :     return ++cell_index_ != last_cell_index_;
     109             :   }
     110             : 
     111             :   inline bool Advance(unsigned int new_cell_index) {
     112   580831173 :     if (new_cell_index != cell_index_) {
     113             :       DCHECK_GT(new_cell_index, cell_index_);
     114             :       DCHECK_LE(new_cell_index, last_cell_index_);
     115    90176181 :       unsigned int diff = new_cell_index - cell_index_;
     116    91302275 :       cell_index_ = new_cell_index;
     117    91302275 :       cell_base_ += diff * (Bitmap::kBitsPerCell * kTaggedSize);
     118             :       return true;
     119             :     }
     120             :     return false;
     121             :   }
     122             : 
     123             :   // Returns the next mark bit cell, or 0 if there is none.
     124             :   inline MarkBit::CellType PeekNext() {
     125             :     if (HasNext()) {
     126             :       return cells_[cell_index_ + 1];
     127             :     }
     128             :     return 0;
     129             :   }
     130             : 
     131             :  private:
     132             :   MemoryChunk* chunk_;
     133             :   MarkBit::CellType* cells_;
     134             :   unsigned int last_cell_index_;
     135             :   unsigned int cell_index_;
     136             :   Address cell_base_;
     137             : };
     138             : 
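Editor's sketch (not part of the original header): the iterator above walks a chunk's mark bitmap one cell (Bitmap::kBitsPerCell mark bits) at a time. A typical loop, using only the interface declared above:

    void ScanMarkBits(MemoryChunk* chunk, Bitmap* bitmap) {
      for (MarkBitCellIterator it(chunk, bitmap); !it.Done();) {
        MarkBit::CellType* cell = it.CurrentCell();
        Address cell_base = it.CurrentCellBase();
        // ... interpret the set bits of *cell as live-object positions
        //     relative to |cell_base| ...
        if (!it.Advance()) break;  // Advance() is V8_WARN_UNUSED_RESULT
      }
    }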
     139             : enum LiveObjectIterationMode {
     140             :   kBlackObjects,
     141             :   kGreyObjects,
     142             :   kAllLiveObjects
     143             : };
     144             : 
     145             : template <LiveObjectIterationMode mode>
     146             : class LiveObjectRange {
     147             :  public:
     148             :   class iterator {
     149             :    public:
     150             :     using value_type = std::pair<HeapObject, int /* size */>;
     151             :     using pointer = const value_type*;
     152             :     using reference = const value_type&;
     153             :     using iterator_category = std::forward_iterator_tag;
     154             : 
     155             :     inline iterator(MemoryChunk* chunk, Bitmap* bitmap, Address start);
     156             : 
     157             :     inline iterator& operator++();
     158             :     inline iterator operator++(int);
     159             : 
     160             :     bool operator==(iterator other) const {
     161             :       return current_object_ == other.current_object_;
     162             :     }
     163             : 
     164             :     bool operator!=(iterator other) const { return !(*this == other); }
     165             : 
     166             :     value_type operator*() {
     167             :       return std::make_pair(current_object_, current_size_);
     168             :     }
     169             : 
     170             :    private:
     171             :     inline void AdvanceToNextValidObject();
     172             : 
     173             :     MemoryChunk* const chunk_;
     174             :     Map const one_word_filler_map_;
     175             :     Map const two_word_filler_map_;
     176             :     Map const free_space_map_;
     177             :     MarkBitCellIterator it_;
     178             :     Address cell_base_;
     179             :     MarkBit::CellType current_cell_;
     180             :     HeapObject current_object_;
     181             :     int current_size_;
     182             :   };
     183             : 
     184             :   LiveObjectRange(MemoryChunk* chunk, Bitmap* bitmap)
     185             :       : chunk_(chunk),
     186             :         bitmap_(bitmap),
     187             :         start_(chunk_->area_start()),
     188             :         end_(chunk->area_end()) {
     189             :     DCHECK(!chunk->IsLargePage());
     190             :   }
     191             : 
     192             :   inline iterator begin();
     193             :   inline iterator end();
     194             : 
     195             :  private:
     196             :   MemoryChunk* const chunk_;
     197             :   Bitmap* bitmap_;
     198             :   Address start_;
     199             :   Address end_;
     200             : };
     201             : 
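Editor's sketch (not part of the original header): LiveObjectRange models a forward range whose value_type is a (HeapObject, size) pair, so it composes with range-based for. For example, summing the live bytes of black objects might look like:

    intptr_t CountBlackLiveBytes(MemoryChunk* chunk, Bitmap* bitmap) {
      intptr_t live_bytes = 0;
      for (auto object_and_size :
           LiveObjectRange<kBlackObjects>(chunk, bitmap)) {
        live_bytes += object_and_size.second;  // size in bytes
      }
      return live_bytes;
    }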
     202             : class LiveObjectVisitor : AllStatic {
     203             :  public:
     204             :   enum IterationMode {
     205             :     kKeepMarking,
     206             :     kClearMarkbits,
     207             :   };
     208             : 
     209             :   // Visits black objects on a MemoryChunk until the Visitor returns |false|
     210             :   // for an object. If IterationMode::kClearMarkbits is passed, the markbits
     211             :   // and slots of each successfully visited object are cleared.
     212             :   template <class Visitor, typename MarkingState>
     213             :   static bool VisitBlackObjects(MemoryChunk* chunk, MarkingState* state,
     214             :                                 Visitor* visitor, IterationMode iteration_mode,
     215             :                                 HeapObject* failed_object);
     216             : 
     217             :   // Visits black objects on a MemoryChunk. The visitor is not allowed to fail
     218             :   // visitation for an object.
     219             :   template <class Visitor, typename MarkingState>
     220             :   static void VisitBlackObjectsNoFail(MemoryChunk* chunk, MarkingState* state,
     221             :                                       Visitor* visitor,
     222             :                                       IterationMode iteration_mode);
     223             : 
     224             :   // Visits grey objects on a MemoryChunk. The visitor is not allowed to fail
     225             :   // visitation for an object.
     226             :   template <class Visitor, typename MarkingState>
     227             :   static void VisitGreyObjectsNoFail(MemoryChunk* chunk, MarkingState* state,
     228             :                                      Visitor* visitor,
     229             :                                      IterationMode iteration_mode);
     230             : 
     231             :   template <typename MarkingState>
     232             :   static void RecomputeLiveBytes(MemoryChunk* chunk, MarkingState* state);
     233             : };
     234             : 
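Editor's sketch (not part of the original header) of the VisitBlackObjects contract documented above: visitation stops at the first object the visitor rejects, and that object is reported through |failed_object| so the caller can recover. |EvacuationVisitor| is a hypothetical placeholder:

    EvacuationVisitor visitor;  // hypothetical; Visit() returns false on failure
    HeapObject failed_object;
    if (!LiveObjectVisitor::VisitBlackObjects(
            chunk, state, &visitor, LiveObjectVisitor::kClearMarkbits,
            &failed_object)) {
      // Aborted mid-page: |failed_object| identifies where visitation
      // stopped (cf. ReportAbortedEvacuationCandidate below).
    }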
     235             : enum PageEvacuationMode { NEW_TO_NEW, NEW_TO_OLD };
     236             : enum MarkingTreatmentMode { KEEP, CLEAR };
     237             : enum class RememberedSetUpdatingMode { ALL, OLD_TO_NEW_ONLY };
     238             : 
     239             : // Base class for minor and full MC collectors.
     240             : class MarkCompactCollectorBase {
     241             :  public:
     242             :   static const int kMainThread = 0;
     243             : 
     244      124814 :   virtual ~MarkCompactCollectorBase() = default;
     245             : 
     246             :   virtual void SetUp() = 0;
     247             :   virtual void TearDown() = 0;
     248             :   virtual void CollectGarbage() = 0;
     249             : 
     250             :   inline Heap* heap() const { return heap_; }
     251             :   inline Isolate* isolate();
     252             : 
     253             :  protected:
     254             :   explicit MarkCompactCollectorBase(Heap* heap)
     255      124844 :       : heap_(heap), old_to_new_slots_(0) {}
     256             : 
     257             :   // Marking operations for objects reachable from roots.
     258             :   virtual void MarkLiveObjects() = 0;
     259             :   // Mark objects reachable (transitively) from objects in the marking
     260             :   // work list.
     261             :   virtual void ProcessMarkingWorklist() = 0;
     262             :   // Clear non-live references held in side data structures.
     263             :   virtual void ClearNonLiveReferences() = 0;
     264             :   virtual void EvacuatePrologue() = 0;
     265             :   virtual void EvacuateEpilogue() = 0;
     266             :   virtual void Evacuate() = 0;
     267             :   virtual void EvacuatePagesInParallel() = 0;
     268             :   virtual void UpdatePointersAfterEvacuation() = 0;
     269             :   virtual UpdatingItem* CreateToSpaceUpdatingItem(MemoryChunk* chunk,
     270             :                                                   Address start,
     271             :                                                   Address end) = 0;
     272             :   virtual UpdatingItem* CreateRememberedSetUpdatingItem(
     273             :       MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode) = 0;
     274             : 
     275             :   template <class Evacuator, class Collector>
     276             :   void CreateAndExecuteEvacuationTasks(Collector* collector,
     277             :                                        ItemParallelJob* job,
     278             :                                        MigrationObserver* migration_observer,
     279             :                                        const intptr_t live_bytes);
     280             : 
     281             :   // Returns whether this page should be moved according to heuristics.
     282             :   bool ShouldMovePage(Page* p, intptr_t live_bytes);
     283             : 
     284             :   int CollectToSpaceUpdatingItems(ItemParallelJob* job);
     285             :   template <typename IterateableSpace>
     286             :   int CollectRememberedSetUpdatingItems(ItemParallelJob* job,
     287             :                                         IterateableSpace* space,
     288             :                                         RememberedSetUpdatingMode mode);
     289             : 
     290             :   int NumberOfParallelCompactionTasks(int pages);
     291             :   int NumberOfParallelPointerUpdateTasks(int pages, int slots);
     292             :   int NumberOfParallelToSpacePointerUpdateTasks(int pages);
     293             : 
     294             :   Heap* heap_;
     295             :   // Number of old-to-new slots. Should be computed during MarkLiveObjects.
     296             :   // -1 indicates that the value couldn't be computed.
     297             :   int old_to_new_slots_;
     298             : };
     299             : 
     300             : class MinorMarkingState final
     301             :     : public MarkingStateBase<MinorMarkingState, AccessMode::ATOMIC> {
     302             :  public:
     303             :   ConcurrentBitmap<AccessMode::ATOMIC>* bitmap(const MemoryChunk* chunk) const {
     304             :     return chunk->young_generation_bitmap<AccessMode::ATOMIC>();
     305             :   }
     306             : 
     307             :   void IncrementLiveBytes(MemoryChunk* chunk, intptr_t by) {
     308             :     chunk->young_generation_live_byte_count_ += by;
     309             :   }
     310             : 
     311             :   intptr_t live_bytes(MemoryChunk* chunk) const {
     312             :     return chunk->young_generation_live_byte_count_;
     313             :   }
     314             : 
     315             :   void SetLiveBytes(MemoryChunk* chunk, intptr_t value) {
     316             :     chunk->young_generation_live_byte_count_ = value;
     317             :   }
     318             : };
     319             : 
     320             : class MinorNonAtomicMarkingState final
     321             :     : public MarkingStateBase<MinorNonAtomicMarkingState,
     322             :                               AccessMode::NON_ATOMIC> {
     323             :  public:
     324             :   ConcurrentBitmap<AccessMode::NON_ATOMIC>* bitmap(
     325             :       const MemoryChunk* chunk) const {
     326             :     return chunk->young_generation_bitmap<AccessMode::NON_ATOMIC>();
     327             :   }
     328             : 
     329             :   void IncrementLiveBytes(MemoryChunk* chunk, intptr_t by) {
     330             :     chunk->young_generation_live_byte_count_.fetch_add(
     331             :         by, std::memory_order_relaxed);
     332             :   }
     333             : 
     334             :   intptr_t live_bytes(MemoryChunk* chunk) const {
     335             :     return chunk->young_generation_live_byte_count_.load(
     336             :         std::memory_order_relaxed);
     337             :   }
     338             : 
     339             :   void SetLiveBytes(MemoryChunk* chunk, intptr_t value) {
     340             :     chunk->young_generation_live_byte_count_.store(value,
     341             :                                                    std::memory_order_relaxed);
     342             :   }
     343             : };
     344             : 
     345             : // This marking state is used when concurrent marking is running.
     346             : class IncrementalMarkingState final
     347             :     : public MarkingStateBase<IncrementalMarkingState, AccessMode::ATOMIC> {
     348             :  public:
     349     1972740 :   ConcurrentBitmap<AccessMode::ATOMIC>* bitmap(const MemoryChunk* chunk) const {
     350             :     DCHECK_EQ(reinterpret_cast<intptr_t>(&chunk->marking_bitmap_) -
     351             :                   reinterpret_cast<intptr_t>(chunk),
     352             :               MemoryChunk::kMarkBitmapOffset);
     353     1972740 :     return chunk->marking_bitmap<AccessMode::ATOMIC>();
     354             :   }
     355             : 
     356             :   // Concurrent marking uses local live bytes so we may do these accesses
     357             :   // non-atomically.
     358             :   void IncrementLiveBytes(MemoryChunk* chunk, intptr_t by) {
     359   195173441 :     chunk->live_byte_count_ += by;
     360             :   }
     361             : 
     362             :   intptr_t live_bytes(MemoryChunk* chunk) const {
     363             :     return chunk->live_byte_count_;
     364             :   }
     365             : 
     366             :   void SetLiveBytes(MemoryChunk* chunk, intptr_t value) {
     367             :     chunk->live_byte_count_ = value;
     368             :   }
     369             : };
     370             : 
     371             : class MajorAtomicMarkingState final
     372             :     : public MarkingStateBase<MajorAtomicMarkingState, AccessMode::ATOMIC> {
     373             :  public:
     374             :   ConcurrentBitmap<AccessMode::ATOMIC>* bitmap(const MemoryChunk* chunk) const {
     375             :     DCHECK_EQ(reinterpret_cast<intptr_t>(&chunk->marking_bitmap_) -
     376             :                   reinterpret_cast<intptr_t>(chunk),
     377             :               MemoryChunk::kMarkBitmapOffset);
     378             :     return chunk->marking_bitmap<AccessMode::ATOMIC>();
     379             :   }
     380             : 
     381      402976 :   void IncrementLiveBytes(MemoryChunk* chunk, intptr_t by) {
     382             :     std::atomic_fetch_add(
     383             :         reinterpret_cast<std::atomic<intptr_t>*>(&chunk->live_byte_count_), by);
     384      402976 :   }
     385             : };
     386             : 
     387             : class MajorNonAtomicMarkingState final
     388             :     : public MarkingStateBase<MajorNonAtomicMarkingState,
     389             :                               AccessMode::NON_ATOMIC> {
     390             :  public:
     391             :   ConcurrentBitmap<AccessMode::NON_ATOMIC>* bitmap(
     392             :       const MemoryChunk* chunk) const {
     393             :     DCHECK_EQ(reinterpret_cast<intptr_t>(&chunk->marking_bitmap_) -
     394             :                   reinterpret_cast<intptr_t>(chunk),
     395             :               MemoryChunk::kMarkBitmapOffset);
     396             :     return chunk->marking_bitmap<AccessMode::NON_ATOMIC>();
     397             :   }
     398             : 
     399             :   void IncrementLiveBytes(MemoryChunk* chunk, intptr_t by) {
     400     1123430 :     chunk->live_byte_count_ += by;
     401             :   }
     402             : 
     403             :   intptr_t live_bytes(MemoryChunk* chunk) const {
     404             :     return chunk->live_byte_count_;
     405             :   }
     406             : 
     407             :   void SetLiveBytes(MemoryChunk* chunk, intptr_t value) {
     408     3133441 :     chunk->live_byte_count_ = value;
     409             :   }
     410             : };
     411             : 
     412             : struct Ephemeron {
     413             :   HeapObject key;
     414             :   HeapObject value;
     415             : };
     416             : 
     417             : using EphemeronWorklist = Worklist<Ephemeron, 64>;
     418             : 
     419             : // Weak objects encountered during marking.
     420      811665 : struct WeakObjects {
     421             :   Worklist<TransitionArray, 64> transition_arrays;
     422             : 
     423             :   // Keep track of all EphemeronHashTables in the heap to process
     424             :   // them in the atomic pause.
     425             :   Worklist<EphemeronHashTable, 64> ephemeron_hash_tables;
     426             : 
     427             :   // Keep track of all ephemerons for concurrent marking tasks. Only store
     428             :   // ephemerons in these Worklists if both key and value are unreachable at the
     429             :   // moment.
     430             :   //
     431             :   // MarkCompactCollector::ProcessEphemeronsUntilFixpoint drains and fills these
     432             :   // worklists.
     433             :   //
     434             :   // current_ephemerons is used as the draining worklist in the current
     435             :   // fixpoint iteration.
     436             :   EphemeronWorklist current_ephemerons;
     437             : 
     438             :   // Stores ephemerons to visit in the next fixpoint iteration.
     439             :   EphemeronWorklist next_ephemerons;
     440             : 
     441             :   // When draining the marking worklist, newly discovered ephemerons are
     442             :   // pushed into this worklist.
     443             :   EphemeronWorklist discovered_ephemerons;
     444             : 
     445             :   // TODO(marja): For old space, we only need the slot, not the host
     446             :   // object. Optimize this by adding a different storage for old space.
     447             :   Worklist<std::pair<HeapObject, HeapObjectSlot>, 64> weak_references;
     448             :   Worklist<std::pair<HeapObject, Code>, 64> weak_objects_in_code;
     449             : 
     450             :   Worklist<JSWeakRef, 64> js_weak_refs;
     451             :   Worklist<WeakCell, 64> weak_cells;
     452             : 
     453             :   Worklist<SharedFunctionInfo, 64> bytecode_flushing_candidates;
     454             :   Worklist<JSFunction, 64> flushed_js_functions;
     455             : };
     456             : 
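Editor's sketch (not part of the original header) of one round of the ephemeron fixpoint described in the comments above, using only the Push/Pop worklist interface visible elsewhere in this file. DrainMarkingWorklist() is a hypothetical stand-in for the real draining logic, and ProcessEphemeron is declared below in MarkCompactCollector:

    bool OneFixpointRound(MarkCompactCollector* collector, WeakObjects* w) {
      const int kMainThread = 0;
      bool marked_something = false;
      Ephemeron e;
      // Ephemerons whose key may have become reachable in the last round.
      while (w->current_ephemerons.Pop(kMainThread, &e)) {
        marked_something |= collector->ProcessEphemeron(e.key, e.value);
      }
      // Draining the marking worklist can discover new ephemerons; they
      // arrive in discovered_ephemerons and are handled in the same round.
      DrainMarkingWorklist();  // hypothetical stand-in
      while (w->discovered_ephemerons.Pop(kMainThread, &e)) {
        marked_something |= collector->ProcessEphemeron(e.key, e.value);
      }
      // Unresolved entries wait in next_ephemerons for the next round; the
      // caller iterates until a round marks nothing new.
      return marked_something;
    }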
     457       62407 : struct EphemeronMarking {
     458             :   std::vector<HeapObject> newly_discovered;
     459             :   bool newly_discovered_overflowed;
     460             :   size_t newly_discovered_limit;
     461             : };
     462             : 
     463             : // Collector for young and old generation.
     464             : class MarkCompactCollector final : public MarkCompactCollectorBase {
     465             :  public:
     466             : #ifdef V8_CONCURRENT_MARKING
     467             :   using MarkingState = IncrementalMarkingState;
     468             : #else
     469             :   using MarkingState = MajorNonAtomicMarkingState;
     470             : #endif  // V8_CONCURRENT_MARKING
     471             : 
     472             :   using NonAtomicMarkingState = MajorNonAtomicMarkingState;
     473             : 
     474             :   // Wrapper for the shared worklist.
     475       62405 :   class MarkingWorklist {
     476             :    public:
     477             :     using ConcurrentMarkingWorklist = Worklist<HeapObject, 64>;
     478             :     using EmbedderTracingWorklist = Worklist<HeapObject, 16>;
     479             : 
     480             :     // The heap parameter is not used but needed to match the sequential case.
     481      249688 :     explicit MarkingWorklist(Heap* heap) {}
     482             : 
     483      597054 :     void Push(HeapObject object) {
     484   341826010 :       bool success = shared_.Push(kMainThread, object);
     485             :       USE(success);
     486             :       DCHECK(success);
     487      597054 :     }
     488             : 
     489   190791391 :     HeapObject Pop() {
     490   190791391 :       HeapObject result;
     491   190791391 :       if (shared_.Pop(kMainThread, &result)) return result;
     492             : #ifdef V8_CONCURRENT_MARKING
     493             :       // The expectation is that this work list is empty almost all the time
     494             :       // and we can thus avoid the emptiness checks by putting it last.
     495     5053334 :       if (on_hold_.Pop(kMainThread, &result)) return result;
     496             : #endif
     497     1544746 :       return HeapObject();
     498             :     }
     499             : 
     500        4915 :     void Clear() {
     501             :       shared_.Clear();
     502             :       on_hold_.Clear();
     503             :       embedder_.Clear();
     504        4915 :     }
     505             : 
     506     1613140 :     bool IsEmpty() {
     507     1557521 :       return shared_.IsLocalEmpty(kMainThread) &&
     508     1557357 :              on_hold_.IsLocalEmpty(kMainThread) &&
     509     3168068 :              shared_.IsGlobalPoolEmpty() && on_hold_.IsGlobalPoolEmpty();
     510             :     }
     511             : 
     512      206592 :     bool IsEmbedderEmpty() {
     513      413184 :       return embedder_.IsLocalEmpty(kMainThread) &&
     514      206592 :              embedder_.IsGlobalPoolEmpty();
     515             :     }
     516             : 
     517             :     int Size() {
     518             :       return static_cast<int>(shared_.LocalSize(kMainThread) +
     519             :                               on_hold_.LocalSize(kMainThread));
     520             :     }
     521             : 
     522             :     // Calls the specified callback on each element of the worklists and
     523             :     // replaces the element with the result of the callback. If the callback
     524             :     // returns nullptr then the element is removed from the worklist.
     525             :     // The callback must accept HeapObject and return HeapObject.
     526             :     template <typename Callback>
     527        1064 :     void Update(Callback callback) {
     528        1064 :       shared_.Update(callback);
     529        1064 :       on_hold_.Update(callback);
     530        1064 :       embedder_.Update(callback);
     531        1064 :     }
     532             : 
     533     1499555 :     void ShareWorkIfGlobalPoolIsEmpty() {
     534     1524999 :       if (!shared_.IsLocalEmpty(kMainThread) && shared_.IsGlobalPoolEmpty()) {
     535        7331 :         shared_.FlushToGlobal(kMainThread);
     536             :       }
     537     1499555 :     }
     538             : 
     539     1145293 :     ConcurrentMarkingWorklist* shared() { return &shared_; }
     540     1145293 :     ConcurrentMarkingWorklist* on_hold() { return &on_hold_; }
     541       62517 :     EmbedderTracingWorklist* embedder() { return &embedder_; }
     542             : 
     543             :     void Print() {
     544             :       PrintWorklist("shared", &shared_);
     545             :       PrintWorklist("on_hold", &on_hold_);
     546             :     }
     547             : 
     548             :    private:
     549             :     // Prints the stats about the global pool of the worklist.
     550             :     void PrintWorklist(const char* worklist_name,
     551             :                        ConcurrentMarkingWorklist* worklist);
     552             : 
     553             :     // Worklist used for most objects.
     554             :     ConcurrentMarkingWorklist shared_;
     555             : 
     556             :     // Concurrent marking uses this worklist to bail out of marking objects
     557             :     // in new space's linear allocation area. Used to avoid black allocation
     558             :     // for new space. This allows the compiler to remove write barriers
     559             :     // for freshly allocated objects.
     560             :     ConcurrentMarkingWorklist on_hold_;
     561             : 
     562             :     // Worklist for objects that potentially require embedder tracing, i.e.,
     563             :     // these objects need to be handed over to the embedder to find the full
     564             :     // transitive closure.
     565             :     EmbedderTracingWorklist embedder_;
     566             :   };
     567             : 
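    // Editor's sketch (not part of the original header): a typical
    // main-thread drain of this worklist. Pop() above returns an empty
    // HeapObject once shared_ and on_hold_ are exhausted, so, assuming
    // HeapObject exposes is_null() as elsewhere in V8, a drain loop is:
    //
    //   HeapObject object;
    //   while (!(object = worklist->Pop()).is_null()) {
    //     // ... visit |object|, Push() any newly discovered grey objects ...
    //   }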
     568             :   class RootMarkingVisitor;
     569             :   class CustomRootBodyMarkingVisitor;
     570             : 
     571             :   enum IterationMode {
     572             :     kKeepMarking,
     573             :     kClearMarkbits,
     574             :   };
     575             : 
     576      482089 :   MarkingState* marking_state() { return &marking_state_; }
     577             : 
     578             :   NonAtomicMarkingState* non_atomic_marking_state() {
     579      877115 :     return &non_atomic_marking_state_;
     580             :   }
     581             : 
     582             :   void SetUp() override;
     583             :   void TearDown() override;
     584             :   // Performs a global garbage collection.
     585             :   void CollectGarbage() override;
     586             : 
     587             :   void CollectEvacuationCandidates(PagedSpace* space);
     588             : 
     589             :   void AddEvacuationCandidate(Page* p);
     590             : 
     591             :   // Prepares for GC by resetting relocation info in old and map spaces and
     592             :   // choosing spaces to compact.
     593             :   void Prepare();
     594             : 
     595             :   // Stop concurrent marking (either by preempting it right away or waiting for
     596             :   // it to complete as requested by |stop_request|).
     597             :   void FinishConcurrentMarking(ConcurrentMarking::StopRequest stop_request);
     598             : 
     599             :   bool StartCompaction();
     600             : 
     601             :   void AbortCompaction();
     602             : 
     603           0 :   static inline bool IsOnEvacuationCandidate(Object obj) {
     604           0 :     return Page::FromAddress(obj->ptr())->IsEvacuationCandidate();
     605             :   }
     606             : 
     607             :   static bool IsOnEvacuationCandidate(MaybeObject obj);
     608             : 
     609             :   struct RecordRelocSlotInfo {
     610             :     MemoryChunk* memory_chunk;
     611             :     SlotType slot_type;
     612             :     bool should_record;
     613             :     uint32_t offset;
     614             :   };
     615             :   static RecordRelocSlotInfo PrepareRecordRelocSlot(Code host, RelocInfo* rinfo,
     616             :                                                     HeapObject target);
     617             :   static void RecordRelocSlot(Code host, RelocInfo* rinfo, HeapObject target);
     618             :   V8_INLINE static void RecordSlot(HeapObject object, ObjectSlot slot,
     619             :                                    HeapObject target);
     620             :   V8_INLINE static void RecordSlot(HeapObject object, HeapObjectSlot slot,
     621             :                                    HeapObject target);
     622             :   void RecordLiveSlotsOnPage(Page* page);
     623             : 
     624             :   void UpdateSlots(SlotsBuffer* buffer);
     625             :   void UpdateSlotsRecordedIn(SlotsBuffer* buffer);
     626             : 
     627             :   bool is_compacting() const { return compacting_; }
     628             : 
     629             :   // Ensures that sweeping is finished.
     630             :   //
     631             :   // Note: Can only be called safely from the main thread.
     632             :   V8_EXPORT_PRIVATE void EnsureSweepingCompleted();
     633             : 
     634             :   // Checks if sweeping is in progress right now on any space.
     635             :   bool sweeping_in_progress() const { return sweeper_->sweeping_in_progress(); }
     636             : 
     637      137728 :   void set_evacuation(bool evacuation) { evacuation_ = evacuation; }
     638             : 
     639             :   bool evacuation() const { return evacuation_; }
     640             : 
     641   149334537 :   MarkingWorklist* marking_worklist() { return &marking_worklist_; }
     642             : 
     643      124734 :   WeakObjects* weak_objects() { return &weak_objects_; }
     644             : 
     645             :   inline void AddTransitionArray(TransitionArray array);
     646             : 
     647             :   void AddEphemeronHashTable(EphemeronHashTable table) {
     648       15185 :     weak_objects_.ephemeron_hash_tables.Push(kMainThread, table);
     649             :   }
     650             : 
     651             :   void AddEphemeron(HeapObject key, HeapObject value) {
     652          89 :     weak_objects_.discovered_ephemerons.Push(kMainThread,
     653         178 :                                              Ephemeron{key, value});
     654             :   }
     655             : 
     656       10784 :   void AddWeakReference(HeapObject host, HeapObjectSlot slot) {
     657     7169822 :     weak_objects_.weak_references.Push(kMainThread, std::make_pair(host, slot));
     658       10784 :   }
     659             : 
     660       22661 :   void AddWeakObjectInCode(HeapObject object, Code code) {
     661       22661 :     weak_objects_.weak_objects_in_code.Push(kMainThread,
     662       22661 :                                             std::make_pair(object, code));
     663       22661 :   }
     664             : 
     665             :   void AddWeakRef(JSWeakRef weak_ref) {
     666          86 :     weak_objects_.js_weak_refs.Push(kMainThread, weak_ref);
     667             :   }
     668             : 
     669             :   void AddWeakCell(WeakCell weak_cell) {
     670         267 :     weak_objects_.weak_cells.Push(kMainThread, weak_cell);
     671             :   }
     672             : 
     673             :   inline void AddBytecodeFlushingCandidate(SharedFunctionInfo flush_candidate);
     674             :   inline void AddFlushedJSFunction(JSFunction flushed_function);
     675             : 
     676           0 :   void AddNewlyDiscovered(HeapObject object) {
     677           0 :     if (ephemeron_marking_.newly_discovered_overflowed) return;
     678             : 
     679           0 :     if (ephemeron_marking_.newly_discovered.size() <
     680           0 :         ephemeron_marking_.newly_discovered_limit) {
     681           0 :       ephemeron_marking_.newly_discovered.push_back(object);
     682             :     } else {
     683           0 :       ephemeron_marking_.newly_discovered_overflowed = true;
     684             :     }
     685             :   }
     686             : 
     687             :   void ResetNewlyDiscovered() {
     688           0 :     ephemeron_marking_.newly_discovered_overflowed = false;
     689             :     ephemeron_marking_.newly_discovered.clear();
     690             :   }
     691             : 
     692             :   Sweeper* sweeper() { return sweeper_; }
     693             : 
     694             : #ifdef DEBUG
     695             :   // Checks whether a mark-compact collection is currently being performed.
     696             :   bool in_use() { return state_ > PREPARE_GC; }
     697             :   bool are_map_pointers_encoded() { return state_ == UPDATE_POINTERS; }
     698             : #endif
     699             : 
     700             :   void VerifyMarking();
     701             : #ifdef VERIFY_HEAP
     702             :   void VerifyValidStoreAndSlotsBufferEntries();
     703             :   void VerifyMarkbitsAreClean();
     704             :   void VerifyMarkbitsAreDirty(PagedSpace* space);
     705             :   void VerifyMarkbitsAreClean(PagedSpace* space);
     706             :   void VerifyMarkbitsAreClean(NewSpace* space);
     707             :   void VerifyMarkbitsAreClean(LargeObjectSpace* space);
     708             : #endif
     709             : 
     710             :   unsigned epoch() const { return epoch_; }
     711             : 
     712             :   explicit MarkCompactCollector(Heap* heap);
     713             :   ~MarkCompactCollector() override;
     714             : 
     715             :   // Used by wrapper tracing.
     716             :   V8_INLINE void MarkExternallyReferencedObject(HeapObject obj);
     717             : 
     718             :  private:
     719             :   void ComputeEvacuationHeuristics(size_t area_size,
     720             :                                    int* target_fragmentation_percent,
     721             :                                    size_t* max_evacuated_bytes);
     722             : 
     723             :   void RecordObjectStats();
     724             : 
     725             :   // Finishes GC, performs heap verification if enabled.
     726             :   void Finish();
     727             : 
     728             :   void MarkLiveObjects() override;
     729             : 
     730             :   // Marks the object black and adds it to the marking work list.
     731             :   // This is for non-incremental marking only.
     732             :   V8_INLINE void MarkObject(HeapObject host, HeapObject obj);
     733             : 
     734             :   // Marks the object black and adds it to the marking work list.
     735             :   // This is for non-incremental marking only.
     736             :   V8_INLINE void MarkRootObject(Root root, HeapObject obj);
     737             : 
     738             :   // Mark the heap roots and all objects reachable from them.
     739             :   void MarkRoots(RootVisitor* root_visitor,
     740             :                  ObjectVisitor* custom_root_body_visitor);
     741             : 
     742             :   // Mark the string table specially.  References to internalized strings from
     743             :   // the string table are weak.
     744             :   void MarkStringTable(ObjectVisitor* visitor);
     745             : 
     746             :   // Marks object reachable from harmony weak maps and wrapper tracing.
     747             :   void ProcessEphemeronMarking();
     748             : 
     749             :   // If the call-site of the top optimized code was not prepared for
     750             :   // deoptimization, treat embedded pointers in the code as strong;
     751             :   // otherwise they can die and try to deoptimize the underlying code.
     752             :   void ProcessTopOptimizedFrame(ObjectVisitor* visitor);
     753             : 
     754             :   // Drains the main thread marking work list. Will mark all pending objects
     755             :   // if no concurrent threads are running.
     756             :   void ProcessMarkingWorklist() override;
     757             : 
     758             :   enum class MarkingWorklistProcessingMode {
     759             :     kDefault,
     760             :     kTrackNewlyDiscoveredObjects
     761             :   };
     762             : 
     763             :   template <MarkingWorklistProcessingMode mode>
     764             :   void ProcessMarkingWorklistInternal();
     765             : 
     766             :   // Implements ephemeron semantics: Marks value if key is already reachable.
     767             :   // Returns true if value was actually marked.
     768             :   bool ProcessEphemeron(HeapObject key, HeapObject value);
     769             : 
     770             :   // Marks ephemerons and drains marking worklist iteratively
     771             :   // until a fixpoint is reached.
     772             :   void ProcessEphemeronsUntilFixpoint();
     773             : 
     774             :   // Drains ephemeron and marking worklists. Single iteration of the
     775             :   // fixpoint iteration.
     776             :   bool ProcessEphemerons();
     777             : 
     778             :   // Mark ephemerons and drain marking worklist with a linear algorithm.
     779             :   // Only used if fixpoint iteration doesn't finish within a few iterations.
     780             :   void ProcessEphemeronsLinear();
     781             : 
     782             :   // Perform Wrapper Tracing if in use.
     783             :   void PerformWrapperTracing();
     784             : 
     785             :   // Callback function for telling whether the object *p is an unmarked
     786             :   // heap object.
     787             :   static bool IsUnmarkedHeapObject(Heap* heap, FullObjectSlot p);
     788             : 
     789             :   // Clear non-live references in weak cells, transition and descriptor arrays,
     790             :   // and deoptimize dependent code of non-live maps.
     791             :   void ClearNonLiveReferences() override;
     792             :   void MarkDependentCodeForDeoptimization();
     793             :   // Checks if the given weak cell is a simple transition from the parent map
     794             :   // of the given dead target. If so it clears the transition and trims
     795             :   // the descriptor array of the parent if needed.
     796             :   void ClearPotentialSimpleMapTransition(Map dead_target);
     797             :   void ClearPotentialSimpleMapTransition(Map map, Map dead_target);
     798             : 
     799             :   // Flushes a weakly held bytecode array from a shared function info.
     800             :   void FlushBytecodeFromSFI(SharedFunctionInfo shared_info);
     801             : 
     802             :   // Clears bytecode arrays that have not been executed for multiple
     803             :   // collections.
     804             :   void ClearOldBytecodeCandidates();
     805             : 
     806             :   // Resets any JSFunctions which have had their bytecode flushed.
     807             :   void ClearFlushedJsFunctions();
     808             : 
     809             :   // Compact every array in the global list of transition arrays and
     810             :   // trim the corresponding descriptor array if a transition target is non-live.
     811             :   void ClearFullMapTransitions();
     812             :   void TrimDescriptorArray(Map map, DescriptorArray descriptors);
     813             :   void TrimEnumCache(Map map, DescriptorArray descriptors);
     814             :   bool CompactTransitionArray(Map map, TransitionArray transitions,
     815             :                               DescriptorArray descriptors);
     816             : 
     817             :   // After all reachable objects have been marked, those weak map entries
     818             :   // with an unreachable key are removed from all encountered weak maps.
     819             :   // The linked list of all encountered weak maps is destroyed.
     820             :   void ClearWeakCollections();
     821             : 
     822             :   // Goes through the list of encountered weak references and clears those with
     823             :   // dead values. If the value is a dead map and the parent map transitions to
     824             :   // the dead map via weak cell, then this function also clears the map
     825             :   // transition.
     826             :   void ClearWeakReferences();
     827             : 
     828             :   // Goes through the list of encountered JSWeakRefs and WeakCells and clears
     829             :   // those with dead values.
     830             :   void ClearJSWeakRefs();
     831             : 
     832             :   void AbortWeakObjects();
     833             : 
     834             :   // Starts sweeping of spaces by contributing on the main thread and setting
     835             :   // up other pages for sweeping. Does not start sweeper tasks.
     836             :   void StartSweepSpaces();
     837             :   void StartSweepSpace(PagedSpace* space);
     838             : 
     839             :   void EvacuatePrologue() override;
     840             :   void EvacuateEpilogue() override;
     841             :   void Evacuate() override;
     842             :   void EvacuatePagesInParallel() override;
     843             :   void UpdatePointersAfterEvacuation() override;
     844             : 
     845             :   UpdatingItem* CreateToSpaceUpdatingItem(MemoryChunk* chunk, Address start,
     846             :                                           Address end) override;
     847             :   UpdatingItem* CreateRememberedSetUpdatingItem(
     848             :       MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode) override;
     849             : 
     850             :   int CollectNewSpaceArrayBufferTrackerItems(ItemParallelJob* job);
     851             :   int CollectOldSpaceArrayBufferTrackerItems(ItemParallelJob* job);
     852             : 
     853             :   void ReleaseEvacuationCandidates();
     854             :   void PostProcessEvacuationCandidates();
     855             :   void ReportAbortedEvacuationCandidate(HeapObject failed_object,
     856             :                                         MemoryChunk* chunk);
     857             : 
     858             :   static const int kEphemeronChunkSize = 8 * KB;
     859             : 
     860             :   int NumberOfParallelEphemeronVisitingTasks(size_t elements);
     861             : 
     862             :   void RightTrimDescriptorArray(DescriptorArray array, int descriptors_to_trim);
     863             : 
     864             :   base::Mutex mutex_;
     865             :   base::Semaphore page_parallel_job_semaphore_;
     866             : 
     867             : #ifdef DEBUG
     868             :   enum CollectorState {
     869             :     IDLE,
     870             :     PREPARE_GC,
     871             :     MARK_LIVE_OBJECTS,
     872             :     SWEEP_SPACES,
     873             :     ENCODE_FORWARDING_ADDRESSES,
     874             :     UPDATE_POINTERS,
     875             :     RELOCATE_OBJECTS
     876             :   };
     877             : 
     878             :   // The current stage of the collector.
     879             :   CollectorState state_;
     880             : #endif
     881             : 
     882             :   bool was_marked_incrementally_;
     883             : 
     884             :   bool evacuation_;
     885             : 
     886             :   // True if we are collecting slots to perform evacuation from evacuation
     887             :   // candidates.
     888             :   bool compacting_;
     889             : 
     890             :   bool black_allocation_;
     891             : 
     892             :   bool have_code_to_deoptimize_;
     893             : 
     894             :   MarkingWorklist marking_worklist_;
     895             :   WeakObjects weak_objects_;
     896             :   EphemeronMarking ephemeron_marking_;
     897             : 
     898             :   // Pages that are candidates for evacuation.
     899             :   std::vector<Page*> evacuation_candidates_;
     900             :   // Pages that are actually processed during evacuation.
     901             :   std::vector<Page*> old_space_evacuation_pages_;
     902             :   std::vector<Page*> new_space_evacuation_pages_;
     903             :   std::vector<std::pair<HeapObject, Page*>> aborted_evacuation_candidates_;
     904             : 
     905             :   Sweeper* sweeper_;
     906             : 
     907             :   MarkingState marking_state_;
     908             :   NonAtomicMarkingState non_atomic_marking_state_;
     909             : 
     910             :   // Counts the number of mark-compact collections. This is used for marking
     911             :   // descriptor arrays. See NumberOfMarkedDescriptors. Only lower two bits are
     912             :   // used, so it is okay if this counter overflows and wraps around.
     913             :   unsigned epoch_ = 0;
     914             : 
     915             :   friend class FullEvacuator;
     916             :   friend class RecordMigratedSlotVisitor;
     917             : };
     918             : 
     919             : template <FixedArrayVisitationMode fixed_array_mode,
     920             :           TraceRetainingPathMode retaining_path_mode, typename MarkingState>
     921   106647131 : class MarkingVisitor final
     922             :     : public HeapVisitor<
     923             :           int,
     924             :           MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>> {
     925             :  public:
     926             :   using Parent = HeapVisitor<
     927             :       int, MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>>;
     928             : 
     929             :   V8_INLINE MarkingVisitor(MarkCompactCollector* collector,
     930             :                            MarkingState* marking_state);
     931             : 
     932             :   V8_INLINE bool ShouldVisitMapPointer() { return false; }
     933             : 
     934             :   V8_INLINE int VisitBytecodeArray(Map map, BytecodeArray object);
     935             :   V8_INLINE int VisitDescriptorArray(Map map, DescriptorArray object);
     936             :   V8_INLINE int VisitEphemeronHashTable(Map map, EphemeronHashTable object);
     937             :   V8_INLINE int VisitFixedArray(Map map, FixedArray object);
     938             :   V8_INLINE int VisitJSApiObject(Map map, JSObject object);
     939             :   V8_INLINE int VisitJSArrayBuffer(Map map, JSArrayBuffer object);
     940             :   V8_INLINE int VisitJSFunction(Map map, JSFunction object);
     941             :   V8_INLINE int VisitJSDataView(Map map, JSDataView object);
     942             :   V8_INLINE int VisitJSTypedArray(Map map, JSTypedArray object);
     943             :   V8_INLINE int VisitMap(Map map, Map object);
     944             :   V8_INLINE int VisitSharedFunctionInfo(Map map, SharedFunctionInfo object);
     945             :   V8_INLINE int VisitTransitionArray(Map map, TransitionArray object);
     946             :   V8_INLINE int VisitWeakCell(Map map, WeakCell object);
     947             :   V8_INLINE int VisitJSWeakRef(Map map, JSWeakRef object);
     948             : 
     949             :   // ObjectVisitor implementation.
     950           0 :   V8_INLINE void VisitPointer(HeapObject host, ObjectSlot p) final {
     951             :     VisitPointerImpl(host, p);
     952           0 :   }
     953           0 :   V8_INLINE void VisitPointer(HeapObject host, MaybeObjectSlot p) final {
     954             :     VisitPointerImpl(host, p);
     955           0 :   }
     956           0 :   V8_INLINE void VisitPointers(HeapObject host, ObjectSlot start,
     957             :                                ObjectSlot end) final {
     958             :     VisitPointersImpl(host, start, end);
     959           0 :   }
     960           0 :   V8_INLINE void VisitPointers(HeapObject host, MaybeObjectSlot start,
     961             :                                MaybeObjectSlot end) final {
     962             :     VisitPointersImpl(host, start, end);
     963           0 :   }
     964             :   V8_INLINE void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) final;
     965             :   V8_INLINE void VisitCodeTarget(Code host, RelocInfo* rinfo) final;
     966             : 
     967             :   // Weak list pointers should be ignored during marking. The lists are
     968             :   // reconstructed after GC.
     969    17653863 :   void VisitCustomWeakPointers(HeapObject host, ObjectSlot start,
     970    17653863 :                                ObjectSlot end) final {}
     971             : 
     972             :   V8_INLINE void VisitDescriptors(DescriptorArray descriptors,
     973             :                                   int number_of_own_descriptors);
     974             :   // Marks the descriptor array black without pushing it on the marking work
     975             :   // list and visits its header.
     976             :   V8_INLINE void MarkDescriptorArrayBlack(HeapObject host,
     977             :                                           DescriptorArray descriptors);
     978             : 
     979             :  private:
     980             :   // Granularity in which FixedArrays are scanned if |fixed_array_mode|
     981             :   // enables incremental (progress-bar) scanning.
     982             :   static const int kProgressBarScanningChunk = 32 * KB;
     983             : 
     984             :   template <typename TSlot>
     985             :   V8_INLINE void VisitPointerImpl(HeapObject host, TSlot p);
     986             : 
     987             :   template <typename TSlot>
     988             :   V8_INLINE void VisitPointersImpl(HeapObject host, TSlot start, TSlot end);
     989             : 
     990             :   V8_INLINE int VisitFixedArrayIncremental(Map map, FixedArray object);
     991             : 
     992             :   template <typename T>
     993             :   V8_INLINE int VisitEmbedderTracingSubclass(Map map, T object);
     994             : 
     995             :   // Marks the object grey and pushes it on the marking work list.
     996             :   V8_INLINE void MarkObject(HeapObject host, HeapObject obj);
     997             : 
     998     1909586 :   MarkingState* marking_state() { return marking_state_; }
     999             : 
    1000      140404 :   MarkCompactCollector::MarkingWorklist* marking_worklist() const {
    1001      280808 :     return collector_->marking_worklist();
    1002             :   }
    1003             : 
    1004             :   Heap* const heap_;
    1005             :   MarkCompactCollector* const collector_;
    1006             :   MarkingState* const marking_state_;
    1007             :   const unsigned mark_compact_epoch_;
    1008             : };
    1009             : 
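Editor's sketch (not part of the original header): MarkingVisitor is itself a CRTP HeapVisitor, so a concrete visitor type is obtained by fixing the three template parameters at compile time. The enumerator names used here are assumptions for illustration only:

    using MainMarkingVisitor =
        MarkingVisitor<FixedArrayVisitationMode::kRegular,   // assumed name
                       TraceRetainingPathMode::kDisabled,    // assumed name
                       MarkCompactCollector::MarkingState>;

    int VisitForMarking(MarkCompactCollector* collector, HeapObject object) {
      MainMarkingVisitor visitor(collector, collector->marking_state());
      return visitor.Visit(object);  // returns the visited object's size
    }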
    1010             : class EvacuationScope {
    1011             :  public:
    1012             :   explicit EvacuationScope(MarkCompactCollector* collector)
    1013             :       : collector_(collector) {
    1014             :     collector_->set_evacuation(true);
    1015             :   }
    1016             : 
    1017             :   ~EvacuationScope() { collector_->set_evacuation(false); }
    1018             : 
    1019             :  private:
    1020             :   MarkCompactCollector* collector_;
    1021             : };
    1022             : 
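Editor's note (sketch, not part of the original header): EvacuationScope is a plain RAII guard, so evacuation() reports true for exactly the dynamic extent of the scope, including early returns:

    void EvacuateExample(MarkCompactCollector* collector) {
      EvacuationScope scope(collector);  // calls set_evacuation(true)
      DCHECK(collector->evacuation());
      // ... evacuate pages; code observing evacuation() sees true ...
    }  // destructor runs set_evacuation(false)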
    1023             : #ifdef ENABLE_MINOR_MC
    1024             : 
    1025             : // Collector for young-generation only.
    1026             : class MinorMarkCompactCollector final : public MarkCompactCollectorBase {
    1027             :  public:
    1028             :   using MarkingState = MinorMarkingState;
    1029             :   using NonAtomicMarkingState = MinorNonAtomicMarkingState;
    1030             : 
    1031             :   explicit MinorMarkCompactCollector(Heap* heap);
    1032             :   ~MinorMarkCompactCollector() override;
    1033             : 
    1034       63486 :   MarkingState* marking_state() { return &marking_state_; }
    1035             : 
    1036             :   NonAtomicMarkingState* non_atomic_marking_state() {
    1037           0 :     return &non_atomic_marking_state_;
    1038             :   }
    1039             : 
    1040             :   void SetUp() override;
    1041             :   void TearDown() override;
    1042             :   void CollectGarbage() override;
    1043             : 
    1044             :   void MakeIterable(Page* page, MarkingTreatmentMode marking_mode,
    1045             :                     FreeSpaceTreatmentMode free_space_mode);
    1046             :   void CleanupSweepToIteratePages();
    1047             : 
    1048             :  private:
    1049             :   using MarkingWorklist = Worklist<HeapObject, 64 /* segment size */>;
    1050             :   class RootMarkingVisitor;
    1051             : 
    1052             :   static const int kNumMarkers = 8;
    1053             :   static const int kMainMarker = 0;
    1054             : 
    1055             :   inline MarkingWorklist* worklist() { return worklist_; }
    1056             : 
    1057             :   inline YoungGenerationMarkingVisitor* main_marking_visitor() {
    1058             :     return main_marking_visitor_;
    1059             :   }
    1060             : 
    1061             :   void MarkLiveObjects() override;
    1062             :   void MarkRootSetInParallel(RootMarkingVisitor* root_visitor);
    1063             :   V8_INLINE void MarkRootObject(HeapObject obj);
    1064             :   void ProcessMarkingWorklist() override;
    1065             :   void ClearNonLiveReferences() override;
    1066             : 
    1067             :   void EvacuatePrologue() override;
    1068             :   void EvacuateEpilogue() override;
    1069             :   void Evacuate() override;
    1070             :   void EvacuatePagesInParallel() override;
    1071             :   void UpdatePointersAfterEvacuation() override;
    1072             : 
    1073             :   UpdatingItem* CreateToSpaceUpdatingItem(MemoryChunk* chunk, Address start,
    1074             :                                           Address end) override;
    1075             :   UpdatingItem* CreateRememberedSetUpdatingItem(
    1076             :       MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode) override;
    1077             : 
    1078             :   int CollectNewSpaceArrayBufferTrackerItems(ItemParallelJob* job);
    1079             : 
    1080             :   int NumberOfParallelMarkingTasks(int pages);
    1081             : 
    1082             :   MarkingWorklist* worklist_;
    1083             : 
    1084             :   YoungGenerationMarkingVisitor* main_marking_visitor_;
    1085             :   base::Semaphore page_parallel_job_semaphore_;
    1086             :   std::vector<Page*> new_space_evacuation_pages_;
    1087             :   std::vector<Page*> sweep_to_iterate_pages_;
    1088             : 
    1089             :   MarkingState marking_state_;
    1090             :   NonAtomicMarkingState non_atomic_marking_state_;
    1091             : 
    1092             :   friend class YoungGenerationMarkingTask;
    1093             :   friend class YoungGenerationMarkingVisitor;
    1094             : };
    1095             : 
    1096             : #endif  // ENABLE_MINOR_MC
    1097             : 
    1098             : }  // namespace internal
    1099             : }  // namespace v8
    1100             : 
    1101             : #endif  // V8_HEAP_MARK_COMPACT_H_

Generated by: LCOV version 1.10