LCOV - code coverage report
Current view: top level - src/heap - mark-compact.h (source / functions) Hit Total Coverage
Test: app.info Lines: 98 118 83.1 %
Date: 2019-01-20 Functions: 47 73 64.4 %

          Line data    Source code
       1             : // Copyright 2012 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #ifndef V8_HEAP_MARK_COMPACT_H_
       6             : #define V8_HEAP_MARK_COMPACT_H_
       7             : 
       8             : #include <vector>
       9             : 
      10             : #include "src/heap/concurrent-marking.h"
      11             : #include "src/heap/marking.h"
      12             : #include "src/heap/objects-visiting.h"
      13             : #include "src/heap/spaces.h"
      14             : #include "src/heap/sweeper.h"
      15             : #include "src/heap/worklist.h"
      16             : #include "src/objects/heap-object.h"   // For Worklist<HeapObject, ...>
      17             : #include "src/objects/js-weak-refs.h"  // For Worklist<JSWeakCell, ...>
      18             : 
      19             : namespace v8 {
      20             : namespace internal {
      21             : 
      22             : // Forward declarations.
      23             : class EvacuationJobTraits;
      24             : class HeapObjectVisitor;
      25             : class ItemParallelJob;
      26             : class MigrationObserver;
      27             : class RecordMigratedSlotVisitor;
      28             : class UpdatingItem;
      29             : class YoungGenerationMarkingVisitor;
      30             : 
      31             : template <typename ConcreteState, AccessMode access_mode>
      32             : class MarkingStateBase {
      33             :  public:
      34             :   V8_INLINE MarkBit MarkBitFrom(HeapObject obj) {
      35  7590701810 :     return MarkBitFrom(MemoryChunk::FromHeapObject(obj), obj->ptr());
      36             :   }
      37             : 
      38             :   // {addr} may be tagged or aligned.
      39  8366344577 :   V8_INLINE MarkBit MarkBitFrom(MemoryChunk* p, Address addr) {
      40             :     return static_cast<ConcreteState*>(this)->bitmap(p)->MarkBitFromIndex(
      41 16734536510 :         p->AddressToMarkbitIndex(addr));
      42             :   }
      43             : 
      44           0 :   Marking::ObjectColor Color(HeapObject obj) {
      45           0 :     return Marking::Color(MarkBitFrom(obj));
      46             :   }
      47             : 
      48             :   V8_INLINE bool IsImpossible(HeapObject obj) {
      49             :     return Marking::IsImpossible<access_mode>(MarkBitFrom(obj));
      50             :   }
      51             : 
      52             :   V8_INLINE bool IsBlack(HeapObject obj) {
      53             :     return Marking::IsBlack<access_mode>(MarkBitFrom(obj));
      54             :   }
      55             : 
      56             :   V8_INLINE bool IsWhite(HeapObject obj) {
      57             :     return Marking::IsWhite<access_mode>(MarkBitFrom(obj));
      58             :   }
      59             : 
      60             :   V8_INLINE bool IsGrey(HeapObject obj) {
      61             :     return Marking::IsGrey<access_mode>(MarkBitFrom(obj));
      62             :   }
      63             : 
      64             :   V8_INLINE bool IsBlackOrGrey(HeapObject obj) {
      65             :     return Marking::IsBlackOrGrey<access_mode>(MarkBitFrom(obj));
      66             :   }
      67             : 
      68             :   V8_INLINE bool WhiteToGrey(HeapObject obj);
      69             :   V8_INLINE bool WhiteToBlack(HeapObject obj);
      70             :   V8_INLINE bool GreyToBlack(HeapObject obj);
      71             : 
      72      531180 :   void ClearLiveness(MemoryChunk* chunk) {
      73      531180 :     static_cast<ConcreteState*>(this)->bitmap(chunk)->Clear();
      74             :     static_cast<ConcreteState*>(this)->SetLiveBytes(chunk, 0);
      75      531181 :   }
      76             : };
      77             : 
      78             : class MarkBitCellIterator {
      79             :  public:
      80     1227812 :   MarkBitCellIterator(MemoryChunk* chunk, Bitmap* bitmap) : chunk_(chunk) {
      81             :     last_cell_index_ =
      82     2455624 :         Bitmap::IndexToCell(chunk_->AddressToMarkbitIndex(chunk_->area_end()));
      83     1227812 :     cell_base_ = chunk_->address();
      84             :     cell_index_ =
      85     1227812 :         Bitmap::IndexToCell(chunk_->AddressToMarkbitIndex(cell_base_));
      86     1227812 :     cells_ = bitmap->cells();
      87             :   }
      88             : 
      89             :   inline bool Done() { return cell_index_ >= last_cell_index_; }
      90             : 
      91             :   inline bool HasNext() { return cell_index_ < last_cell_index_ - 1; }
      92             : 
      93             :   inline MarkBit::CellType* CurrentCell() {
      94             :     DCHECK_EQ(cell_index_, Bitmap::IndexToCell(Bitmap::CellAlignIndex(
      95             :                                chunk_->AddressToMarkbitIndex(cell_base_))));
      96   755080195 :     return &cells_[cell_index_];
      97             :   }
      98             : 
      99             :   inline Address CurrentCellBase() {
     100             :     DCHECK_EQ(cell_index_, Bitmap::IndexToCell(Bitmap::CellAlignIndex(
     101             :                                chunk_->AddressToMarkbitIndex(cell_base_))));
     102             :     return cell_base_;
     103             :   }
     104             : 
     105             :   V8_WARN_UNUSED_RESULT inline bool Advance() {
     106   640095084 :     cell_base_ += Bitmap::kBitsPerCell * kTaggedSize;
     107   640095084 :     return ++cell_index_ != last_cell_index_;
     108             :   }
     109             : 
     110             :   inline bool Advance(unsigned int new_cell_index) {
     111   733864860 :     if (new_cell_index != cell_index_) {
     112             :       DCHECK_GT(new_cell_index, cell_index_);
     113             :       DCHECK_LE(new_cell_index, last_cell_index_);
     114   111290080 :       unsigned int diff = new_cell_index - cell_index_;
     115   111290080 :       cell_index_ = new_cell_index;
     116   111290080 :       cell_base_ += diff * (Bitmap::kBitsPerCell * kTaggedSize);
     117             :       return true;
     118             :     }
     119             :     return false;
     120             :   }
     121             : 
     122             :   // Return the next mark bit cell. If there is no next it returns 0;
     123             :   inline MarkBit::CellType PeekNext() {
     124             :     if (HasNext()) {
     125             :       return cells_[cell_index_ + 1];
     126             :     }
     127             :     return 0;
     128             :   }
     129             : 
     130             :  private:
     131             :   MemoryChunk* chunk_;
     132             :   MarkBit::CellType* cells_;
     133             :   unsigned int last_cell_index_;
     134             :   unsigned int cell_index_;
     135             :   Address cell_base_;
     136             : };
     137             : 
     138             : enum LiveObjectIterationMode {
     139             :   kBlackObjects,
     140             :   kGreyObjects,
     141             :   kAllLiveObjects
     142             : };
     143             : 
     144             : template <LiveObjectIterationMode mode>
     145             : class LiveObjectRange {
     146             :  public:
     147             :   class iterator {
     148             :    public:
     149             :     using value_type = std::pair<HeapObject, int /* size */>;
     150             :     using pointer = const value_type*;
     151             :     using reference = const value_type&;
     152             :     using iterator_category = std::forward_iterator_tag;
     153             : 
     154             :     inline iterator(MemoryChunk* chunk, Bitmap* bitmap, Address start);
     155             : 
     156             :     inline iterator& operator++();
     157             :     inline iterator operator++(int);
     158             : 
     159             :     bool operator==(iterator other) const {
     160             :       return current_object_ == other.current_object_;
     161             :     }
     162             : 
     163             :     bool operator!=(iterator other) const { return !(*this == other); }
     164             : 
     165             :     value_type operator*() {
     166    80553756 :       return std::make_pair(current_object_, current_size_);
     167             :     }
     168             : 
     169             :    private:
     170             :     inline void AdvanceToNextValidObject();
     171             : 
     172             :     MemoryChunk* const chunk_;
     173             :     Map const one_word_filler_map_;
     174             :     Map const two_word_filler_map_;
     175             :     Map const free_space_map_;
     176             :     MarkBitCellIterator it_;
     177             :     Address cell_base_;
     178             :     MarkBit::CellType current_cell_;
     179             :     HeapObject current_object_;
     180             :     int current_size_;
     181             :   };
     182             : 
     183      614328 :   LiveObjectRange(MemoryChunk* chunk, Bitmap* bitmap)
     184             :       : chunk_(chunk),
     185             :         bitmap_(bitmap),
     186      614328 :         start_(chunk_->area_start()),
     187             :         end_(chunk->area_end()) {}
     188             : 
     189             :   inline iterator begin();
     190             :   inline iterator end();
     191             : 
     192             :  private:
     193             :   MemoryChunk* const chunk_;
     194             :   Bitmap* bitmap_;
     195             :   Address start_;
     196             :   Address end_;
     197             : };
     198             : 
     199             : class LiveObjectVisitor : AllStatic {
     200             :  public:
     201             :   enum IterationMode {
     202             :     kKeepMarking,
     203             :     kClearMarkbits,
     204             :   };
     205             : 
     206             :   // Visits black objects on a MemoryChunk until the Visitor returns |false| for
     207             :   // an object. If IterationMode::kClearMarkbits is passed the markbits and
     208             :   // slots for visited objects are cleared for each successfully visited object.
     209             :   template <class Visitor, typename MarkingState>
     210       10399 :   static bool VisitBlackObjects(MemoryChunk* chunk, MarkingState* state,
     211             :                                 Visitor* visitor, IterationMode iteration_mode,
     212             :                                 HeapObject* failed_object);
     213             : 
     214             :   // Visits black objects on a MemoryChunk. The visitor is not allowed to fail
     215             :   // visitation for an object.
     216             :   template <class Visitor, typename MarkingState>
     217       76882 :   static void VisitBlackObjectsNoFail(MemoryChunk* chunk, MarkingState* state,
     218             :                                       Visitor* visitor,
     219             :                                       IterationMode iteration_mode);
     220             : 
     221             :   // Visits black objects on a MemoryChunk. The visitor is not allowed to fail
     222             :   // visitation for an object.
     223             :   template <class Visitor, typename MarkingState>
     224             :   static void VisitGreyObjectsNoFail(MemoryChunk* chunk, MarkingState* state,
     225             :                                      Visitor* visitor,
     226             :                                      IterationMode iteration_mode);
     227             : 
     228             :   template <typename MarkingState>
     229             :   static void RecomputeLiveBytes(MemoryChunk* chunk, MarkingState* state);
     230             : };
     231             : 
     232             : enum PageEvacuationMode { NEW_TO_NEW, NEW_TO_OLD };
     233             : enum MarkingTreatmentMode { KEEP, CLEAR };
     234             : enum class RememberedSetUpdatingMode { ALL, OLD_TO_NEW_ONLY };
     235             : 
     236             : // Base class for minor and full MC collectors.
     237             : class MarkCompactCollectorBase {
     238             :  public:
     239             :   static const int kMainThread = 0;
     240             : 
     241      125736 :   virtual ~MarkCompactCollectorBase() = default;
     242             : 
     243             :   virtual void SetUp() = 0;
     244             :   virtual void TearDown() = 0;
     245             :   virtual void CollectGarbage() = 0;
     246             : 
     247    83597442 :   inline Heap* heap() const { return heap_; }
     248             :   inline Isolate* isolate();
     249             : 
     250             :  protected:
     251             :   explicit MarkCompactCollectorBase(Heap* heap)
     252      125766 :       : heap_(heap), old_to_new_slots_(0) {}
     253             : 
     254             :   // Marking operations for objects reachable from roots.
     255             :   virtual void MarkLiveObjects() = 0;
     256             :   // Mark objects reachable (transitively) from objects in the marking
     257             :   // work list.
     258             :   virtual void ProcessMarkingWorklist() = 0;
     259             :   // Clear non-live references held in side data structures.
     260             :   virtual void ClearNonLiveReferences() = 0;
     261             :   virtual void EvacuatePrologue() = 0;
     262             :   virtual void EvacuateEpilogue() = 0;
     263             :   virtual void Evacuate() = 0;
     264             :   virtual void EvacuatePagesInParallel() = 0;
     265             :   virtual void UpdatePointersAfterEvacuation() = 0;
     266             :   virtual UpdatingItem* CreateToSpaceUpdatingItem(MemoryChunk* chunk,
     267             :                                                   Address start,
     268             :                                                   Address end) = 0;
     269             :   virtual UpdatingItem* CreateRememberedSetUpdatingItem(
     270             :       MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode) = 0;
     271             : 
     272             :   template <class Evacuator, class Collector>
     273             :   void CreateAndExecuteEvacuationTasks(
     274             :       Collector* collector, ItemParallelJob* job,
     275             :       RecordMigratedSlotVisitor* record_visitor,
     276             :       MigrationObserver* migration_observer, const intptr_t live_bytes);
     277             : 
     278             :   // Returns whether this page should be moved according to heuristics.
     279             :   bool ShouldMovePage(Page* p, intptr_t live_bytes);
     280             : 
     281             :   int CollectToSpaceUpdatingItems(ItemParallelJob* job);
     282             :   template <typename IterateableSpace>
     283             :   int CollectRememberedSetUpdatingItems(ItemParallelJob* job,
     284             :                                         IterateableSpace* space,
     285             :                                         RememberedSetUpdatingMode mode);
     286             : 
     287             :   int NumberOfParallelCompactionTasks(int pages);
     288             :   int NumberOfParallelPointerUpdateTasks(int pages, int slots);
     289             :   int NumberOfParallelToSpacePointerUpdateTasks(int pages);
     290             : 
     291             :   Heap* heap_;
     292             :   // Number of old to new slots. Should be computed during MarkLiveObjects.
     293             :   // -1 indicates that the value couldn't be computed.
     294             :   int old_to_new_slots_;
     295             : };
     296             : 
     297             : class MinorMarkingState final
     298             :     : public MarkingStateBase<MinorMarkingState, AccessMode::ATOMIC> {
     299             :  public:
     300           0 :   Bitmap* bitmap(const MemoryChunk* chunk) const {
     301           0 :     return chunk->young_generation_bitmap_;
     302             :   }
     303             : 
     304             :   void IncrementLiveBytes(MemoryChunk* chunk, intptr_t by) {
     305             :     chunk->young_generation_live_byte_count_ += by;
     306             :   }
     307             : 
     308             :   intptr_t live_bytes(MemoryChunk* chunk) const {
     309             :     return chunk->young_generation_live_byte_count_;
     310             :   }
     311             : 
     312             :   void SetLiveBytes(MemoryChunk* chunk, intptr_t value) {
     313             :     chunk->young_generation_live_byte_count_ = value;
     314             :   }
     315             : };
     316             : 
     317             : class MinorNonAtomicMarkingState final
     318             :     : public MarkingStateBase<MinorNonAtomicMarkingState,
     319             :                               AccessMode::NON_ATOMIC> {
     320             :  public:
     321           0 :   Bitmap* bitmap(const MemoryChunk* chunk) const {
     322           0 :     return chunk->young_generation_bitmap_;
     323             :   }
     324             : 
     325             :   void IncrementLiveBytes(MemoryChunk* chunk, intptr_t by) {
     326             :     chunk->young_generation_live_byte_count_ += by;
     327             :   }
     328             : 
     329             :   intptr_t live_bytes(MemoryChunk* chunk) const {
     330             :     return chunk->young_generation_live_byte_count_;
     331             :   }
     332             : 
     333             :   void SetLiveBytes(MemoryChunk* chunk, intptr_t value) {
     334             :     chunk->young_generation_live_byte_count_ = value;
     335             :   }
     336             : };
     337             : 
     338             : // This marking state is used when concurrent marking is running.
     339             : class IncrementalMarkingState final
     340             :     : public MarkingStateBase<IncrementalMarkingState, AccessMode::ATOMIC> {
     341             :  public:
     342  4304315338 :   Bitmap* bitmap(const MemoryChunk* chunk) const {
     343             :     DCHECK_EQ(reinterpret_cast<intptr_t>(&chunk->marking_bitmap_) -
     344             :                   reinterpret_cast<intptr_t>(chunk),
     345             :               MemoryChunk::kMarkBitmapOffset);
     346  4304315338 :     return chunk->marking_bitmap_;
     347             :   }
     348             : 
     349             :   // Concurrent marking uses local live bytes.
     350   223759777 :   void IncrementLiveBytes(MemoryChunk* chunk, intptr_t by) {
     351             :     chunk->live_byte_count_ += by;
     352   223759777 :   }
     353             : 
     354             :   intptr_t live_bytes(MemoryChunk* chunk) const {
     355             :     return chunk->live_byte_count_;
     356             :   }
     357             : 
     358             :   void SetLiveBytes(MemoryChunk* chunk, intptr_t value) {
     359             :     chunk->live_byte_count_ = value;
     360             :   }
     361             : };
     362             : 
     363             : class MajorAtomicMarkingState final
     364             :     : public MarkingStateBase<MajorAtomicMarkingState, AccessMode::ATOMIC> {
     365             :  public:
     366    22661600 :   Bitmap* bitmap(const MemoryChunk* chunk) const {
     367             :     DCHECK_EQ(reinterpret_cast<intptr_t>(&chunk->marking_bitmap_) -
     368             :                   reinterpret_cast<intptr_t>(chunk),
     369             :               MemoryChunk::kMarkBitmapOffset);
     370    22661600 :     return chunk->marking_bitmap_;
     371             :   }
     372             : 
     373     3437609 :   void IncrementLiveBytes(MemoryChunk* chunk, intptr_t by) {
     374             :     chunk->live_byte_count_ += by;
     375     3437609 :   }
     376             : 
     377             :   intptr_t live_bytes(MemoryChunk* chunk) const {
     378             :     return chunk->live_byte_count_;
     379             :   }
     380             : 
     381             :   void SetLiveBytes(MemoryChunk* chunk, intptr_t value) {
     382             :     chunk->live_byte_count_ = value;
     383             :   }
     384             : };
     385             : 
     386             : class MajorNonAtomicMarkingState final
     387             :     : public MarkingStateBase<MajorNonAtomicMarkingState,
     388             :                               AccessMode::NON_ATOMIC> {
     389             :  public:
     390   273929587 :   Bitmap* bitmap(const MemoryChunk* chunk) const {
     391             :     DCHECK_EQ(reinterpret_cast<intptr_t>(&chunk->marking_bitmap_) -
     392             :                   reinterpret_cast<intptr_t>(chunk),
     393             :               MemoryChunk::kMarkBitmapOffset);
     394   273929587 :     return chunk->marking_bitmap_;
     395             :   }
     396             : 
     397       80294 :   void IncrementLiveBytes(MemoryChunk* chunk, intptr_t by) {
     398             :     chunk->live_byte_count_ += by;
     399       80294 :   }
     400             : 
     401             :   intptr_t live_bytes(MemoryChunk* chunk) const {
     402             :     return chunk->live_byte_count_;
     403             :   }
     404             : 
     405             :   void SetLiveBytes(MemoryChunk* chunk, intptr_t value) {
     406             :     chunk->live_byte_count_ = value;
     407             :   }
     408             : };
     409             : 
     410   193208027 : struct Ephemeron {
     411             :   HeapObject key;
     412             :   HeapObject value;
     413             : };
     414             : 
     415             : typedef Worklist<Ephemeron, 64> EphemeronWorklist;
     416             : 
     417             : // Weak objects encountered during marking.
     418      754760 : struct WeakObjects {
     419             :   Worklist<TransitionArray, 64> transition_arrays;
     420             : 
     421             :   // Keep track of all EphemeronHashTables in the heap to process
     422             :   // them in the atomic pause.
     423             :   Worklist<EphemeronHashTable, 64> ephemeron_hash_tables;
     424             : 
     425             :   // Keep track of all ephemerons for concurrent marking tasks. Only store
     426             :   // ephemerons in these Worklists if both key and value are unreachable at the
     427             :   // moment.
     428             :   //
     429             :   // MarkCompactCollector::ProcessEphemeronsUntilFixpoint drains and fills these
     430             :   // worklists.
     431             :   //
     432             :   // current_ephemerons is used as draining worklist in the current fixpoint
     433             :   // iteration.
     434             :   EphemeronWorklist current_ephemerons;
     435             : 
     436             :   // Stores ephemerons to visit in the next fixpoint iteration.
     437             :   EphemeronWorklist next_ephemerons;
     438             : 
     439             :   // When draining the marking worklist new discovered ephemerons are pushed
     440             :   // into this worklist.
     441             :   EphemeronWorklist discovered_ephemerons;
     442             : 
     443             :   // TODO(marja): For old space, we only need the slot, not the host
     444             :   // object. Optimize this by adding a different storage for old space.
     445             :   Worklist<std::pair<HeapObject, HeapObjectSlot>, 64> weak_references;
     446             :   Worklist<std::pair<HeapObject, Code>, 64> weak_objects_in_code;
     447             : 
     448             :   Worklist<JSWeakRef, 64> js_weak_refs;
     449             :   Worklist<JSWeakCell, 64> js_weak_cells;
     450             : 
     451             :   Worklist<SharedFunctionInfo, 64> bytecode_flushing_candidates;
     452             : };
     453             : 
     454             : struct EphemeronMarking {
     455             :   std::vector<HeapObject> newly_discovered;
     456             :   bool newly_discovered_overflowed;
     457             :   size_t newly_discovered_limit;
     458             : };
     459             : 
     460             : // Collector for young and old generation.
     461             : class MarkCompactCollector final : public MarkCompactCollectorBase {
     462             :  public:
     463             : #ifdef V8_CONCURRENT_MARKING
     464             :   using MarkingState = IncrementalMarkingState;
     465             : #else
     466             :   using MarkingState = MajorNonAtomicMarkingState;
     467             : #endif  // V8_CONCURRENT_MARKING
     468             : 
     469             :   using NonAtomicMarkingState = MajorNonAtomicMarkingState;
     470             : 
     471             :   // Wrapper for the shared worklist.
     472       62867 :   class MarkingWorklist {
     473             :    public:
     474             :     using ConcurrentMarkingWorklist = Worklist<HeapObject, 64>;
     475             :     using EmbedderTracingWorklist = Worklist<HeapObject, 16>;
     476             : 
     477             :     // The heap parameter is not used but needed to match the sequential case.
     478      251532 :     explicit MarkingWorklist(Heap* heap) {}
     479             : 
     480   296841969 :     void Push(HeapObject object) {
     481   389962511 :       bool success = shared_.Push(kMainThread, object);
     482             :       USE(success);
     483             :       DCHECK(success);
     484   296841969 :     }
     485             : 
     486   219865382 :     HeapObject Pop() {
     487   219865382 :       HeapObject result;
     488   219865382 :       if (shared_.Pop(kMainThread, &result)) return result;
     489             : #ifdef V8_CONCURRENT_MARKING
     490             :       // The expectation is that this work list is empty almost all the time
     491             :       // and we can thus avoid the emptiness checks by putting it last.
     492     7620151 :       if (on_hold_.Pop(kMainThread, &result)) return result;
     493             : #endif
     494     1860504 :       return HeapObject();
     495             :     }
     496             : 
     497         536 :     void Clear() {
     498         536 :       shared_.Clear();
     499         536 :       on_hold_.Clear();
     500         536 :       embedder_.Clear();
     501         536 :     }
     502             : 
     503     3123210 :     bool IsEmpty() {
     504     3072185 :       return shared_.IsLocalEmpty(kMainThread) &&
     505     3072134 :              on_hold_.IsLocalEmpty(kMainThread) &&
     506     6194630 :              shared_.IsGlobalPoolEmpty() && on_hold_.IsGlobalPoolEmpty();
     507             :     }
     508             : 
     509      250476 :     bool IsEmbedderEmpty() {
     510      500952 :       return embedder_.IsLocalEmpty(kMainThread) &&
     511      250476 :              embedder_.IsGlobalPoolEmpty();
     512             :     }
     513             : 
     514             :     int Size() {
     515             :       return static_cast<int>(shared_.LocalSize(kMainThread) +
     516             :                               on_hold_.LocalSize(kMainThread));
     517             :     }
     518             : 
     519             :     // Calls the specified callback on each element of the deques and replaces
     520             :     // the element with the result of the callback. If the callback returns
     521             :     // nullptr then the element is removed from the deque.
     522             :     // The callback must accept HeapObject and return HeapObject.
     523             :     template <typename Callback>
     524         636 :     void Update(Callback callback) {
     525         636 :       shared_.Update(callback);
     526         636 :       on_hold_.Update(callback);
     527         636 :       embedder_.Update(callback);
     528         636 :     }
     529             : 
     530             :     ConcurrentMarkingWorklist* shared() { return &shared_; }
     531             :     ConcurrentMarkingWorklist* on_hold() { return &on_hold_; }
     532          30 :     EmbedderTracingWorklist* embedder() { return &embedder_; }
     533             : 
     534             :     void Print() {
     535             :       PrintWorklist("shared", &shared_);
     536             :       PrintWorklist("on_hold", &on_hold_);
     537             :     }
     538             : 
     539             :    private:
     540             :     // Prints the stats about the global pool of the worklist.
     541             :     void PrintWorklist(const char* worklist_name,
     542             :                        ConcurrentMarkingWorklist* worklist);
     543             : 
     544             :     // Worklist used for most objects.
     545             :     ConcurrentMarkingWorklist shared_;
     546             : 
     547             :     // Concurrent marking uses this worklist to bail out of marking objects
     548             :     // in new space's linear allocation area. Used to avoid black allocation
     549             :     // for new space. This allow the compiler to remove write barriers
     550             :     // for freshly allocatd objects.
     551             :     ConcurrentMarkingWorklist on_hold_;
     552             : 
     553             :     // Worklist for objects that potentially require embedder tracing, i.e.,
     554             :     // these objects need to be handed over to the embedder to find the full
     555             :     // transitive closure.
     556             :     EmbedderTracingWorklist embedder_;
     557             :   };
     558             : 
     559             :   class RootMarkingVisitor;
     560             :   class CustomRootBodyMarkingVisitor;
     561             : 
     562             :   enum IterationMode {
     563             :     kKeepMarking,
     564             :     kClearMarkbits,
     565             :   };
     566             : 
     567             :   MarkingState* marking_state() { return &marking_state_; }
     568             : 
     569             :   NonAtomicMarkingState* non_atomic_marking_state() {
     570             :     return &non_atomic_marking_state_;
     571             :   }
     572             : 
     573             :   void SetUp() override;
     574             :   void TearDown() override;
     575             :   // Performs a global garbage collection.
     576             :   void CollectGarbage() override;
     577             : 
     578             :   void CollectEvacuationCandidates(PagedSpace* space);
     579             : 
     580             :   void AddEvacuationCandidate(Page* p);
     581             : 
     582             :   // Prepares for GC by resetting relocation info in old and map spaces and
     583             :   // choosing spaces to compact.
     584             :   void Prepare();
     585             : 
     586             :   // Stop concurrent marking (either by preempting it right away or waiting for
     587             :   // it to complete as requested by |stop_request|).
     588             :   void FinishConcurrentMarking(ConcurrentMarking::StopRequest stop_request);
     589             : 
     590             :   bool StartCompaction();
     591             : 
     592             :   void AbortCompaction();
     593             : 
     594     1385341 :   static inline bool IsOnEvacuationCandidate(Object obj) {
     595     1385341 :     return Page::FromAddress(obj->ptr())->IsEvacuationCandidate();
     596             :   }
     597             : 
     598             :   static bool IsOnEvacuationCandidate(MaybeObject obj);
     599             : 
     600             :   struct RecordRelocSlotInfo {
     601             :     MemoryChunk* memory_chunk;
     602             :     SlotType slot_type;
     603             :     bool should_record;
     604             :     uint32_t offset;
     605             :   };
     606             :   static RecordRelocSlotInfo PrepareRecordRelocSlot(Code host, RelocInfo* rinfo,
     607             :                                                     HeapObject target);
     608             :   static void RecordRelocSlot(Code host, RelocInfo* rinfo, HeapObject target);
     609             :   V8_INLINE static void RecordSlot(HeapObject object, ObjectSlot slot,
     610             :                                    HeapObject target);
     611             :   V8_INLINE static void RecordSlot(HeapObject object, HeapObjectSlot slot,
     612             :                                    HeapObject target);
     613             :   void RecordLiveSlotsOnPage(Page* page);
     614             : 
     615             :   void UpdateSlots(SlotsBuffer* buffer);
     616             :   void UpdateSlotsRecordedIn(SlotsBuffer* buffer);
     617             : 
     618             :   bool is_compacting() const { return compacting_; }
     619             : 
     620             :   // Ensures that sweeping is finished.
     621             :   //
     622             :   // Note: Can only be called safely from main thread.
     623             :   void EnsureSweepingCompleted();
     624             : 
     625             :   // Checks if sweeping is in progress right now on any space.
     626      603116 :   bool sweeping_in_progress() const { return sweeper_->sweeping_in_progress(); }
     627             : 
     628      166984 :   void set_evacuation(bool evacuation) { evacuation_ = evacuation; }
     629             : 
     630             :   bool evacuation() const { return evacuation_; }
     631             : 
     632   158606589 :   MarkingWorklist* marking_worklist() { return &marking_worklist_; }
     633             : 
     634             :   WeakObjects* weak_objects() { return &weak_objects_; }
     635             : 
     636             :   inline void AddTransitionArray(TransitionArray array);
     637             : 
     638       16219 :   void AddEphemeronHashTable(EphemeronHashTable table) {
     639       16219 :     weak_objects_.ephemeron_hash_tables.Push(kMainThread, table);
     640       16219 :   }
     641             : 
     642          85 :   void AddEphemeron(HeapObject key, HeapObject value) {
     643             :     weak_objects_.discovered_ephemerons.Push(kMainThread,
     644          85 :                                              Ephemeron{key, value});
     645          85 :   }
     646             : 
     647    10519980 :   void AddWeakReference(HeapObject host, HeapObjectSlot slot) {
     648    10519980 :     weak_objects_.weak_references.Push(kMainThread, std::make_pair(host, slot));
     649    10519980 :   }
     650             : 
     651       68694 :   void AddWeakObjectInCode(HeapObject object, Code code) {
     652             :     weak_objects_.weak_objects_in_code.Push(kMainThread,
     653       68694 :                                             std::make_pair(object, code));
     654       68694 :   }
     655             : 
     656          95 :   void AddWeakRef(JSWeakRef weak_ref) {
     657          95 :     weak_objects_.js_weak_refs.Push(kMainThread, weak_ref);
     658          95 :   }
     659             : 
     660         116 :   void AddWeakCell(JSWeakCell weak_cell) {
     661         116 :     weak_objects_.js_weak_cells.Push(kMainThread, weak_cell);
     662         116 :   }
     663             : 
     664             :   inline void AddBytecodeFlushingCandidate(SharedFunctionInfo flush_candidate);
     665             : 
     666           0 :   void AddNewlyDiscovered(HeapObject object) {
     667           0 :     if (ephemeron_marking_.newly_discovered_overflowed) return;
     668             : 
     669           0 :     if (ephemeron_marking_.newly_discovered.size() <
     670             :         ephemeron_marking_.newly_discovered_limit) {
     671           0 :       ephemeron_marking_.newly_discovered.push_back(object);
     672             :     } else {
     673           0 :       ephemeron_marking_.newly_discovered_overflowed = true;
     674             :     }
     675             :   }
     676             : 
     677             :   void ResetNewlyDiscovered() {
     678           0 :     ephemeron_marking_.newly_discovered_overflowed = false;
     679             :     ephemeron_marking_.newly_discovered.clear();
     680             :   }
     681             : 
     682             :   Sweeper* sweeper() { return sweeper_; }
     683             : 
     684             : #ifdef DEBUG
     685             :   // Checks whether performing mark-compact collection.
     686             :   bool in_use() { return state_ > PREPARE_GC; }
     687             :   bool are_map_pointers_encoded() { return state_ == UPDATE_POINTERS; }
     688             : #endif
     689             : 
     690             :   void VerifyMarking();
     691             : #ifdef VERIFY_HEAP
     692             :   void VerifyValidStoreAndSlotsBufferEntries();
     693             :   void VerifyMarkbitsAreClean();
     694             :   void VerifyMarkbitsAreDirty(PagedSpace* space);
     695             :   void VerifyMarkbitsAreClean(PagedSpace* space);
     696             :   void VerifyMarkbitsAreClean(NewSpace* space);
     697             :   void VerifyMarkbitsAreClean(LargeObjectSpace* space);
     698             : #endif
     699             : 
     700    83597443 :   unsigned epoch() const { return epoch_; }
     701             : 
     702             :  private:
     703             :   explicit MarkCompactCollector(Heap* heap);
     704             :   ~MarkCompactCollector() override;
     705             : 
     706             :   void ComputeEvacuationHeuristics(size_t area_size,
     707             :                                    int* target_fragmentation_percent,
     708             :                                    size_t* max_evacuated_bytes);
     709             : 
     710             :   void RecordObjectStats();
     711             : 
     712             :   // Finishes GC, performs heap verification if enabled.
     713             :   void Finish();
     714             : 
     715             :   void MarkLiveObjects() override;
     716             : 
     717             :   // Marks the object black and adds it to the marking work list.
     718             :   // This is for non-incremental marking only.
     719             :   V8_INLINE void MarkObject(HeapObject host, HeapObject obj);
     720             : 
     721             :   // Marks the object black and adds it to the marking work list.
     722             :   // This is for non-incremental marking only.
     723             :   V8_INLINE void MarkRootObject(Root root, HeapObject obj);
     724             : 
     725             :   // Used by wrapper tracing.
     726             :   V8_INLINE void MarkExternallyReferencedObject(HeapObject obj);
     727             : 
     728             :   // Mark the heap roots and all objects reachable from them.
     729             :   void MarkRoots(RootVisitor* root_visitor,
     730             :                  ObjectVisitor* custom_root_body_visitor);
     731             : 
     732             :   // Mark the string table specially.  References to internalized strings from
     733             :   // the string table are weak.
     734             :   void MarkStringTable(ObjectVisitor* visitor);
     735             : 
     736             :   // Marks object reachable from harmony weak maps and wrapper tracing.
     737             :   void ProcessEphemeronMarking();
     738             : 
     739             :   // If the call-site of the top optimized code was not prepared for
     740             :   // deoptimization, then treat embedded pointers in the code as strong as
     741             :   // otherwise they can die and try to deoptimize the underlying code.
     742             :   void ProcessTopOptimizedFrame(ObjectVisitor* visitor);
     743             : 
     744             :   // Drains the main thread marking work list. Will mark all pending objects
     745             :   // if no concurrent threads are running.
     746             :   void ProcessMarkingWorklist() override;
     747             : 
     748             :   enum class MarkingWorklistProcessingMode {
     749             :     kDefault,
     750             :     kTrackNewlyDiscoveredObjects
     751             :   };
     752             : 
     753             :   template <MarkingWorklistProcessingMode mode>
     754             :   void ProcessMarkingWorklistInternal();
     755             : 
     756             :   // Implements ephemeron semantics: Marks value if key is already reachable.
     757             :   // Returns true if value was actually marked.
     758             :   bool VisitEphemeron(HeapObject key, HeapObject value);
     759             : 
     760             :   // Marks ephemerons and drains marking worklist iteratively
     761             :   // until a fixpoint is reached.
     762             :   void ProcessEphemeronsUntilFixpoint();
     763             : 
     764             :   // Drains ephemeron and marking worklists. Single iteration of the
     765             :   // fixpoint iteration.
     766             :   bool ProcessEphemerons();
     767             : 
     768             :   // Mark ephemerons and drain marking worklist with a linear algorithm.
     769             :   // Only used if fixpoint iteration doesn't finish within a few iterations.
     770             :   void ProcessEphemeronsLinear();
     771             : 
     772             :   // Perform Wrapper Tracing if in use.
     773             :   void PerformWrapperTracing();
     774             : 
     775             :   // Callback function for telling whether the object *p is an unmarked
     776             :   // heap object.
     777             :   static bool IsUnmarkedHeapObject(Heap* heap, FullObjectSlot p);
     778             : 
     779             :   // Clear non-live references in weak cells, transition and descriptor arrays,
     780             :   // and deoptimize dependent code of non-live maps.
     781             :   void ClearNonLiveReferences() override;
     782             :   void MarkDependentCodeForDeoptimization();
     783             :   // Checks if the given weak cell is a simple transition from the parent map
     784             :   // of the given dead target. If so it clears the transition and trims
     785             :   // the descriptor array of the parent if needed.
     786             :   void ClearPotentialSimpleMapTransition(Map dead_target);
     787             :   void ClearPotentialSimpleMapTransition(Map map, Map dead_target);
     788             : 
     789             :   // Flushes a weakly held bytecode array from a shared function info.
     790             :   void FlushBytecodeFromSFI(SharedFunctionInfo shared_info);
     791             : 
     792             :   // Clears bytecode arrays that have not been executed for multiple
     793             :   // collections.
     794             :   void ClearOldBytecodeCandidates();
     795             : 
     796             :   // Compact every array in the global list of transition arrays and
     797             :   // trim the corresponding descriptor array if a transition target is non-live.
     798             :   void ClearFullMapTransitions();
     799             :   void TrimDescriptorArray(Map map, DescriptorArray descriptors);
     800             :   void TrimEnumCache(Map map, DescriptorArray descriptors);
     801             :   bool CompactTransitionArray(Map map, TransitionArray transitions,
     802             :                               DescriptorArray descriptors);
     803             : 
     804             :   // After all reachable objects have been marked those weak map entries
     805             :   // with an unreachable key are removed from all encountered weak maps.
     806             :   // The linked list of all encountered weak maps is destroyed.
     807             :   void ClearWeakCollections();
     808             : 
     809             :   // Goes through the list of encountered weak references and clears those with
     810             :   // dead values. If the value is a dead map and the parent map transitions to
     811             :   // the dead map via weak cell, then this function also clears the map
     812             :   // transition.
     813             :   void ClearWeakReferences();
     814             : 
     815             :   // Goes through the list of encountered JSWeakCells and clears those with dead
     816             :   // values.
     817             :   void ClearJSWeakCells();
     818             : 
     819             :   void AbortWeakObjects();
     820             : 
     821             :   // Starts sweeping of spaces by contributing on the main thread and setting
     822             :   // up other pages for sweeping. Does not start sweeper tasks.
     823             :   void StartSweepSpaces();
     824             :   void StartSweepSpace(PagedSpace* space);
     825             : 
     826             :   void EvacuatePrologue() override;
     827             :   void EvacuateEpilogue() override;
     828             :   void Evacuate() override;
     829             :   void EvacuatePagesInParallel() override;
     830             :   void UpdatePointersAfterEvacuation() override;
     831             : 
     832             :   UpdatingItem* CreateToSpaceUpdatingItem(MemoryChunk* chunk, Address start,
     833             :                                           Address end) override;
     834             :   UpdatingItem* CreateRememberedSetUpdatingItem(
     835             :       MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode) override;
     836             : 
     837             :   int CollectNewSpaceArrayBufferTrackerItems(ItemParallelJob* job);
     838             :   int CollectOldSpaceArrayBufferTrackerItems(ItemParallelJob* job);
     839             : 
     840             :   void ReleaseEvacuationCandidates();
     841             :   void PostProcessEvacuationCandidates();
     842             :   void ReportAbortedEvacuationCandidate(HeapObject failed_object,
     843             :                                         MemoryChunk* chunk);
     844             : 
     845             :   static const int kEphemeronChunkSize = 8 * KB;
     846             : 
     847             :   int NumberOfParallelEphemeronVisitingTasks(size_t elements);
     848             : 
     849             :   void RightTrimDescriptorArray(DescriptorArray array, int descriptors_to_trim);
     850             : 
     851             :   base::Mutex mutex_;
     852             :   base::Semaphore page_parallel_job_semaphore_;
     853             : 
     854             : #ifdef DEBUG
     855             :   enum CollectorState {
     856             :     IDLE,
     857             :     PREPARE_GC,
     858             :     MARK_LIVE_OBJECTS,
     859             :     SWEEP_SPACES,
     860             :     ENCODE_FORWARDING_ADDRESSES,
     861             :     UPDATE_POINTERS,
     862             :     RELOCATE_OBJECTS
     863             :   };
     864             : 
     865             :   // The current stage of the collector.
     866             :   CollectorState state_;
     867             : #endif
     868             : 
     869             :   bool was_marked_incrementally_;
     870             : 
     871             :   bool evacuation_;
     872             : 
     873             :   // True if we are collecting slots to perform evacuation from evacuation
     874             :   // candidates.
     875             :   bool compacting_;
     876             : 
     877             :   bool black_allocation_;
     878             : 
     879             :   bool have_code_to_deoptimize_;
     880             : 
     881             :   MarkingWorklist marking_worklist_;
     882             :   WeakObjects weak_objects_;
     883             :   EphemeronMarking ephemeron_marking_;
     884             : 
     885             :   // Candidates for pages that should be evacuated.
     886             :   std::vector<Page*> evacuation_candidates_;
     887             :   // Pages that are actually processed during evacuation.
     888             :   std::vector<Page*> old_space_evacuation_pages_;
     889             :   std::vector<Page*> new_space_evacuation_pages_;
     890             :   std::vector<std::pair<HeapObject, Page*>> aborted_evacuation_candidates_;
     891             : 
     892             :   Sweeper* sweeper_;
     893             : 
     894             :   MarkingState marking_state_;
     895             :   NonAtomicMarkingState non_atomic_marking_state_;
     896             : 
     897             :   // Counts the number of mark-compact collections. This is used for marking
     898             :   // descriptor arrays. See NumberOfMarkedDescriptors. Only lower two bits are
     899             :   // used, so it is okay if this counter overflows and wraps around.
     900             :   unsigned epoch_ = 0;
     901             : 
     902             :   friend class EphemeronHashTableMarkingTask;
     903             :   friend class FullEvacuator;
     904             :   friend class Heap;
     905             :   friend class RecordMigratedSlotVisitor;
     906             : };
     907             : 
     908             : template <FixedArrayVisitationMode fixed_array_mode,
     909             :           TraceRetainingPathMode retaining_path_mode, typename MarkingState>
     910   121873988 : class MarkingVisitor final
     911             :     : public HeapVisitor<
     912             :           int,
     913             :           MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>> {
     914             :  public:
     915             :   typedef HeapVisitor<
     916             :       int, MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>>
     917             :       Parent;
     918             : 
     919             :   V8_INLINE MarkingVisitor(MarkCompactCollector* collector,
     920             :                            MarkingState* marking_state);
     921             : 
     922             :   V8_INLINE bool ShouldVisitMapPointer() { return false; }
     923             : 
     924             :   V8_INLINE int VisitBytecodeArray(Map map, BytecodeArray object);
     925             :   V8_INLINE int VisitDescriptorArray(Map map, DescriptorArray object);
     926             :   V8_INLINE int VisitEphemeronHashTable(Map map, EphemeronHashTable object);
     927             :   V8_INLINE int VisitFixedArray(Map map, FixedArray object);
     928             :   V8_INLINE int VisitJSApiObject(Map map, JSObject object);
     929             :   V8_INLINE int VisitJSArrayBuffer(Map map, JSArrayBuffer object);
     930             :   V8_INLINE int VisitJSDataView(Map map, JSDataView object);
     931             :   V8_INLINE int VisitJSTypedArray(Map map, JSTypedArray object);
     932             :   V8_INLINE int VisitMap(Map map, Map object);
     933             :   V8_INLINE int VisitSharedFunctionInfo(Map map, SharedFunctionInfo object);
     934             :   V8_INLINE int VisitTransitionArray(Map map, TransitionArray object);
     935             :   V8_INLINE int VisitJSWeakCell(Map map, JSWeakCell object);
     936             :   V8_INLINE int VisitJSWeakRef(Map map, JSWeakRef object);
     937             : 
     938             :   // ObjectVisitor implementation.
     939           0 :   V8_INLINE void VisitPointer(HeapObject host, ObjectSlot p) final {
     940             :     VisitPointerImpl(host, p);
     941           0 :   }
     942           0 :   V8_INLINE void VisitPointer(HeapObject host, MaybeObjectSlot p) final {
     943             :     VisitPointerImpl(host, p);
     944           0 :   }
     945           0 :   V8_INLINE void VisitPointers(HeapObject host, ObjectSlot start,
     946             :                                ObjectSlot end) final {
     947             :     VisitPointersImpl(host, start, end);
     948           0 :   }
     949           0 :   V8_INLINE void VisitPointers(HeapObject host, MaybeObjectSlot start,
     950             :                                MaybeObjectSlot end) final {
     951             :     VisitPointersImpl(host, start, end);
     952           0 :   }
     953             :   V8_INLINE void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) final;
     954             :   V8_INLINE void VisitCodeTarget(Code host, RelocInfo* rinfo) final;
     955             : 
     956             :   // Weak list pointers should be ignored during marking. The lists are
     957             :   // reconstructed after GC.
     958    19495844 :   void VisitCustomWeakPointers(HeapObject host, ObjectSlot start,
     959    19495844 :                                ObjectSlot end) final {}
     960             : 
     961             :   V8_INLINE void VisitDescriptors(DescriptorArray descriptors,
     962             :                                   int number_of_own_descriptors);
     963             :   // Marks the descriptor array black without pushing it on the marking work
     964             :   // list and visits its header.
     965             :   V8_INLINE void MarkDescriptorArrayBlack(HeapObject host,
     966             :                                           DescriptorArray descriptors);
     967             : 
     968             :  private:
     969             :   // Granularity in which FixedArrays are scanned if |fixed_array_mode|
     970             :   // is true.
     971             :   static const int kProgressBarScanningChunk = 32 * KB;
     972             : 
     973             :   template <typename TSlot>
     974             :   V8_INLINE void VisitPointerImpl(HeapObject host, TSlot p);
     975             : 
     976             :   template <typename TSlot>
     977             :   V8_INLINE void VisitPointersImpl(HeapObject host, TSlot start, TSlot end);
     978             : 
     979             :   V8_INLINE int VisitFixedArrayIncremental(Map map, FixedArray object);
     980             : 
     981             :   template <typename T>
     982             :   V8_INLINE int VisitEmbedderTracingSubclass(Map map, T object);
     983             : 
     984             :   // Marks the object grey and pushes it on the marking work list.
     985             :   V8_INLINE void MarkObject(HeapObject host, HeapObject obj);
     986             : 
     987  2439219345 :   MarkingState* marking_state() { return marking_state_; }
     988             : 
     989   138235410 :   MarkCompactCollector::MarkingWorklist* marking_worklist() const {
     990   138235410 :     return collector_->marking_worklist();
     991             :   }
     992             : 
     993             :   Heap* const heap_;
     994             :   MarkCompactCollector* const collector_;
     995             :   MarkingState* const marking_state_;
     996             :   const unsigned mark_compact_epoch_;
     997             : };
     998             : 
     999             : class EvacuationScope {
    1000             :  public:
    1001             :   explicit EvacuationScope(MarkCompactCollector* collector)
    1002             :       : collector_(collector) {
    1003             :     collector_->set_evacuation(true);
    1004             :   }
    1005             : 
    1006             :   ~EvacuationScope() { collector_->set_evacuation(false); }
    1007             : 
    1008             :  private:
    1009             :   MarkCompactCollector* collector_;
    1010             : };
    1011             : 
    1012             : #ifdef ENABLE_MINOR_MC
    1013             : 
    1014             : // Collector for young-generation only.
    1015             : class MinorMarkCompactCollector final : public MarkCompactCollectorBase {
    1016             :  public:
    1017             :   using MarkingState = MinorMarkingState;
    1018             :   using NonAtomicMarkingState = MinorNonAtomicMarkingState;
    1019             : 
    1020             :   explicit MinorMarkCompactCollector(Heap* heap);
    1021             :   ~MinorMarkCompactCollector() override;
    1022             : 
    1023             :   MarkingState* marking_state() { return &marking_state_; }
    1024             : 
    1025             :   NonAtomicMarkingState* non_atomic_marking_state() {
    1026             :     return &non_atomic_marking_state_;
    1027             :   }
    1028             : 
    1029             :   void SetUp() override;
    1030             :   void TearDown() override;
    1031             :   void CollectGarbage() override;
    1032             : 
    1033             :   void MakeIterable(Page* page, MarkingTreatmentMode marking_mode,
    1034             :                     FreeSpaceTreatmentMode free_space_mode);
    1035             :   void CleanupSweepToIteratePages();
    1036             : 
    1037             :  private:
    1038             :   using MarkingWorklist = Worklist<HeapObject, 64 /* segment size */>;
    1039             :   class RootMarkingVisitor;
    1040             : 
    1041             :   static const int kNumMarkers = 8;
    1042             :   static const int kMainMarker = 0;
    1043             : 
    1044             :   inline MarkingWorklist* worklist() { return worklist_; }
    1045             : 
    1046             :   inline YoungGenerationMarkingVisitor* main_marking_visitor() {
    1047             :     return main_marking_visitor_;
    1048             :   }
    1049             : 
    1050             :   void MarkLiveObjects() override;
    1051             :   void MarkRootSetInParallel(RootMarkingVisitor* root_visitor);
    1052             :   V8_INLINE void MarkRootObject(HeapObject obj);
    1053             :   void ProcessMarkingWorklist() override;
    1054             :   void ClearNonLiveReferences() override;
    1055             : 
    1056             :   void EvacuatePrologue() override;
    1057             :   void EvacuateEpilogue() override;
    1058             :   void Evacuate() override;
    1059             :   void EvacuatePagesInParallel() override;
    1060             :   void UpdatePointersAfterEvacuation() override;
    1061             : 
    1062             :   UpdatingItem* CreateToSpaceUpdatingItem(MemoryChunk* chunk, Address start,
    1063             :                                           Address end) override;
    1064             :   UpdatingItem* CreateRememberedSetUpdatingItem(
    1065             :       MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode) override;
    1066             : 
    1067             :   int CollectNewSpaceArrayBufferTrackerItems(ItemParallelJob* job);
    1068             : 
    1069             :   int NumberOfParallelMarkingTasks(int pages);
    1070             : 
    1071             :   MarkingWorklist* worklist_;
    1072             : 
    1073             :   YoungGenerationMarkingVisitor* main_marking_visitor_;
    1074             :   base::Semaphore page_parallel_job_semaphore_;
    1075             :   std::vector<Page*> new_space_evacuation_pages_;
    1076             :   std::vector<Page*> sweep_to_iterate_pages_;
    1077             : 
    1078             :   MarkingState marking_state_;
    1079             :   NonAtomicMarkingState non_atomic_marking_state_;
    1080             : 
    1081             :   friend class YoungGenerationMarkingTask;
    1082             :   friend class YoungGenerationMarkingVisitor;
    1083             : };
    1084             : 
    1085             : #endif  // ENABLE_MINOR_MC
    1086             : 
    1087             : }  // namespace internal
    1088             : }  // namespace v8
    1089             : 
    1090             : #endif  // V8_HEAP_MARK_COMPACT_H_

Generated by: LCOV version 1.10