LCOV - code coverage report
Current view: top level - src/heap - mark-compact.h (source / functions)
Test: app.info
Date: 2019-02-19
                 Hit    Total    Coverage
Lines:           100      121      82.6 %
Functions:        45       71      63.4 %

          Line data    Source code
       1             : // Copyright 2012 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #ifndef V8_HEAP_MARK_COMPACT_H_
       6             : #define V8_HEAP_MARK_COMPACT_H_
       7             : 
       8             : #include <atomic>
       9             : #include <vector>
      10             : 
      11             : #include "src/heap/concurrent-marking.h"
      12             : #include "src/heap/marking.h"
      13             : #include "src/heap/objects-visiting.h"
      14             : #include "src/heap/spaces.h"
      15             : #include "src/heap/sweeper.h"
      16             : #include "src/heap/worklist.h"
      17             : #include "src/objects/heap-object.h"   // For Worklist<HeapObject, ...>
      18             : #include "src/objects/js-weak-refs.h"  // For Worklist<WeakCell, ...>
      19             : 
      20             : namespace v8 {
      21             : namespace internal {
      22             : 
      23             : // Forward declarations.
      24             : class EvacuationJobTraits;
      25             : class HeapObjectVisitor;
      26             : class ItemParallelJob;
      27             : class MigrationObserver;
      28             : class RecordMigratedSlotVisitor;
      29             : class UpdatingItem;
      30             : class YoungGenerationMarkingVisitor;
      31             : 
      32             : template <typename ConcreteState, AccessMode access_mode>
      33             : class MarkingStateBase {
      34             :  public:
      35             :   V8_INLINE MarkBit MarkBitFrom(HeapObject obj) {
      36  7486987019 :     return MarkBitFrom(MemoryChunk::FromHeapObject(obj), obj->ptr());
      37             :   }
      38             : 
      39             :   // {addr} may be tagged or aligned.
      40  8215136058 :   V8_INLINE MarkBit MarkBitFrom(MemoryChunk* p, Address addr) {
      41             :     return static_cast<ConcreteState*>(this)->bitmap(p)->MarkBitFromIndex(
      42 16431786370 :         p->AddressToMarkbitIndex(addr));
      43             :   }
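                     :
                     :   // Sketch of the index arithmetic behind the call above (illustrative;
                     :   // the real mapping is MemoryChunk::AddressToMarkbitIndex, and
                     :   // {chunk_start} here names the chunk's base address): one mark bit
                     :   // covers one tagged word, so the bitmap index is simply the word
                     :   // offset of {addr} within its chunk:
                     :   //
                     :   //   uint32_t index = static_cast<uint32_t>(addr - chunk_start)
                     :   //                    >> kTaggedSizeLog2;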
      44             : 
      45           0 :   Marking::ObjectColor Color(HeapObject obj) {
      46           0 :     return Marking::Color(MarkBitFrom(obj));
      47             :   }
      48             : 
      49             :   V8_INLINE bool IsImpossible(HeapObject obj) {
      50             :     return Marking::IsImpossible<access_mode>(MarkBitFrom(obj));
      51             :   }
      52             : 
      53             :   V8_INLINE bool IsBlack(HeapObject obj) {
      54             :     return Marking::IsBlack<access_mode>(MarkBitFrom(obj));
      55             :   }
      56             : 
      57             :   V8_INLINE bool IsWhite(HeapObject obj) {
      58             :     return Marking::IsWhite<access_mode>(MarkBitFrom(obj));
      59             :   }
      60             : 
      61             :   V8_INLINE bool IsGrey(HeapObject obj) {
      62             :     return Marking::IsGrey<access_mode>(MarkBitFrom(obj));
      63             :   }
      64             : 
      65             :   V8_INLINE bool IsBlackOrGrey(HeapObject obj) {
      66             :     return Marking::IsBlackOrGrey<access_mode>(MarkBitFrom(obj));
      67             :   }
      68             : 
      69             :   V8_INLINE bool WhiteToGrey(HeapObject obj);
      70             :   V8_INLINE bool WhiteToBlack(HeapObject obj);
      71             :   V8_INLINE bool GreyToBlack(HeapObject obj);
      72             : 
      73      502441 :   void ClearLiveness(MemoryChunk* chunk) {
      74      502441 :     static_cast<ConcreteState*>(this)->bitmap(chunk)->Clear();
      75             :     static_cast<ConcreteState*>(this)->SetLiveBytes(chunk, 0);
      76           0 :   }
      77             : };
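                     :
                     : // A minimal usage sketch of the tri-color transitions declared above: a
                     : // marker takes {obj} from white to grey and publishes it for draining only
                     : // if it won the transition. The worklist type, its single-argument Push,
                     : // and the function name are all illustrative, not part of this header.
                     : template <typename MarkingState, typename MarkingWorklist>
                     : void MarkGreySketch(MarkingState* state, MarkingWorklist* worklist,
                     :                     HeapObject obj) {
                     :   if (state->WhiteToGrey(obj)) {
                     :     // With an atomic marking state, exactly one thread succeeds here, so
                     :     // {obj} is pushed at most once.
                     :     worklist->Push(obj);
                     :   }
                     : }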
      78             : 
      79             : class MarkBitCellIterator {
      80             :  public:
      81     1120146 :   MarkBitCellIterator(MemoryChunk* chunk, Bitmap* bitmap) : chunk_(chunk) {
      82             :     last_cell_index_ =
      83     2240292 :         Bitmap::IndexToCell(chunk_->AddressToMarkbitIndex(chunk_->area_end()));
      84     1120146 :     cell_base_ = chunk_->address();
      85             :     cell_index_ =
      86     1120146 :         Bitmap::IndexToCell(chunk_->AddressToMarkbitIndex(cell_base_));
      87     1120146 :     cells_ = bitmap->cells();
      88             :   }
      89             : 
      90             :   inline bool Done() { return cell_index_ >= last_cell_index_; }
      91             : 
      92             :   inline bool HasNext() { return cell_index_ < last_cell_index_ - 1; }
      93             : 
      94             :   inline MarkBit::CellType* CurrentCell() {
      95             :     DCHECK_EQ(cell_index_, Bitmap::IndexToCell(Bitmap::CellAlignIndex(
      96             :                                chunk_->AddressToMarkbitIndex(cell_base_))));
      97   690702601 :     return &cells_[cell_index_];
      98             :   }
      99             : 
     100             :   inline Address CurrentCellBase() {
     101             :     DCHECK_EQ(cell_index_, Bitmap::IndexToCell(Bitmap::CellAlignIndex(
     102             :                                chunk_->AddressToMarkbitIndex(cell_base_))));
     103             :     return cell_base_;
     104             :   }
     105             : 
     106             :   V8_WARN_UNUSED_RESULT inline bool Advance() {
     107   587461481 :     cell_base_ += Bitmap::kBitsPerCell * kTaggedSize;
     108   587461481 :     return ++cell_index_ != last_cell_index_;
     109             :   }
     110             : 
     111             :   inline bool Advance(unsigned int new_cell_index) {
     112   665735485 :     if (new_cell_index != cell_index_) {
     113             :       DCHECK_GT(new_cell_index, cell_index_);
     114             :       DCHECK_LE(new_cell_index, last_cell_index_);
     115    99326653 :       unsigned int diff = new_cell_index - cell_index_;
     116    99326653 :       cell_index_ = new_cell_index;
     117    99326653 :       cell_base_ += diff * (Bitmap::kBitsPerCell * kTaggedSize);
     118             :       return true;
     119             :     }
     120             :     return false;
     121             :   }
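                     :
                     :   // Worked example for the strides above: with Bitmap::kBitsPerCell == 32
                     :   // and kTaggedSize == 8 (a common 64-bit configuration; values here are
                     :   // illustrative), each cell covers 32 * 8 = 256 bytes of the chunk, so
                     :   // advancing by {diff} cells moves cell_base_ forward by diff * 256 bytes.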
     122             : 
      123             :   // Returns the next mark bit cell, or 0 if there is no next cell.
     124             :   inline MarkBit::CellType PeekNext() {
     125             :     if (HasNext()) {
     126             :       return cells_[cell_index_ + 1];
     127             :     }
     128             :     return 0;
     129             :   }
     130             : 
     131             :  private:
     132             :   MemoryChunk* chunk_;
     133             :   MarkBit::CellType* cells_;
     134             :   unsigned int last_cell_index_;
     135             :   unsigned int cell_index_;
     136             :   Address cell_base_;
     137             : };
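                     :
                     : // Usage sketch for the iterator above (the function name is illustrative):
                     : // walk a chunk's mark bitmap cell by cell, counting non-empty cells.
                     : inline size_t CountNonEmptyCellsSketch(MemoryChunk* chunk, Bitmap* bitmap) {
                     :   size_t non_empty = 0;
                     :   for (MarkBitCellIterator it(chunk, bitmap); !it.Done();) {
                     :     if (*it.CurrentCell() != 0) non_empty++;
                     :     // Advance() returns false once the last cell has been reached.
                     :     if (!it.Advance()) break;
                     :   }
                     :   return non_empty;
                     : }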
     138             : 
     139             : enum LiveObjectIterationMode {
     140             :   kBlackObjects,
     141             :   kGreyObjects,
     142             :   kAllLiveObjects
     143             : };
     144             : 
     145             : template <LiveObjectIterationMode mode>
     146             : class LiveObjectRange {
     147             :  public:
     148             :   class iterator {
     149             :    public:
     150             :     using value_type = std::pair<HeapObject, int /* size */>;
     151             :     using pointer = const value_type*;
     152             :     using reference = const value_type&;
     153             :     using iterator_category = std::forward_iterator_tag;
     154             : 
     155             :     inline iterator(MemoryChunk* chunk, Bitmap* bitmap, Address start);
     156             : 
     157             :     inline iterator& operator++();
     158             :     inline iterator operator++(int);
     159             : 
     160             :     bool operator==(iterator other) const {
     161             :       return current_object_ == other.current_object_;
     162             :     }
     163             : 
     164             :     bool operator!=(iterator other) const { return !(*this == other); }
     165             : 
     166             :     value_type operator*() {
     167    81185465 :       return std::make_pair(current_object_, current_size_);
     168             :     }
     169             : 
     170             :    private:
     171             :     inline void AdvanceToNextValidObject();
     172             : 
     173             :     MemoryChunk* const chunk_;
     174             :     Map const one_word_filler_map_;
     175             :     Map const two_word_filler_map_;
     176             :     Map const free_space_map_;
     177             :     MarkBitCellIterator it_;
     178             :     Address cell_base_;
     179             :     MarkBit::CellType current_cell_;
     180             :     HeapObject current_object_;
     181             :     int current_size_;
     182             :   };
     183             : 
     184      560610 :   LiveObjectRange(MemoryChunk* chunk, Bitmap* bitmap)
     185             :       : chunk_(chunk),
     186             :         bitmap_(bitmap),
     187      560610 :         start_(chunk_->area_start()),
     188             :         end_(chunk->area_end()) {
     189             :     DCHECK(!chunk->IsLargePage());
     190             :   }
     191             : 
     192             :   inline iterator begin();
     193             :   inline iterator end();
     194             : 
     195             :  private:
     196             :   MemoryChunk* const chunk_;
     197             :   Bitmap* bitmap_;
     198             :   Address start_;
     199             :   Address end_;
     200             : };
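                     :
                     : // Usage sketch: LiveObjectRange is built for range-based for loops over
                     : // (object, size) pairs, e.g. re-deriving a chunk's live byte count from
                     : // its mark bits, similar in spirit to
                     : // LiveObjectVisitor::RecomputeLiveBytes below. The function name is
                     : // illustrative.
                     : template <typename MarkingState>
                     : intptr_t SumBlackLiveBytesSketch(MemoryChunk* chunk, MarkingState* state) {
                     :   intptr_t live_bytes = 0;
                     :   for (auto object_and_size :
                     :        LiveObjectRange<kBlackObjects>(chunk, state->bitmap(chunk))) {
                     :     live_bytes += object_and_size.second;  // second is the size in bytes.
                     :   }
                     :   return live_bytes;
                     : }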
     201             : 
     202             : class LiveObjectVisitor : AllStatic {
     203             :  public:
     204             :   enum IterationMode {
     205             :     kKeepMarking,
     206             :     kClearMarkbits,
     207             :   };
     208             : 
      209             :   // Visits black objects on a MemoryChunk until the visitor returns |false|
      210             :   // for an object. If IterationMode::kClearMarkbits is passed, the mark bits
      211             :   // and recorded slots of each successfully visited object are cleared.
     212             :   template <class Visitor, typename MarkingState>
     213       10399 :   static bool VisitBlackObjects(MemoryChunk* chunk, MarkingState* state,
     214             :                                 Visitor* visitor, IterationMode iteration_mode,
     215             :                                 HeapObject* failed_object);
     216             : 
     217             :   // Visits black objects on a MemoryChunk. The visitor is not allowed to fail
     218             :   // visitation for an object.
     219             :   template <class Visitor, typename MarkingState>
     220      137060 :   static void VisitBlackObjectsNoFail(MemoryChunk* chunk, MarkingState* state,
     221             :                                       Visitor* visitor,
     222             :                                       IterationMode iteration_mode);
     223             : 
      224             :   // Visits grey objects on a MemoryChunk. The visitor is not allowed to fail
     225             :   // visitation for an object.
     226             :   template <class Visitor, typename MarkingState>
     227             :   static void VisitGreyObjectsNoFail(MemoryChunk* chunk, MarkingState* state,
     228             :                                      Visitor* visitor,
     229             :                                      IterationMode iteration_mode);
     230             : 
     231             :   template <typename MarkingState>
     232             :   static void RecomputeLiveBytes(MemoryChunk* chunk, MarkingState* state);
     233             : };
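                     :
                     : // Sketch of the failure protocol of VisitBlackObjects above: on failure the
                     : // caller learns which object could not be visited, e.g. so the page can be
                     : // recorded as an aborted evacuation candidate. Visitor type and function
                     : // name are illustrative.
                     : template <class Visitor, typename MarkingState>
                     : bool TryVisitPageSketch(MemoryChunk* chunk, MarkingState* state,
                     :                         Visitor* visitor) {
                     :   HeapObject failed_object;
                     :   if (!LiveObjectVisitor::VisitBlackObjects(
                     :           chunk, state, visitor, LiveObjectVisitor::kClearMarkbits,
                     :           &failed_object)) {
                     :     // {failed_object} identifies the object the visitor rejected.
                     :     return false;
                     :   }
                     :   return true;
                     : }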
     234             : 
     235             : enum PageEvacuationMode { NEW_TO_NEW, NEW_TO_OLD };
     236             : enum MarkingTreatmentMode { KEEP, CLEAR };
     237             : enum class RememberedSetUpdatingMode { ALL, OLD_TO_NEW_ONLY };
     238             : 
     239             : // Base class for minor and full MC collectors.
     240             : class MarkCompactCollectorBase {
     241             :  public:
     242             :   static const int kMainThread = 0;
     243             : 
     244      122068 :   virtual ~MarkCompactCollectorBase() = default;
     245             : 
     246             :   virtual void SetUp() = 0;
     247             :   virtual void TearDown() = 0;
     248             :   virtual void CollectGarbage() = 0;
     249             : 
     250    76340925 :   inline Heap* heap() const { return heap_; }
     251             :   inline Isolate* isolate();
     252             : 
     253             :  protected:
     254             :   explicit MarkCompactCollectorBase(Heap* heap)
     255      122097 :       : heap_(heap), old_to_new_slots_(0) {}
     256             : 
     257             :   // Marking operations for objects reachable from roots.
     258             :   virtual void MarkLiveObjects() = 0;
     259             :   // Mark objects reachable (transitively) from objects in the marking
     260             :   // work list.
     261             :   virtual void ProcessMarkingWorklist() = 0;
     262             :   // Clear non-live references held in side data structures.
     263             :   virtual void ClearNonLiveReferences() = 0;
     264             :   virtual void EvacuatePrologue() = 0;
     265             :   virtual void EvacuateEpilogue() = 0;
     266             :   virtual void Evacuate() = 0;
     267             :   virtual void EvacuatePagesInParallel() = 0;
     268             :   virtual void UpdatePointersAfterEvacuation() = 0;
     269             :   virtual UpdatingItem* CreateToSpaceUpdatingItem(MemoryChunk* chunk,
     270             :                                                   Address start,
     271             :                                                   Address end) = 0;
     272             :   virtual UpdatingItem* CreateRememberedSetUpdatingItem(
     273             :       MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode) = 0;
     274             : 
     275             :   template <class Evacuator, class Collector>
     276             :   void CreateAndExecuteEvacuationTasks(
     277             :       Collector* collector, ItemParallelJob* job,
     278             :       RecordMigratedSlotVisitor* record_visitor,
     279             :       MigrationObserver* migration_observer, const intptr_t live_bytes);
     280             : 
     281             :   // Returns whether this page should be moved according to heuristics.
     282             :   bool ShouldMovePage(Page* p, intptr_t live_bytes);
     283             : 
     284             :   int CollectToSpaceUpdatingItems(ItemParallelJob* job);
     285             :   template <typename IterateableSpace>
     286             :   int CollectRememberedSetUpdatingItems(ItemParallelJob* job,
     287             :                                         IterateableSpace* space,
     288             :                                         RememberedSetUpdatingMode mode);
     289             : 
     290             :   int NumberOfParallelCompactionTasks(int pages);
     291             :   int NumberOfParallelPointerUpdateTasks(int pages, int slots);
     292             :   int NumberOfParallelToSpacePointerUpdateTasks(int pages);
     293             : 
     294             :   Heap* heap_;
      295             :   // Number of old-to-new slots. Should be computed during MarkLiveObjects.
     296             :   // -1 indicates that the value couldn't be computed.
     297             :   int old_to_new_slots_;
     298             : };
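                     :
                     : // Rough collection order implied by the virtual interface above (a
                     : // simplified sketch; the concrete collectors below also interleave
                     : // preparation, verification, and sweeping):
                     : //
                     : //   MarkLiveObjects();         // trace from roots, drain marking worklists
                     : //   ClearNonLiveReferences();  // prune weak containers and dead maps
                     : //   Evacuate();                // prologue, parallel page evacuation,
                     : //                              // pointer updating, epilogue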
     299             : 
     300             : class MinorMarkingState final
     301             :     : public MarkingStateBase<MinorMarkingState, AccessMode::ATOMIC> {
     302             :  public:
     303           0 :   Bitmap* bitmap(const MemoryChunk* chunk) const {
     304           0 :     return chunk->young_generation_bitmap_;
     305             :   }
     306             : 
     307             :   void IncrementLiveBytes(MemoryChunk* chunk, intptr_t by) {
     308             :     chunk->young_generation_live_byte_count_ += by;
     309             :   }
     310             : 
     311             :   intptr_t live_bytes(MemoryChunk* chunk) const {
     312             :     return chunk->young_generation_live_byte_count_;
     313             :   }
     314             : 
     315             :   void SetLiveBytes(MemoryChunk* chunk, intptr_t value) {
     316             :     chunk->young_generation_live_byte_count_ = value;
     317             :   }
     318             : };
     319             : 
     320             : class MinorNonAtomicMarkingState final
     321             :     : public MarkingStateBase<MinorNonAtomicMarkingState,
     322             :                               AccessMode::NON_ATOMIC> {
     323             :  public:
     324           0 :   Bitmap* bitmap(const MemoryChunk* chunk) const {
     325           0 :     return chunk->young_generation_bitmap_;
     326             :   }
     327             : 
     328             :   void IncrementLiveBytes(MemoryChunk* chunk, intptr_t by) {
     329             :     chunk->young_generation_live_byte_count_.fetch_add(
     330             :         by, std::memory_order_relaxed);
     331             :   }
     332             : 
     333             :   intptr_t live_bytes(MemoryChunk* chunk) const {
     334             :     return chunk->young_generation_live_byte_count_.load(
     335             :         std::memory_order_relaxed);
     336             :   }
     337             : 
     338             :   void SetLiveBytes(MemoryChunk* chunk, intptr_t value) {
     339             :     chunk->young_generation_live_byte_count_.store(value,
     340             :                                                    std::memory_order_relaxed);
     341             :   }
     342             : };
     343             : 
     344             : // This marking state is used when concurrent marking is running.
     345             : class IncrementalMarkingState final
     346             :     : public MarkingStateBase<IncrementalMarkingState, AccessMode::ATOMIC> {
     347             :  public:
     348  4373493741 :   Bitmap* bitmap(const MemoryChunk* chunk) const {
     349             :     DCHECK_EQ(reinterpret_cast<intptr_t>(&chunk->marking_bitmap_) -
     350             :                   reinterpret_cast<intptr_t>(chunk),
     351             :               MemoryChunk::kMarkBitmapOffset);
     352  4373493741 :     return chunk->marking_bitmap_;
     353             :   }
     354             : 
     355             :   // Concurrent marking uses local live bytes so we may do these accesses
     356             :   // non-atomically.
     357   205585856 :   void IncrementLiveBytes(MemoryChunk* chunk, intptr_t by) {
     358   205879113 :     chunk->live_byte_count_ += by;
     359   205585856 :   }
     360             : 
     361             :   intptr_t live_bytes(MemoryChunk* chunk) const {
     362             :     return chunk->live_byte_count_;
     363             :   }
     364             : 
     365             :   void SetLiveBytes(MemoryChunk* chunk, intptr_t value) {
     366             :     chunk->live_byte_count_ = value;
     367             :   }
     368             : };
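                     :
                     : // (Why the plain increment above is safe, as far as the comment indicates:
                     : // each concurrent-marking task accumulates its live-byte deltas in
                     : // task-local storage and flushes them to the chunk from a single thread,
                     : // so live_byte_count_ is not raced here. Contrast MajorAtomicMarkingState
                     : // below, which is updated from multiple threads and therefore uses an
                     : // atomic fetch-add.)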
     369             : 
     370             : class MajorAtomicMarkingState final
     371             :     : public MarkingStateBase<MajorAtomicMarkingState, AccessMode::ATOMIC> {
     372             :  public:
     373    35283238 :   Bitmap* bitmap(const MemoryChunk* chunk) const {
     374             :     DCHECK_EQ(reinterpret_cast<intptr_t>(&chunk->marking_bitmap_) -
     375             :                   reinterpret_cast<intptr_t>(chunk),
     376             :               MemoryChunk::kMarkBitmapOffset);
     377    35283238 :     return chunk->marking_bitmap_;
     378             :   }
     379             : 
     380     5569793 :   void IncrementLiveBytes(MemoryChunk* chunk, intptr_t by) {
     381             :     std::atomic_fetch_add(
     382             :         reinterpret_cast<std::atomic<intptr_t>*>(&chunk->live_byte_count_), by);
     383     5569793 :   }
     384             : };
     385             : 
     386             : class MajorNonAtomicMarkingState final
     387             :     : public MarkingStateBase<MajorNonAtomicMarkingState,
     388             :                               AccessMode::NON_ATOMIC> {
     389             :  public:
     390   242050211 :   Bitmap* bitmap(const MemoryChunk* chunk) const {
     391             :     DCHECK_EQ(reinterpret_cast<intptr_t>(&chunk->marking_bitmap_) -
     392             :                   reinterpret_cast<intptr_t>(chunk),
     393             :               MemoryChunk::kMarkBitmapOffset);
     394   242050211 :     return chunk->marking_bitmap_;
     395             :   }
     396             : 
     397      189612 :   void IncrementLiveBytes(MemoryChunk* chunk, intptr_t by) {
     398     1031405 :     chunk->live_byte_count_ += by;
     399      189612 :   }
     400             : 
     401             :   intptr_t live_bytes(MemoryChunk* chunk) const {
     402             :     return chunk->live_byte_count_;
     403             :   }
     404             : 
     405             :   void SetLiveBytes(MemoryChunk* chunk, intptr_t value) {
     406     2004066 :     chunk->live_byte_count_ = value;
     407             :   }
     408             : };
     409             : 
     410   187578024 : struct Ephemeron {
     411             :   HeapObject key;
     412             :   HeapObject value;
     413             : };
     414             : 
     415             : typedef Worklist<Ephemeron, 64> EphemeronWorklist;
     416             : 
     417             : // Weak objects encountered during marking.
     418      793817 : struct WeakObjects {
     419             :   Worklist<TransitionArray, 64> transition_arrays;
     420             : 
     421             :   // Keep track of all EphemeronHashTables in the heap to process
     422             :   // them in the atomic pause.
     423             :   Worklist<EphemeronHashTable, 64> ephemeron_hash_tables;
     424             : 
     425             :   // Keep track of all ephemerons for concurrent marking tasks. Only store
     426             :   // ephemerons in these Worklists if both key and value are unreachable at the
     427             :   // moment.
     428             :   //
     429             :   // MarkCompactCollector::ProcessEphemeronsUntilFixpoint drains and fills these
     430             :   // worklists.
     431             :   //
      432             :   // current_ephemerons is used as the draining worklist in the current
      433             :   // fixpoint iteration.
     434             :   EphemeronWorklist current_ephemerons;
     435             : 
     436             :   // Stores ephemerons to visit in the next fixpoint iteration.
     437             :   EphemeronWorklist next_ephemerons;
     438             : 
      439             :   // When draining the marking worklist, newly discovered ephemerons are
      440             :   // pushed into this worklist.
     441             :   EphemeronWorklist discovered_ephemerons;
     442             : 
     443             :   // TODO(marja): For old space, we only need the slot, not the host
     444             :   // object. Optimize this by adding a different storage for old space.
     445             :   Worklist<std::pair<HeapObject, HeapObjectSlot>, 64> weak_references;
     446             :   Worklist<std::pair<HeapObject, Code>, 64> weak_objects_in_code;
     447             : 
     448             :   Worklist<JSWeakRef, 64> js_weak_refs;
     449             :   Worklist<WeakCell, 64> weak_cells;
     450             : 
     451             :   Worklist<SharedFunctionInfo, 64> bytecode_flushing_candidates;
     452             :   Worklist<JSFunction, 64> flushed_js_functions;
     453             : };
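                     :
                     : // Sketch of one fixpoint iteration over the ephemeron worklists described
                     : // above. {mark_value_if_key_marked} stands in for the collector's private
                     : // VisitEphemeron; everything here is illustrative of the scheme, not the
                     : // exact implementation.
                     : template <typename MarkValueIfKeyMarked>
                     : bool ProcessEphemeronsOnceSketch(
                     :     EphemeronWorklist* current_ephemerons,
                     :     EphemeronWorklist* next_ephemerons,
                     :     MarkValueIfKeyMarked mark_value_if_key_marked) {
                     :   bool marked_value = false;
                     :   Ephemeron ephemeron;
                     :   while (current_ephemerons->Pop(0 /* main thread */, &ephemeron)) {
                     :     if (mark_value_if_key_marked(ephemeron.key, ephemeron.value)) {
                     :       // The value became reachable; draining the marking worklist may
                     :       // now turn further ephemeron keys black, so iterate again.
                     :       marked_value = true;
                     :     } else {
                     :       // Key still unreachable: revisit in the next fixpoint iteration.
                     :       next_ephemerons->Push(0, ephemeron);
                     :     }
                     :   }
                     :   return marked_value;
                     : }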
     454             : 
     455             : struct EphemeronMarking {
     456             :   std::vector<HeapObject> newly_discovered;
     457             :   bool newly_discovered_overflowed;
     458             :   size_t newly_discovered_limit;
     459             : };
     460             : 
     461             : // Collector for young and old generation.
     462             : class MarkCompactCollector final : public MarkCompactCollectorBase {
     463             :  public:
     464             : #ifdef V8_CONCURRENT_MARKING
     465             :   using MarkingState = IncrementalMarkingState;
     466             : #else
     467             :   using MarkingState = MajorNonAtomicMarkingState;
     468             : #endif  // V8_CONCURRENT_MARKING
     469             : 
     470             :   using NonAtomicMarkingState = MajorNonAtomicMarkingState;
     471             : 
     472             :   // Wrapper for the shared worklist.
     473       61034 :   class MarkingWorklist {
     474             :    public:
     475             :     using ConcurrentMarkingWorklist = Worklist<HeapObject, 64>;
     476             :     using EmbedderTracingWorklist = Worklist<HeapObject, 16>;
     477             : 
     478             :     // The heap parameter is not used but needed to match the sequential case.
     479      244195 :     explicit MarkingWorklist(Heap* heap) {}
     480             : 
     481   276977473 :     void Push(HeapObject object) {
     482   371292800 :       bool success = shared_.Push(kMainThread, object);
     483             :       USE(success);
     484             :       DCHECK(success);
     485   276977472 :     }
     486             : 
     487   201493729 :     HeapObject Pop() {
     488   201493729 :       HeapObject result;
     489   201493729 :       if (shared_.Pop(kMainThread, &result)) return result;
     490             : #ifdef V8_CONCURRENT_MARKING
     491             :       // The expectation is that this work list is empty almost all the time
     492             :       // and we can thus avoid the emptiness checks by putting it last.
     493     7333778 :       if (on_hold_.Pop(kMainThread, &result)) return result;
     494             : #endif
     495     1550868 :       return HeapObject();
     496             :     }
     497             : 
     498        5300 :     void Clear() {
     499        5300 :       shared_.Clear();
     500        5300 :       on_hold_.Clear();
     501        5300 :       embedder_.Clear();
     502        5300 :     }
     503             : 
     504     2605332 :     bool IsEmpty() {
     505     2528133 :       return shared_.IsLocalEmpty(kMainThread) &&
     506     2528023 :              on_hold_.IsLocalEmpty(kMainThread) &&
     507     5132582 :              shared_.IsGlobalPoolEmpty() && on_hold_.IsGlobalPoolEmpty();
     508             :     }
     509             : 
     510      223530 :     bool IsEmbedderEmpty() {
     511      447060 :       return embedder_.IsLocalEmpty(kMainThread) &&
     512      223530 :              embedder_.IsGlobalPoolEmpty();
     513             :     }
     514             : 
     515             :     int Size() {
     516             :       return static_cast<int>(shared_.LocalSize(kMainThread) +
     517             :                               on_hold_.LocalSize(kMainThread));
     518             :     }
     519             : 
      520             :     // Calls the specified callback on each element of the worklists and
      521             :     // replaces the element with the result of the callback. If the callback
      522             :     // returns nullptr then the element is removed from the worklist.
      523             :     // The callback must accept HeapObject and return HeapObject.
     524             :     template <typename Callback>
     525         789 :     void Update(Callback callback) {
     526         789 :       shared_.Update(callback);
     527         789 :       on_hold_.Update(callback);
     528         789 :       embedder_.Update(callback);
     529         789 :     }
     530             : 
     531             :     ConcurrentMarkingWorklist* shared() { return &shared_; }
     532             :     ConcurrentMarkingWorklist* on_hold() { return &on_hold_; }
     533          35 :     EmbedderTracingWorklist* embedder() { return &embedder_; }
     534             : 
     535             :     void Print() {
     536             :       PrintWorklist("shared", &shared_);
     537             :       PrintWorklist("on_hold", &on_hold_);
     538             :     }
     539             : 
     540             :    private:
     541             :     // Prints the stats about the global pool of the worklist.
     542             :     void PrintWorklist(const char* worklist_name,
     543             :                        ConcurrentMarkingWorklist* worklist);
     544             : 
     545             :     // Worklist used for most objects.
     546             :     ConcurrentMarkingWorklist shared_;
     547             : 
     548             :     // Concurrent marking uses this worklist to bail out of marking objects
     549             :     // in new space's linear allocation area. Used to avoid black allocation
      550             :     // for new space. This allows the compiler to remove write barriers
      551             :     // for freshly allocated objects.
     552             :     ConcurrentMarkingWorklist on_hold_;
     553             : 
     554             :     // Worklist for objects that potentially require embedder tracing, i.e.,
     555             :     // these objects need to be handed over to the embedder to find the full
     556             :     // transitive closure.
     557             :     EmbedderTracingWorklist embedder_;
     558             :   };
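                     :
                     :   // Drain-loop sketch for the wrapper above: Pop() returns an empty
                     :   // HeapObject once the shared (and, with concurrent marking, on-hold)
                     :   // worklists are exhausted, so a typical drain looks like:
                     :   //
                     :   //   HeapObject object;
                     :   //   while (!(object = worklist->Pop()).is_null()) {
                     :   //     // ... mark {object} black and visit its fields ...
                     :   //   }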
     559             : 
     560             :   class RootMarkingVisitor;
     561             :   class CustomRootBodyMarkingVisitor;
     562             : 
     563             :   enum IterationMode {
     564             :     kKeepMarking,
     565             :     kClearMarkbits,
     566             :   };
     567             : 
     568             :   MarkingState* marking_state() { return &marking_state_; }
     569             : 
     570             :   NonAtomicMarkingState* non_atomic_marking_state() {
     571             :     return &non_atomic_marking_state_;
     572             :   }
     573             : 
     574             :   void SetUp() override;
     575             :   void TearDown() override;
     576             :   // Performs a global garbage collection.
     577             :   void CollectGarbage() override;
     578             : 
     579             :   void CollectEvacuationCandidates(PagedSpace* space);
     580             : 
     581             :   void AddEvacuationCandidate(Page* p);
     582             : 
     583             :   // Prepares for GC by resetting relocation info in old and map spaces and
     584             :   // choosing spaces to compact.
     585             :   void Prepare();
     586             : 
     587             :   // Stop concurrent marking (either by preempting it right away or waiting for
     588             :   // it to complete as requested by |stop_request|).
     589             :   void FinishConcurrentMarking(ConcurrentMarking::StopRequest stop_request);
     590             : 
     591             :   bool StartCompaction();
     592             : 
     593             :   void AbortCompaction();
     594             : 
     595     2500149 :   static inline bool IsOnEvacuationCandidate(Object obj) {
     596     2500149 :     return Page::FromAddress(obj->ptr())->IsEvacuationCandidate();
     597             :   }
     598             : 
     599             :   static bool IsOnEvacuationCandidate(MaybeObject obj);
     600             : 
     601             :   struct RecordRelocSlotInfo {
     602             :     MemoryChunk* memory_chunk;
     603             :     SlotType slot_type;
     604             :     bool should_record;
     605             :     uint32_t offset;
     606             :   };
     607             :   static RecordRelocSlotInfo PrepareRecordRelocSlot(Code host, RelocInfo* rinfo,
     608             :                                                     HeapObject target);
     609             :   static void RecordRelocSlot(Code host, RelocInfo* rinfo, HeapObject target);
     610             :   V8_INLINE static void RecordSlot(HeapObject object, ObjectSlot slot,
     611             :                                    HeapObject target);
     612             :   V8_INLINE static void RecordSlot(HeapObject object, HeapObjectSlot slot,
     613             :                                    HeapObject target);
     614             :   void RecordLiveSlotsOnPage(Page* page);
     615             : 
     616             :   void UpdateSlots(SlotsBuffer* buffer);
     617             :   void UpdateSlotsRecordedIn(SlotsBuffer* buffer);
     618             : 
     619             :   bool is_compacting() const { return compacting_; }
     620             : 
     621             :   // Ensures that sweeping is finished.
     622             :   //
     623             :   // Note: Can only be called safely from main thread.
     624             :   void EnsureSweepingCompleted();
     625             : 
     626             :   // Checks if sweeping is in progress right now on any space.
     627      578587 :   bool sweeping_in_progress() const { return sweeper_->sweeping_in_progress(); }
     628             : 
     629      149020 :   void set_evacuation(bool evacuation) { evacuation_ = evacuation; }
     630             : 
     631             :   bool evacuation() const { return evacuation_; }
     632             : 
     633   149891586 :   MarkingWorklist* marking_worklist() { return &marking_worklist_; }
     634             : 
     635             :   WeakObjects* weak_objects() { return &weak_objects_; }
     636             : 
     637             :   inline void AddTransitionArray(TransitionArray array);
     638             : 
     639       16625 :   void AddEphemeronHashTable(EphemeronHashTable table) {
     640       16625 :     weak_objects_.ephemeron_hash_tables.Push(kMainThread, table);
     641       16625 :   }
     642             : 
     643          87 :   void AddEphemeron(HeapObject key, HeapObject value) {
     644             :     weak_objects_.discovered_ephemerons.Push(kMainThread,
     645          87 :                                              Ephemeron{key, value});
     646          87 :   }
     647             : 
     648    10070025 :   void AddWeakReference(HeapObject host, HeapObjectSlot slot) {
     649    10070025 :     weak_objects_.weak_references.Push(kMainThread, std::make_pair(host, slot));
     650    10070025 :   }
     651             : 
     652       22242 :   void AddWeakObjectInCode(HeapObject object, Code code) {
     653             :     weak_objects_.weak_objects_in_code.Push(kMainThread,
     654       22242 :                                             std::make_pair(object, code));
     655       22242 :   }
     656             : 
     657          96 :   void AddWeakRef(JSWeakRef weak_ref) {
     658          96 :     weak_objects_.js_weak_refs.Push(kMainThread, weak_ref);
     659          96 :   }
     660             : 
     661         268 :   void AddWeakCell(WeakCell weak_cell) {
     662         268 :     weak_objects_.weak_cells.Push(kMainThread, weak_cell);
     663         268 :   }
     664             : 
     665             :   inline void AddBytecodeFlushingCandidate(SharedFunctionInfo flush_candidate);
     666             :   inline void AddFlushedJSFunction(JSFunction flushed_function);
     667             : 
     668           0 :   void AddNewlyDiscovered(HeapObject object) {
     669           0 :     if (ephemeron_marking_.newly_discovered_overflowed) return;
     670             : 
     671           0 :     if (ephemeron_marking_.newly_discovered.size() <
     672             :         ephemeron_marking_.newly_discovered_limit) {
     673           0 :       ephemeron_marking_.newly_discovered.push_back(object);
     674             :     } else {
     675           0 :       ephemeron_marking_.newly_discovered_overflowed = true;
     676             :     }
     677             :   }
     678             : 
     679             :   void ResetNewlyDiscovered() {
     680           0 :     ephemeron_marking_.newly_discovered_overflowed = false;
     681             :     ephemeron_marking_.newly_discovered.clear();
     682             :   }
     683             : 
     684             :   Sweeper* sweeper() { return sweeper_; }
     685             : 
     686             : #ifdef DEBUG
     687             :   // Checks whether performing mark-compact collection.
      688             :   // Checks whether a mark-compact collection is in progress.
     689             :   bool are_map_pointers_encoded() { return state_ == UPDATE_POINTERS; }
     690             : #endif
     691             : 
     692             :   void VerifyMarking();
     693             : #ifdef VERIFY_HEAP
     694             :   void VerifyValidStoreAndSlotsBufferEntries();
     695             :   void VerifyMarkbitsAreClean();
     696             :   void VerifyMarkbitsAreDirty(PagedSpace* space);
     697             :   void VerifyMarkbitsAreClean(PagedSpace* space);
     698             :   void VerifyMarkbitsAreClean(NewSpace* space);
     699             :   void VerifyMarkbitsAreClean(LargeObjectSpace* space);
     700             : #endif
     701             : 
     702    76340924 :   unsigned epoch() const { return epoch_; }
     703             : 
     704             :   explicit MarkCompactCollector(Heap* heap);
     705             :   ~MarkCompactCollector() override;
     706             : 
     707             :   // Used by wrapper tracing.
     708             :   V8_INLINE void MarkExternallyReferencedObject(HeapObject obj);
     709             : 
     710             :  private:
     711             :   void ComputeEvacuationHeuristics(size_t area_size,
     712             :                                    int* target_fragmentation_percent,
     713             :                                    size_t* max_evacuated_bytes);
     714             : 
     715             :   void RecordObjectStats();
     716             : 
     717             :   // Finishes GC, performs heap verification if enabled.
     718             :   void Finish();
     719             : 
     720             :   void MarkLiveObjects() override;
     721             : 
     722             :   // Marks the object black and adds it to the marking work list.
     723             :   // This is for non-incremental marking only.
     724             :   V8_INLINE void MarkObject(HeapObject host, HeapObject obj);
     725             : 
     726             :   // Marks the object black and adds it to the marking work list.
     727             :   // This is for non-incremental marking only.
     728             :   V8_INLINE void MarkRootObject(Root root, HeapObject obj);
     729             : 
     730             :   // Mark the heap roots and all objects reachable from them.
     731             :   void MarkRoots(RootVisitor* root_visitor,
     732             :                  ObjectVisitor* custom_root_body_visitor);
     733             : 
     734             :   // Mark the string table specially.  References to internalized strings from
     735             :   // the string table are weak.
     736             :   void MarkStringTable(ObjectVisitor* visitor);
     737             : 
      738             :   // Marks objects reachable from harmony weak maps and wrapper tracing.
     739             :   void ProcessEphemeronMarking();
     740             : 
      741             :   // If the call site of the topmost optimized frame was not prepared for
      742             :   // deoptimization, then treat embedded pointers in its code as strong, as
      743             :   // otherwise they can die and try to deoptimize the underlying code.
     744             :   void ProcessTopOptimizedFrame(ObjectVisitor* visitor);
     745             : 
     746             :   // Drains the main thread marking work list. Will mark all pending objects
     747             :   // if no concurrent threads are running.
     748             :   void ProcessMarkingWorklist() override;
     749             : 
     750             :   enum class MarkingWorklistProcessingMode {
     751             :     kDefault,
     752             :     kTrackNewlyDiscoveredObjects
     753             :   };
     754             : 
     755             :   template <MarkingWorklistProcessingMode mode>
     756             :   void ProcessMarkingWorklistInternal();
     757             : 
     758             :   // Implements ephemeron semantics: Marks value if key is already reachable.
     759             :   // Returns true if value was actually marked.
     760             :   bool VisitEphemeron(HeapObject key, HeapObject value);
     761             : 
     762             :   // Marks ephemerons and drains marking worklist iteratively
     763             :   // until a fixpoint is reached.
     764             :   void ProcessEphemeronsUntilFixpoint();
     765             : 
     766             :   // Drains ephemeron and marking worklists. Single iteration of the
     767             :   // fixpoint iteration.
     768             :   bool ProcessEphemerons();
     769             : 
     770             :   // Mark ephemerons and drain marking worklist with a linear algorithm.
     771             :   // Only used if fixpoint iteration doesn't finish within a few iterations.
     772             :   void ProcessEphemeronsLinear();
     773             : 
     774             :   // Perform Wrapper Tracing if in use.
     775             :   void PerformWrapperTracing();
     776             : 
     777             :   // Callback function for telling whether the object *p is an unmarked
     778             :   // heap object.
     779             :   static bool IsUnmarkedHeapObject(Heap* heap, FullObjectSlot p);
     780             : 
     781             :   // Clear non-live references in weak cells, transition and descriptor arrays,
     782             :   // and deoptimize dependent code of non-live maps.
     783             :   void ClearNonLiveReferences() override;
     784             :   void MarkDependentCodeForDeoptimization();
     785             :   // Checks if the given weak cell is a simple transition from the parent map
     786             :   // of the given dead target. If so it clears the transition and trims
     787             :   // the descriptor array of the parent if needed.
     788             :   void ClearPotentialSimpleMapTransition(Map dead_target);
     789             :   void ClearPotentialSimpleMapTransition(Map map, Map dead_target);
     790             : 
     791             :   // Flushes a weakly held bytecode array from a shared function info.
     792             :   void FlushBytecodeFromSFI(SharedFunctionInfo shared_info);
     793             : 
     794             :   // Clears bytecode arrays that have not been executed for multiple
     795             :   // collections.
     796             :   void ClearOldBytecodeCandidates();
     797             : 
     798             :   // Resets any JSFunctions which have had their bytecode flushed.
     799             :   void ClearFlushedJsFunctions();
     800             : 
      801             :   // Compacts every array in the global list of transition arrays and
      802             :   // trims the corresponding descriptor array if a transition target is non-live.
     803             :   void ClearFullMapTransitions();
     804             :   void TrimDescriptorArray(Map map, DescriptorArray descriptors);
     805             :   void TrimEnumCache(Map map, DescriptorArray descriptors);
     806             :   bool CompactTransitionArray(Map map, TransitionArray transitions,
     807             :                               DescriptorArray descriptors);
     808             : 
      809             :   // After all reachable objects have been marked, those weak map entries
     810             :   // with an unreachable key are removed from all encountered weak maps.
     811             :   // The linked list of all encountered weak maps is destroyed.
     812             :   void ClearWeakCollections();
     813             : 
     814             :   // Goes through the list of encountered weak references and clears those with
     815             :   // dead values. If the value is a dead map and the parent map transitions to
     816             :   // the dead map via weak cell, then this function also clears the map
     817             :   // transition.
     818             :   void ClearWeakReferences();
     819             : 
     820             :   // Goes through the list of encountered JSWeakRefs and WeakCells and clears
     821             :   // those with dead values.
     822             :   void ClearJSWeakRefs();
     823             : 
     824             :   void AbortWeakObjects();
     825             : 
     826             :   // Starts sweeping of spaces by contributing on the main thread and setting
     827             :   // up other pages for sweeping. Does not start sweeper tasks.
     828             :   void StartSweepSpaces();
     829             :   void StartSweepSpace(PagedSpace* space);
     830             : 
     831             :   void EvacuatePrologue() override;
     832             :   void EvacuateEpilogue() override;
     833             :   void Evacuate() override;
     834             :   void EvacuatePagesInParallel() override;
     835             :   void UpdatePointersAfterEvacuation() override;
     836             : 
     837             :   UpdatingItem* CreateToSpaceUpdatingItem(MemoryChunk* chunk, Address start,
     838             :                                           Address end) override;
     839             :   UpdatingItem* CreateRememberedSetUpdatingItem(
     840             :       MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode) override;
     841             : 
     842             :   int CollectNewSpaceArrayBufferTrackerItems(ItemParallelJob* job);
     843             :   int CollectOldSpaceArrayBufferTrackerItems(ItemParallelJob* job);
     844             : 
     845             :   void ReleaseEvacuationCandidates();
     846             :   void PostProcessEvacuationCandidates();
     847             :   void ReportAbortedEvacuationCandidate(HeapObject failed_object,
     848             :                                         MemoryChunk* chunk);
     849             : 
     850             :   static const int kEphemeronChunkSize = 8 * KB;
     851             : 
     852             :   int NumberOfParallelEphemeronVisitingTasks(size_t elements);
     853             : 
     854             :   void RightTrimDescriptorArray(DescriptorArray array, int descriptors_to_trim);
     855             : 
     856             :   base::Mutex mutex_;
     857             :   base::Semaphore page_parallel_job_semaphore_;
     858             : 
     859             : #ifdef DEBUG
     860             :   enum CollectorState {
     861             :     IDLE,
     862             :     PREPARE_GC,
     863             :     MARK_LIVE_OBJECTS,
     864             :     SWEEP_SPACES,
     865             :     ENCODE_FORWARDING_ADDRESSES,
     866             :     UPDATE_POINTERS,
     867             :     RELOCATE_OBJECTS
     868             :   };
     869             : 
     870             :   // The current stage of the collector.
     871             :   CollectorState state_;
     872             : #endif
     873             : 
     874             :   bool was_marked_incrementally_;
     875             : 
     876             :   bool evacuation_;
     877             : 
     878             :   // True if we are collecting slots to perform evacuation from evacuation
     879             :   // candidates.
     880             :   bool compacting_;
     881             : 
     882             :   bool black_allocation_;
     883             : 
     884             :   bool have_code_to_deoptimize_;
     885             : 
     886             :   MarkingWorklist marking_worklist_;
     887             :   WeakObjects weak_objects_;
     888             :   EphemeronMarking ephemeron_marking_;
     889             : 
     890             :   // Candidates for pages that should be evacuated.
     891             :   std::vector<Page*> evacuation_candidates_;
     892             :   // Pages that are actually processed during evacuation.
     893             :   std::vector<Page*> old_space_evacuation_pages_;
     894             :   std::vector<Page*> new_space_evacuation_pages_;
     895             :   std::vector<std::pair<HeapObject, Page*>> aborted_evacuation_candidates_;
     896             : 
     897             :   Sweeper* sweeper_;
     898             : 
     899             :   MarkingState marking_state_;
     900             :   NonAtomicMarkingState non_atomic_marking_state_;
     901             : 
     902             :   // Counts the number of mark-compact collections. This is used for marking
      903             :   // descriptor arrays. See NumberOfMarkedDescriptors. Only the lower two
      904             :   // bits are used, so it is okay if this counter overflows and wraps around.
     905             :   unsigned epoch_ = 0;
     906             : 
     907             :   friend class FullEvacuator;
     908             :   friend class RecordMigratedSlotVisitor;
     909             : };
     910             : 
     911             : template <FixedArrayVisitationMode fixed_array_mode,
     912             :           TraceRetainingPathMode retaining_path_mode, typename MarkingState>
     913   103494290 : class MarkingVisitor final
     914             :     : public HeapVisitor<
     915             :           int,
     916             :           MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>> {
     917             :  public:
     918             :   typedef HeapVisitor<
     919             :       int, MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>>
     920             :       Parent;
     921             : 
     922             :   V8_INLINE MarkingVisitor(MarkCompactCollector* collector,
     923             :                            MarkingState* marking_state);
     924             : 
     925             :   V8_INLINE bool ShouldVisitMapPointer() { return false; }
     926             : 
     927             :   V8_INLINE int VisitBytecodeArray(Map map, BytecodeArray object);
     928             :   V8_INLINE int VisitDescriptorArray(Map map, DescriptorArray object);
     929             :   V8_INLINE int VisitEphemeronHashTable(Map map, EphemeronHashTable object);
     930             :   V8_INLINE int VisitFixedArray(Map map, FixedArray object);
     931             :   V8_INLINE int VisitJSApiObject(Map map, JSObject object);
     932             :   V8_INLINE int VisitJSArrayBuffer(Map map, JSArrayBuffer object);
     933             :   V8_INLINE int VisitJSFunction(Map map, JSFunction object);
     934             :   V8_INLINE int VisitJSDataView(Map map, JSDataView object);
     935             :   V8_INLINE int VisitJSTypedArray(Map map, JSTypedArray object);
     936             :   V8_INLINE int VisitMap(Map map, Map object);
     937             :   V8_INLINE int VisitSharedFunctionInfo(Map map, SharedFunctionInfo object);
     938             :   V8_INLINE int VisitTransitionArray(Map map, TransitionArray object);
     939             :   V8_INLINE int VisitWeakCell(Map map, WeakCell object);
     940             :   V8_INLINE int VisitJSWeakRef(Map map, JSWeakRef object);
     941             : 
     942             :   // ObjectVisitor implementation.
     943           0 :   V8_INLINE void VisitPointer(HeapObject host, ObjectSlot p) final {
     944             :     VisitPointerImpl(host, p);
     945           0 :   }
     946           0 :   V8_INLINE void VisitPointer(HeapObject host, MaybeObjectSlot p) final {
     947             :     VisitPointerImpl(host, p);
     948           0 :   }
     949           0 :   V8_INLINE void VisitPointers(HeapObject host, ObjectSlot start,
     950             :                                ObjectSlot end) final {
     951             :     VisitPointersImpl(host, start, end);
     952           0 :   }
     953           0 :   V8_INLINE void VisitPointers(HeapObject host, MaybeObjectSlot start,
     954             :                                MaybeObjectSlot end) final {
     955             :     VisitPointersImpl(host, start, end);
     956           0 :   }
     957             :   V8_INLINE void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) final;
     958             :   V8_INLINE void VisitCodeTarget(Code host, RelocInfo* rinfo) final;
     959             : 
     960             :   // Weak list pointers should be ignored during marking. The lists are
     961             :   // reconstructed after GC.
     962    19013540 :   void VisitCustomWeakPointers(HeapObject host, ObjectSlot start,
     963    19013540 :                                ObjectSlot end) final {}
     964             : 
     965             :   V8_INLINE void VisitDescriptors(DescriptorArray descriptors,
     966             :                                   int number_of_own_descriptors);
     967             :   // Marks the descriptor array black without pushing it on the marking work
     968             :   // list and visits its header.
     969             :   V8_INLINE void MarkDescriptorArrayBlack(HeapObject host,
     970             :                                           DescriptorArray descriptors);
     971             : 
     972             :  private:
      973             :   // Granularity in which large FixedArrays are scanned incrementally, when
      974             :   // enabled by |fixed_array_mode|.
     975             :   static const int kProgressBarScanningChunk = 32 * KB;
     976             : 
     977             :   template <typename TSlot>
     978             :   V8_INLINE void VisitPointerImpl(HeapObject host, TSlot p);
     979             : 
     980             :   template <typename TSlot>
     981             :   V8_INLINE void VisitPointersImpl(HeapObject host, TSlot start, TSlot end);
     982             : 
     983             :   V8_INLINE int VisitFixedArrayIncremental(Map map, FixedArray object);
     984             : 
     985             :   template <typename T>
     986             :   V8_INLINE int VisitEmbedderTracingSubclass(Map map, T object);
     987             : 
     988             :   // Marks the object grey and pushes it on the marking work list.
     989             :   V8_INLINE void MarkObject(HeapObject host, HeapObject obj);
     990             : 
     991  1791060665 :   MarkingState* marking_state() { return marking_state_; }
     992             : 
     993   127085922 :   MarkCompactCollector::MarkingWorklist* marking_worklist() const {
     994   127085922 :     return collector_->marking_worklist();
     995             :   }
     996             : 
     997             :   Heap* const heap_;
     998             :   MarkCompactCollector* const collector_;
     999             :   MarkingState* const marking_state_;
    1000             :   const unsigned mark_compact_epoch_;
    1001             : };
    1002             : 
    1003             : class EvacuationScope {
    1004             :  public:
    1005             :   explicit EvacuationScope(MarkCompactCollector* collector)
    1006             :       : collector_(collector) {
    1007             :     collector_->set_evacuation(true);
    1008             :   }
    1009             : 
    1010             :   ~EvacuationScope() { collector_->set_evacuation(false); }
    1011             : 
    1012             :  private:
    1013             :   MarkCompactCollector* collector_;
    1014             : };
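                     :
                     : // Usage sketch for the RAII scope above: evacuation() is true exactly for
                     : // the dynamic extent of the scope. The function name is illustrative.
                     : inline void EvacuateUnderScopeSketch(MarkCompactCollector* collector) {
                     :   EvacuationScope evacuation_scope(collector);  // evacuation() -> true
                     :   // ... page evacuation work would run here ...
                     : }  // ~EvacuationScope resets evacuation() to false.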
    1015             : 
    1016             : #ifdef ENABLE_MINOR_MC
    1017             : 
    1018             : // Collector for young-generation only.
    1019             : class MinorMarkCompactCollector final : public MarkCompactCollectorBase {
    1020             :  public:
    1021             :   using MarkingState = MinorMarkingState;
    1022             :   using NonAtomicMarkingState = MinorNonAtomicMarkingState;
    1023             : 
    1024             :   explicit MinorMarkCompactCollector(Heap* heap);
    1025             :   ~MinorMarkCompactCollector() override;
    1026             : 
    1027             :   MarkingState* marking_state() { return &marking_state_; }
    1028             : 
    1029             :   NonAtomicMarkingState* non_atomic_marking_state() {
    1030             :     return &non_atomic_marking_state_;
    1031             :   }
    1032             : 
    1033             :   void SetUp() override;
    1034             :   void TearDown() override;
    1035             :   void CollectGarbage() override;
    1036             : 
    1037             :   void MakeIterable(Page* page, MarkingTreatmentMode marking_mode,
    1038             :                     FreeSpaceTreatmentMode free_space_mode);
    1039             :   void CleanupSweepToIteratePages();
    1040             : 
    1041             :  private:
    1042             :   using MarkingWorklist = Worklist<HeapObject, 64 /* segment size */>;
    1043             :   class RootMarkingVisitor;
    1044             : 
    1045             :   static const int kNumMarkers = 8;
    1046             :   static const int kMainMarker = 0;
    1047             : 
    1048             :   inline MarkingWorklist* worklist() { return worklist_; }
    1049             : 
    1050             :   inline YoungGenerationMarkingVisitor* main_marking_visitor() {
    1051             :     return main_marking_visitor_;
    1052             :   }
    1053             : 
    1054             :   void MarkLiveObjects() override;
    1055             :   void MarkRootSetInParallel(RootMarkingVisitor* root_visitor);
    1056             :   V8_INLINE void MarkRootObject(HeapObject obj);
    1057             :   void ProcessMarkingWorklist() override;
    1058             :   void ClearNonLiveReferences() override;
    1059             : 
    1060             :   void EvacuatePrologue() override;
    1061             :   void EvacuateEpilogue() override;
    1062             :   void Evacuate() override;
    1063             :   void EvacuatePagesInParallel() override;
    1064             :   void UpdatePointersAfterEvacuation() override;
    1065             : 
    1066             :   UpdatingItem* CreateToSpaceUpdatingItem(MemoryChunk* chunk, Address start,
    1067             :                                           Address end) override;
    1068             :   UpdatingItem* CreateRememberedSetUpdatingItem(
    1069             :       MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode) override;
    1070             : 
    1071             :   int CollectNewSpaceArrayBufferTrackerItems(ItemParallelJob* job);
    1072             : 
    1073             :   int NumberOfParallelMarkingTasks(int pages);
    1074             : 
    1075             :   MarkingWorklist* worklist_;
    1076             : 
    1077             :   YoungGenerationMarkingVisitor* main_marking_visitor_;
    1078             :   base::Semaphore page_parallel_job_semaphore_;
    1079             :   std::vector<Page*> new_space_evacuation_pages_;
    1080             :   std::vector<Page*> sweep_to_iterate_pages_;
    1081             : 
    1082             :   MarkingState marking_state_;
    1083             :   NonAtomicMarkingState non_atomic_marking_state_;
    1084             : 
    1085             :   friend class YoungGenerationMarkingTask;
    1086             :   friend class YoungGenerationMarkingVisitor;
    1087             : };
    1088             : 
    1089             : #endif  // ENABLE_MINOR_MC
    1090             : 
    1091             : }  // namespace internal
    1092             : }  // namespace v8
    1093             : 
    1094             : #endif  // V8_HEAP_MARK_COMPACT_H_

Generated by: LCOV version 1.10