LCOV - code coverage report
Current view: top level - src/heap - mark-compact.cc (source / functions)
Test: app.info
Date: 2019-04-19
                 Hit     Total    Coverage
Lines:           1063    1540     69.0 %
Functions:       155     298      52.0 %

          Line data    Source code
       1             : // Copyright 2012 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #include "src/heap/mark-compact.h"
       6             : 
       7             : #include <unordered_map>
       8             : 
       9             : #include "src/base/utils/random-number-generator.h"
      10             : #include "src/cancelable-task.h"
      11             : #include "src/compilation-cache.h"
      12             : #include "src/deoptimizer.h"
      13             : #include "src/execution.h"
      14             : #include "src/frames-inl.h"
      15             : #include "src/global-handles.h"
      16             : #include "src/heap/array-buffer-collector.h"
      17             : #include "src/heap/array-buffer-tracker-inl.h"
      18             : #include "src/heap/gc-tracer.h"
      19             : #include "src/heap/incremental-marking-inl.h"
      20             : #include "src/heap/invalidated-slots-inl.h"
      21             : #include "src/heap/item-parallel-job.h"
      22             : #include "src/heap/local-allocator-inl.h"
      23             : #include "src/heap/mark-compact-inl.h"
      24             : #include "src/heap/object-stats.h"
      25             : #include "src/heap/objects-visiting-inl.h"
      26             : #include "src/heap/spaces-inl.h"
      27             : #include "src/heap/sweeper.h"
      28             : #include "src/heap/worklist.h"
      29             : #include "src/ic/stub-cache.h"
      30             : #include "src/objects/embedder-data-array-inl.h"
      31             : #include "src/objects/foreign.h"
      32             : #include "src/objects/hash-table-inl.h"
      33             : #include "src/objects/js-objects-inl.h"
      34             : #include "src/objects/maybe-object.h"
      35             : #include "src/objects/slots-inl.h"
      36             : #include "src/transitions-inl.h"
      37             : #include "src/utils-inl.h"
      38             : #include "src/v8.h"
      39             : #include "src/vm-state-inl.h"
      40             : 
      41             : namespace v8 {
      42             : namespace internal {
      43             : 
      44             : const char* Marking::kWhiteBitPattern = "00";
      45             : const char* Marking::kBlackBitPattern = "11";
      46             : const char* Marking::kGreyBitPattern = "10";
      47             : const char* Marking::kImpossibleBitPattern = "01";
      48             : 
       49             : // The following has to hold for {MarkingState::MarkBitFrom} to not produce an
       50             : // invalid {kImpossibleBitPattern} in the marking bitmap via overlapping mark bits.
      51             : STATIC_ASSERT(Heap::kMinObjectSizeInTaggedWords >= 2);
      52             : 
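
The four patterns above describe the two consecutive mark bits that encode an object's color. A minimal standalone sketch of how such a bit pair decodes, not part of the instrumented source and assuming the first character of each pattern string corresponds to the first mark bit:

// Sketch only: decodes a pair of mark bits into the color names used above.
// White = "00", grey = "10", black = "11"; "01" could only appear if the bit
// pairs of two neighbouring objects overlapped, which the STATIC_ASSERT
// (minimum object size of two tagged words) rules out.
#include <cstdio>

const char* ColorFromMarkBits(bool first_bit, bool second_bit) {
  if (first_bit && second_bit) return "black";    // "11"
  if (first_bit && !second_bit) return "grey";    // "10"
  if (!first_bit && !second_bit) return "white";  // "00"
  return "impossible";                            // "01"
}

int main() {
  std::printf("%s\n", ColorFromMarkBits(true, true));  // prints "black"
  return 0;
}
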
      53             : // =============================================================================
      54             : // Verifiers
      55             : // =============================================================================
      56             : 
      57             : #ifdef VERIFY_HEAP
      58             : namespace {
      59             : 
      60             : class MarkingVerifier : public ObjectVisitor, public RootVisitor {
      61             :  public:
      62             :   virtual void Run() = 0;
      63             : 
      64             :  protected:
      65             :   explicit MarkingVerifier(Heap* heap) : heap_(heap) {}
      66             : 
      67             :   virtual ConcurrentBitmap<AccessMode::NON_ATOMIC>* bitmap(
      68             :       const MemoryChunk* chunk) = 0;
      69             : 
      70             :   virtual void VerifyPointers(ObjectSlot start, ObjectSlot end) = 0;
      71             :   virtual void VerifyPointers(MaybeObjectSlot start, MaybeObjectSlot end) = 0;
      72             :   virtual void VerifyRootPointers(FullObjectSlot start, FullObjectSlot end) = 0;
      73             : 
      74             :   virtual bool IsMarked(HeapObject object) = 0;
      75             : 
      76             :   virtual bool IsBlackOrGrey(HeapObject object) = 0;
      77             : 
      78             :   void VisitPointers(HeapObject host, ObjectSlot start,
      79             :                      ObjectSlot end) override {
      80             :     VerifyPointers(start, end);
      81             :   }
      82             : 
      83             :   void VisitPointers(HeapObject host, MaybeObjectSlot start,
      84             :                      MaybeObjectSlot end) override {
      85             :     VerifyPointers(start, end);
      86             :   }
      87             : 
      88             :   void VisitRootPointers(Root root, const char* description,
      89             :                          FullObjectSlot start, FullObjectSlot end) override {
      90             :     VerifyRootPointers(start, end);
      91             :   }
      92             : 
      93             :   void VerifyRoots(VisitMode mode);
      94             :   void VerifyMarkingOnPage(const Page* page, Address start, Address end);
      95             :   void VerifyMarking(NewSpace* new_space);
      96             :   void VerifyMarking(PagedSpace* paged_space);
      97             :   void VerifyMarking(LargeObjectSpace* lo_space);
      98             : 
      99             :   Heap* heap_;
     100             : };
     101             : 
     102             : void MarkingVerifier::VerifyRoots(VisitMode mode) {
     103             :   heap_->IterateStrongRoots(this, mode);
     104             : }
     105             : 
     106             : void MarkingVerifier::VerifyMarkingOnPage(const Page* page, Address start,
     107             :                                           Address end) {
     108             :   HeapObject object;
     109             :   Address next_object_must_be_here_or_later = start;
     110             :   for (Address current = start; current < end;) {
     111             :     object = HeapObject::FromAddress(current);
      112             :     // One-word fillers at the end of a black area can be grey.
     113             :     if (IsBlackOrGrey(object) &&
     114             :         object->map() != ReadOnlyRoots(heap_).one_pointer_filler_map()) {
     115             :       CHECK(IsMarked(object));
     116             :       CHECK(current >= next_object_must_be_here_or_later);
     117             :       object->Iterate(this);
     118             :       next_object_must_be_here_or_later = current + object->Size();
      119             :       // The object is either part of a black area created by black allocation
      120             :       // or a regular black object.
     121             :       CHECK(
     122             :           bitmap(page)->AllBitsSetInRange(
     123             :               page->AddressToMarkbitIndex(current),
     124             :               page->AddressToMarkbitIndex(next_object_must_be_here_or_later)) ||
     125             :           bitmap(page)->AllBitsClearInRange(
     126             :               page->AddressToMarkbitIndex(current + kTaggedSize * 2),
     127             :               page->AddressToMarkbitIndex(next_object_must_be_here_or_later)));
     128             :       current = next_object_must_be_here_or_later;
     129             :     } else {
     130             :       current += kTaggedSize;
     131             :     }
     132             :   }
     133             : }
     134             : 
     135             : void MarkingVerifier::VerifyMarking(NewSpace* space) {
     136             :   Address end = space->top();
      137             :   // The bottom position is at the start of its page. This allows us to use
      138             :   // page->area_start() as the start of the range on all pages.
     139             :   CHECK_EQ(space->first_allocatable_address(),
     140             :            space->first_page()->area_start());
     141             : 
     142             :   PageRange range(space->first_allocatable_address(), end);
     143             :   for (auto it = range.begin(); it != range.end();) {
     144             :     Page* page = *(it++);
     145             :     Address limit = it != range.end() ? page->area_end() : end;
     146             :     CHECK(limit == end || !page->Contains(end));
     147             :     VerifyMarkingOnPage(page, page->area_start(), limit);
     148             :   }
     149             : }
     150             : 
     151             : void MarkingVerifier::VerifyMarking(PagedSpace* space) {
     152             :   for (Page* p : *space) {
     153             :     VerifyMarkingOnPage(p, p->area_start(), p->area_end());
     154             :   }
     155             : }
     156             : 
     157             : void MarkingVerifier::VerifyMarking(LargeObjectSpace* lo_space) {
     158             :   LargeObjectIterator it(lo_space);
     159             :   for (HeapObject obj = it.Next(); !obj.is_null(); obj = it.Next()) {
     160             :     if (IsBlackOrGrey(obj)) {
     161             :       obj->Iterate(this);
     162             :     }
     163             :   }
     164             : }
     165             : 
     166             : class FullMarkingVerifier : public MarkingVerifier {
     167             :  public:
     168             :   explicit FullMarkingVerifier(Heap* heap)
     169             :       : MarkingVerifier(heap),
     170             :         marking_state_(
     171             :             heap->mark_compact_collector()->non_atomic_marking_state()) {}
     172             : 
     173             :   void Run() override {
     174             :     VerifyRoots(VISIT_ONLY_STRONG);
     175             :     VerifyMarking(heap_->new_space());
     176             :     VerifyMarking(heap_->new_lo_space());
     177             :     VerifyMarking(heap_->old_space());
     178             :     VerifyMarking(heap_->code_space());
     179             :     VerifyMarking(heap_->map_space());
     180             :     VerifyMarking(heap_->lo_space());
     181             :     VerifyMarking(heap_->code_lo_space());
     182             :   }
     183             : 
     184             :  protected:
     185             :   ConcurrentBitmap<AccessMode::NON_ATOMIC>* bitmap(
     186             :       const MemoryChunk* chunk) override {
     187             :     return marking_state_->bitmap(chunk);
     188             :   }
     189             : 
     190             :   bool IsMarked(HeapObject object) override {
     191             :     return marking_state_->IsBlack(object);
     192             :   }
     193             : 
     194             :   bool IsBlackOrGrey(HeapObject object) override {
     195             :     return marking_state_->IsBlackOrGrey(object);
     196             :   }
     197             : 
     198             :   void VerifyPointers(ObjectSlot start, ObjectSlot end) override {
     199             :     VerifyPointersImpl(start, end);
     200             :   }
     201             : 
     202             :   void VerifyPointers(MaybeObjectSlot start, MaybeObjectSlot end) override {
     203             :     VerifyPointersImpl(start, end);
     204             :   }
     205             : 
     206             :   void VerifyRootPointers(FullObjectSlot start, FullObjectSlot end) override {
     207             :     VerifyPointersImpl(start, end);
     208             :   }
     209             : 
     210             :   void VisitCodeTarget(Code host, RelocInfo* rinfo) override {
     211             :     Code target = Code::GetCodeFromTargetAddress(rinfo->target_address());
     212             :     VerifyHeapObjectImpl(target);
     213             :   }
     214             : 
     215             :   void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) override {
     216             :     DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
     217             :     if (!host->IsWeakObject(rinfo->target_object())) {
     218             :       HeapObject object = rinfo->target_object();
     219             :       VerifyHeapObjectImpl(object);
     220             :     }
     221             :   }
     222             : 
     223             :  private:
     224             :   V8_INLINE void VerifyHeapObjectImpl(HeapObject heap_object) {
     225             :     CHECK(marking_state_->IsBlackOrGrey(heap_object));
     226             :   }
     227             : 
     228             :   template <typename TSlot>
     229             :   V8_INLINE void VerifyPointersImpl(TSlot start, TSlot end) {
     230             :     for (TSlot slot = start; slot < end; ++slot) {
     231             :       typename TSlot::TObject object = *slot;
     232             :       HeapObject heap_object;
     233             :       if (object.GetHeapObjectIfStrong(&heap_object)) {
     234             :         VerifyHeapObjectImpl(heap_object);
     235             :       }
     236             :     }
     237             :   }
     238             : 
     239             :   MarkCompactCollector::NonAtomicMarkingState* marking_state_;
     240             : };
     241             : 
     242             : class EvacuationVerifier : public ObjectVisitor, public RootVisitor {
     243             :  public:
     244             :   virtual void Run() = 0;
     245             : 
     246             :   void VisitPointers(HeapObject host, ObjectSlot start,
     247             :                      ObjectSlot end) override {
     248             :     VerifyPointers(start, end);
     249             :   }
     250             : 
     251             :   void VisitPointers(HeapObject host, MaybeObjectSlot start,
     252             :                      MaybeObjectSlot end) override {
     253             :     VerifyPointers(start, end);
     254             :   }
     255             : 
     256             :   void VisitRootPointers(Root root, const char* description,
     257             :                          FullObjectSlot start, FullObjectSlot end) override {
     258             :     VerifyRootPointers(start, end);
     259             :   }
     260             : 
     261             :  protected:
     262             :   explicit EvacuationVerifier(Heap* heap) : heap_(heap) {}
     263             : 
     264             :   inline Heap* heap() { return heap_; }
     265             : 
     266             :   virtual void VerifyPointers(ObjectSlot start, ObjectSlot end) = 0;
     267             :   virtual void VerifyPointers(MaybeObjectSlot start, MaybeObjectSlot end) = 0;
     268             :   virtual void VerifyRootPointers(FullObjectSlot start, FullObjectSlot end) = 0;
     269             : 
     270             :   void VerifyRoots(VisitMode mode);
     271             :   void VerifyEvacuationOnPage(Address start, Address end);
     272             :   void VerifyEvacuation(NewSpace* new_space);
     273             :   void VerifyEvacuation(PagedSpace* paged_space);
     274             : 
     275             :   Heap* heap_;
     276             : };
     277             : 
     278             : void EvacuationVerifier::VerifyRoots(VisitMode mode) {
     279             :   heap_->IterateStrongRoots(this, mode);
     280             : }
     281             : 
     282             : void EvacuationVerifier::VerifyEvacuationOnPage(Address start, Address end) {
     283             :   Address current = start;
     284             :   while (current < end) {
     285             :     HeapObject object = HeapObject::FromAddress(current);
     286             :     if (!object->IsFiller()) object->Iterate(this);
     287             :     current += object->Size();
     288             :   }
     289             : }
     290             : 
     291             : void EvacuationVerifier::VerifyEvacuation(NewSpace* space) {
     292             :   PageRange range(space->first_allocatable_address(), space->top());
     293             :   for (auto it = range.begin(); it != range.end();) {
     294             :     Page* page = *(it++);
     295             :     Address current = page->area_start();
     296             :     Address limit = it != range.end() ? page->area_end() : space->top();
     297             :     CHECK(limit == space->top() || !page->Contains(space->top()));
     298             :     VerifyEvacuationOnPage(current, limit);
     299             :   }
     300             : }
     301             : 
     302             : void EvacuationVerifier::VerifyEvacuation(PagedSpace* space) {
     303             :   for (Page* p : *space) {
     304             :     if (p->IsEvacuationCandidate()) continue;
     305             :     if (p->Contains(space->top())) {
     306             :       CodePageMemoryModificationScope memory_modification_scope(p);
     307             :       heap_->CreateFillerObjectAt(
     308             :           space->top(), static_cast<int>(space->limit() - space->top()),
     309             :           ClearRecordedSlots::kNo);
     310             :     }
     311             :     VerifyEvacuationOnPage(p->area_start(), p->area_end());
     312             :   }
     313             : }
     314             : 
     315             : class FullEvacuationVerifier : public EvacuationVerifier {
     316             :  public:
     317             :   explicit FullEvacuationVerifier(Heap* heap) : EvacuationVerifier(heap) {}
     318             : 
     319             :   void Run() override {
     320             :     VerifyRoots(VISIT_ALL);
     321             :     VerifyEvacuation(heap_->new_space());
     322             :     VerifyEvacuation(heap_->old_space());
     323             :     VerifyEvacuation(heap_->code_space());
     324             :     VerifyEvacuation(heap_->map_space());
     325             :   }
     326             : 
     327             :  protected:
     328             :   V8_INLINE void VerifyHeapObjectImpl(HeapObject heap_object) {
     329             :     CHECK_IMPLIES(Heap::InYoungGeneration(heap_object),
     330             :                   Heap::InToPage(heap_object));
     331             :     CHECK(!MarkCompactCollector::IsOnEvacuationCandidate(heap_object));
     332             :   }
     333             : 
     334             :   template <typename TSlot>
     335             :   void VerifyPointersImpl(TSlot start, TSlot end) {
     336             :     for (TSlot current = start; current < end; ++current) {
     337             :       typename TSlot::TObject object = *current;
     338             :       HeapObject heap_object;
     339             :       if (object.GetHeapObjectIfStrong(&heap_object)) {
     340             :         VerifyHeapObjectImpl(heap_object);
     341             :       }
     342             :     }
     343             :   }
     344             : 
     345             :   void VerifyPointers(ObjectSlot start, ObjectSlot end) override {
     346             :     VerifyPointersImpl(start, end);
     347             :   }
     348             :   void VerifyPointers(MaybeObjectSlot start, MaybeObjectSlot end) override {
     349             :     VerifyPointersImpl(start, end);
     350             :   }
     351             :   void VisitCodeTarget(Code host, RelocInfo* rinfo) override {
     352             :     Code target = Code::GetCodeFromTargetAddress(rinfo->target_address());
     353             :     VerifyHeapObjectImpl(target);
     354             :   }
     355             :   void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) override {
     356             :     VerifyHeapObjectImpl(rinfo->target_object());
     357             :   }
     358             :   void VerifyRootPointers(FullObjectSlot start, FullObjectSlot end) override {
     359             :     VerifyPointersImpl(start, end);
     360             :   }
     361             : };
     362             : 
     363             : }  // namespace
     364             : #endif  // VERIFY_HEAP
     365             : 
     366             : // =============================================================================
     367             : // MarkCompactCollectorBase, MinorMarkCompactCollector, MarkCompactCollector
     368             : // =============================================================================
     369             : 
     370             : using MarkCompactMarkingVisitor =
     371             :     MarkingVisitor<FixedArrayVisitationMode::kRegular,
     372             :                    TraceRetainingPathMode::kEnabled,
     373             :                    MarkCompactCollector::MarkingState>;
     374             : 
     375             : namespace {
     376             : 
     377      263652 : int NumberOfAvailableCores() {
     378      263652 :   static int num_cores = V8::GetCurrentPlatform()->NumberOfWorkerThreads() + 1;
     379             :   // This number of cores should be greater than zero and never change.
     380             :   DCHECK_GE(num_cores, 1);
     381             :   DCHECK_EQ(num_cores, V8::GetCurrentPlatform()->NumberOfWorkerThreads() + 1);
     382      263652 :   return num_cores;
     383             : }
     384             : 
     385             : }  // namespace
     386             : 
     387       57895 : int MarkCompactCollectorBase::NumberOfParallelCompactionTasks(int pages) {
     388             :   DCHECK_GT(pages, 0);
     389             :   int tasks =
     390       57895 :       FLAG_parallel_compaction ? Min(NumberOfAvailableCores(), pages) : 1;
     391       57895 :   if (!heap_->CanExpandOldGeneration(
     392       57895 :           static_cast<size_t>(tasks * Page::kPageSize))) {
     393             :     // Optimize for memory usage near the heap limit.
     394             :     tasks = 1;
     395             :   }
     396       57895 :   return tasks;
     397             : }
     398             : 
     399      137327 : int MarkCompactCollectorBase::NumberOfParallelPointerUpdateTasks(int pages,
     400             :                                                                  int slots) {
     401             :   DCHECK_GT(pages, 0);
     402             :   // Limit the number of update tasks as task creation often dominates the
     403             :   // actual work that is being done.
     404             :   const int kMaxPointerUpdateTasks = 8;
     405             :   const int kSlotsPerTask = 600;
     406             :   const int wanted_tasks =
     407      137327 :       (slots >= 0) ? Max(1, Min(pages, slots / kSlotsPerTask)) : pages;
     408             :   return FLAG_parallel_pointer_update
     409      137163 :              ? Min(kMaxPointerUpdateTasks,
     410             :                    Min(NumberOfAvailableCores(), wanted_tasks))
     411      274654 :              : 1;
     412             : }
     413             : 
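
A worked example of the capping logic above, reimplemented as a free-standing sketch (assuming FLAG_parallel_pointer_update is enabled and passing the core count in explicitly, since this sketch has no platform object):

// Sketch only: mirrors the capping in NumberOfParallelPointerUpdateTasks.
#include <algorithm>
#include <cassert>

int ParallelPointerUpdateTasks(int pages, int slots, int available_cores) {
  const int kMaxPointerUpdateTasks = 8;
  const int kSlotsPerTask = 600;
  // If the slot count is known, aim for roughly one task per 600 slots, but
  // never more tasks than pages; otherwise fall back to one task per page.
  const int wanted_tasks =
      (slots >= 0) ? std::max(1, std::min(pages, slots / kSlotsPerTask)) : pages;
  // Cap by the number of cores and by the global limit of 8 tasks.
  return std::min(kMaxPointerUpdateTasks,
                  std::min(available_cores, wanted_tasks));
}

int main() {
  // 20 pages with 30000 recorded slots on a 16-core machine:
  // 30000 / 600 = 50, limited to 20 pages, then capped at 8 tasks.
  assert(ParallelPointerUpdateTasks(20, 30000, 16) == 8);
  // 3 pages with unknown slot count (-1) on a 16-core machine: 3 tasks.
  assert(ParallelPointerUpdateTasks(3, -1, 16) == 3);
  return 0;
}
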
     414           0 : int MarkCompactCollectorBase::NumberOfParallelToSpacePointerUpdateTasks(
     415             :     int pages) {
     416             :   DCHECK_GT(pages, 0);
     417             :   // No cap needed because all pages we need to process are fully filled with
     418             :   // interesting objects.
     419       68764 :   return FLAG_parallel_pointer_update ? Min(NumberOfAvailableCores(), pages)
     420       68846 :                                       : 1;
     421             : }
     422             : 
     423       62442 : MarkCompactCollector::MarkCompactCollector(Heap* heap)
     424             :     : MarkCompactCollectorBase(heap),
     425             :       page_parallel_job_semaphore_(0),
     426             : #ifdef DEBUG
     427             :       state_(IDLE),
     428             : #endif
     429             :       was_marked_incrementally_(false),
     430             :       evacuation_(false),
     431             :       compacting_(false),
     432             :       black_allocation_(false),
     433             :       have_code_to_deoptimize_(false),
     434             :       marking_worklist_(heap),
     435      124884 :       sweeper_(new Sweeper(heap, non_atomic_marking_state())) {
     436       62442 :   old_to_new_slots_ = -1;
     437       62442 : }
     438             : 
     439      249707 : MarkCompactCollector::~MarkCompactCollector() { delete sweeper_; }
     440             : 
     441       62442 : void MarkCompactCollector::SetUp() {
     442             :   DCHECK_EQ(0, strcmp(Marking::kWhiteBitPattern, "00"));
     443             :   DCHECK_EQ(0, strcmp(Marking::kBlackBitPattern, "11"));
     444             :   DCHECK_EQ(0, strcmp(Marking::kGreyBitPattern, "10"));
     445             :   DCHECK_EQ(0, strcmp(Marking::kImpossibleBitPattern, "01"));
     446       62442 : }
     447             : 
     448       62424 : void MarkCompactCollector::TearDown() {
     449       62424 :   AbortCompaction();
     450       62426 :   AbortWeakObjects();
     451       62427 :   if (heap()->incremental_marking()->IsMarking()) {
     452        5055 :     marking_worklist()->Clear();
     453             :   }
     454       62427 : }
     455             : 
     456           0 : void MarkCompactCollector::AddEvacuationCandidate(Page* p) {
     457             :   DCHECK(!p->NeverEvacuate());
     458        9829 :   p->MarkEvacuationCandidate();
     459        9829 :   evacuation_candidates_.push_back(p);
     460           0 : }
     461             : 
     462             : 
     463           0 : static void TraceFragmentation(PagedSpace* space) {
     464           0 :   int number_of_pages = space->CountTotalPages();
     465           0 :   intptr_t reserved = (number_of_pages * space->AreaSize());
     466           0 :   intptr_t free = reserved - space->SizeOfObjects();
     467           0 :   PrintF("[%s]: %d pages, %d (%.1f%%) free\n", space->name(), number_of_pages,
     468           0 :          static_cast<int>(free), static_cast<double>(free) * 100 / reserved);
     469           0 : }
     470             : 
     471       73901 : bool MarkCompactCollector::StartCompaction() {
     472       73901 :   if (!compacting_) {
     473             :     DCHECK(evacuation_candidates_.empty());
     474             : 
     475       73901 :     CollectEvacuationCandidates(heap()->old_space());
     476             : 
     477       73901 :     if (FLAG_compact_code_space) {
     478       73901 :       CollectEvacuationCandidates(heap()->code_space());
     479           0 :     } else if (FLAG_trace_fragmentation) {
     480           0 :       TraceFragmentation(heap()->code_space());
     481             :     }
     482             : 
     483       73901 :     if (FLAG_trace_fragmentation) {
     484           0 :       TraceFragmentation(heap()->map_space());
     485             :     }
     486             : 
     487       73901 :     compacting_ = !evacuation_candidates_.empty();
     488             :   }
     489             : 
     490       73901 :   return compacting_;
     491             : }
     492             : 
     493       68846 : void MarkCompactCollector::CollectGarbage() {
     494             :   // Make sure that Prepare() has been called. The individual steps below will
     495             :   // update the state as they proceed.
     496             :   DCHECK(state_ == PREPARE_GC);
     497             : 
     498             : #ifdef ENABLE_MINOR_MC
     499       68846 :   heap()->minor_mark_compact_collector()->CleanupSweepToIteratePages();
     500             : #endif  // ENABLE_MINOR_MC
     501             : 
     502       68846 :   MarkLiveObjects();
     503       68846 :   ClearNonLiveReferences();
     504       68846 :   VerifyMarking();
     505             : 
     506       68846 :   RecordObjectStats();
     507             : 
     508       68846 :   StartSweepSpaces();
     509       68846 :   Evacuate();
     510       68846 :   Finish();
     511       68846 : }
     512             : 
     513             : #ifdef VERIFY_HEAP
     514             : void MarkCompactCollector::VerifyMarkbitsAreDirty(PagedSpace* space) {
     515             :   HeapObjectIterator iterator(space);
     516             :   for (HeapObject object = iterator.Next(); !object.is_null();
     517             :        object = iterator.Next()) {
     518             :     CHECK(non_atomic_marking_state()->IsBlack(object));
     519             :   }
     520             : }
     521             : 
     522             : void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) {
     523             :   for (Page* p : *space) {
     524             :     CHECK(non_atomic_marking_state()->bitmap(p)->IsClean());
     525             :     CHECK_EQ(0, non_atomic_marking_state()->live_bytes(p));
     526             :   }
     527             : }
     528             : 
     529             : 
     530             : void MarkCompactCollector::VerifyMarkbitsAreClean(NewSpace* space) {
     531             :   for (Page* p : PageRange(space->first_allocatable_address(), space->top())) {
     532             :     CHECK(non_atomic_marking_state()->bitmap(p)->IsClean());
     533             :     CHECK_EQ(0, non_atomic_marking_state()->live_bytes(p));
     534             :   }
     535             : }
     536             : 
     537             : void MarkCompactCollector::VerifyMarkbitsAreClean(LargeObjectSpace* space) {
     538             :   LargeObjectIterator it(space);
     539             :   for (HeapObject obj = it.Next(); !obj.is_null(); obj = it.Next()) {
     540             :     CHECK(non_atomic_marking_state()->IsWhite(obj));
     541             :     CHECK_EQ(0, non_atomic_marking_state()->live_bytes(
     542             :                     MemoryChunk::FromHeapObject(obj)));
     543             :   }
     544             : }
     545             : 
     546             : void MarkCompactCollector::VerifyMarkbitsAreClean() {
     547             :   VerifyMarkbitsAreClean(heap_->old_space());
     548             :   VerifyMarkbitsAreClean(heap_->code_space());
     549             :   VerifyMarkbitsAreClean(heap_->map_space());
     550             :   VerifyMarkbitsAreClean(heap_->new_space());
     551             :   // Read-only space should always be black since we never collect any objects
     552             :   // in it or linked from it.
     553             :   VerifyMarkbitsAreDirty(heap_->read_only_space());
     554             :   VerifyMarkbitsAreClean(heap_->lo_space());
     555             :   VerifyMarkbitsAreClean(heap_->code_lo_space());
     556             :   VerifyMarkbitsAreClean(heap_->new_lo_space());
     557             : }
     558             : 
     559             : #endif  // VERIFY_HEAP
     560             : 
     561      166877 : void MarkCompactCollector::EnsureSweepingCompleted() {
     562      166877 :   if (!sweeper()->sweeping_in_progress()) return;
     563             : 
     564       68846 :   sweeper()->EnsureCompleted();
     565       68846 :   heap()->old_space()->RefillFreeList();
     566       68846 :   heap()->code_space()->RefillFreeList();
     567       68846 :   heap()->map_space()->RefillFreeList();
     568             : 
     569             : #ifdef VERIFY_HEAP
     570             :   if (FLAG_verify_heap && !evacuation()) {
     571             :     FullEvacuationVerifier verifier(heap());
     572             :     verifier.Run();
     573             :   }
     574             : #endif
     575             : }
     576             : 
     577      145430 : void MarkCompactCollector::ComputeEvacuationHeuristics(
     578             :     size_t area_size, int* target_fragmentation_percent,
     579             :     size_t* max_evacuated_bytes) {
      580             :   // For the memory-reducing and optimize-for-memory modes we define both
      581             :   // constants directly.
     582             :   const int kTargetFragmentationPercentForReduceMemory = 20;
     583             :   const size_t kMaxEvacuatedBytesForReduceMemory = 12 * MB;
     584             :   const int kTargetFragmentationPercentForOptimizeMemory = 20;
     585             :   const size_t kMaxEvacuatedBytesForOptimizeMemory = 6 * MB;
     586             : 
      587             :   // For regular mode (which is latency critical) we start with less aggressive
      588             :   // defaults and switch to a trace-based approach (using compaction speed) as
      589             :   // soon as we have enough samples.
     590             :   const int kTargetFragmentationPercent = 70;
     591             :   const size_t kMaxEvacuatedBytes = 4 * MB;
      592             :   // Target time for compacting a single area (= payload of a page). Used as
      593             :   // soon as enough compaction speed samples exist.
     594             :   const float kTargetMsPerArea = .5;
     595             : 
     596      145430 :   if (heap()->ShouldReduceMemory()) {
     597        9784 :     *target_fragmentation_percent = kTargetFragmentationPercentForReduceMemory;
     598        9784 :     *max_evacuated_bytes = kMaxEvacuatedBytesForReduceMemory;
     599      135646 :   } else if (heap()->ShouldOptimizeForMemoryUsage()) {
     600             :     *target_fragmentation_percent =
     601          24 :         kTargetFragmentationPercentForOptimizeMemory;
     602          24 :     *max_evacuated_bytes = kMaxEvacuatedBytesForOptimizeMemory;
     603             :   } else {
     604             :     const double estimated_compaction_speed =
     605      135622 :         heap()->tracer()->CompactionSpeedInBytesPerMillisecond();
     606      135622 :     if (estimated_compaction_speed != 0) {
     607             :       // Estimate the target fragmentation based on traced compaction speed
     608             :       // and a goal for a single page.
     609             :       const double estimated_ms_per_area =
     610      103428 :           1 + area_size / estimated_compaction_speed;
     611             :       *target_fragmentation_percent = static_cast<int>(
     612      103428 :           100 - 100 * kTargetMsPerArea / estimated_ms_per_area);
     613      103428 :       if (*target_fragmentation_percent <
     614             :           kTargetFragmentationPercentForReduceMemory) {
     615             :         *target_fragmentation_percent =
     616           0 :             kTargetFragmentationPercentForReduceMemory;
     617             :       }
     618             :     } else {
     619       32194 :       *target_fragmentation_percent = kTargetFragmentationPercent;
     620             :     }
     621      135622 :     *max_evacuated_bytes = kMaxEvacuatedBytes;
     622             :   }
     623      145430 : }
     624             : 
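
For the trace-based branch above, a small numeric example with illustrative inputs (not taken from the report): with an area size of 500 KB and an estimated compaction speed of 1000 KB/ms, estimated_ms_per_area = 1 + 500/1000 = 1.5 ms, so the target fragmentation becomes 100 - 100 * 0.5 / 1.5, about 66 %. A sketch of just that computation:

// Sketch only: the trace-based target-fragmentation formula from
// ComputeEvacuationHeuristics, applied to hypothetical inputs.
#include <cstdio>

int main() {
  const float kTargetMsPerArea = .5;
  const double area_size = 500.0 * 1024;                    // 500 KB payload
  const double estimated_compaction_speed = 1000.0 * 1024;  // 1000 KB per ms
  const double estimated_ms_per_area =
      1 + area_size / estimated_compaction_speed;            // 1.5 ms
  const int target_fragmentation_percent =
      static_cast<int>(100 - 100 * kTargetMsPerArea / estimated_ms_per_area);
  std::printf("target fragmentation: %d %%\n",
              target_fragmentation_percent);                 // prints 66
  return 0;
}
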
     625      147802 : void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
     626             :   DCHECK(space->identity() == OLD_SPACE || space->identity() == CODE_SPACE);
     627             : 
     628      147802 :   int number_of_pages = space->CountTotalPages();
     629      147802 :   size_t area_size = space->AreaSize();
     630             : 
     631             :   // Pairs of (live_bytes_in_page, page).
     632             :   using LiveBytesPagePair = std::pair<size_t, Page*>;
     633             :   std::vector<LiveBytesPagePair> pages;
     634      147802 :   pages.reserve(number_of_pages);
     635             : 
     636             :   DCHECK(!sweeping_in_progress());
     637             :   Page* owner_of_linear_allocation_area =
     638             :       space->top() == space->limit()
     639             :           ? nullptr
     640      147802 :           : Page::FromAllocationAreaAddress(space->top());
     641      484709 :   for (Page* p : *space) {
     642      482133 :     if (p->NeverEvacuate() || (p == owner_of_linear_allocation_area) ||
     643             :         !p->CanAllocate())
     644             :       continue;
      645             :     // Invariant: Evacuation candidates are only created when marking is
     646             :     // started. This means that sweeping has finished. Furthermore, at the end
     647             :     // of a GC all evacuation candidates are cleared and their slot buffers are
     648             :     // released.
     649      145226 :     CHECK(!p->IsEvacuationCandidate());
     650      145226 :     CHECK_NULL(p->slot_set<OLD_TO_OLD>());
     651      145226 :     CHECK_NULL(p->typed_slot_set<OLD_TO_OLD>());
     652      145226 :     CHECK(p->SweepingDone());
     653             :     DCHECK(p->area_size() == area_size);
     654      290452 :     pages.push_back(std::make_pair(p->allocated_bytes(), p));
     655             :   }
     656             : 
     657             :   int candidate_count = 0;
     658             :   size_t total_live_bytes = 0;
     659             : 
     660             :   const bool reduce_memory = heap()->ShouldReduceMemory();
     661      147802 :   if (FLAG_manual_evacuation_candidates_selection) {
     662        1202 :     for (size_t i = 0; i < pages.size(); i++) {
     663         397 :       Page* p = pages[i].second;
     664         397 :       if (p->IsFlagSet(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING)) {
     665         189 :         candidate_count++;
     666         189 :         total_live_bytes += pages[i].first;
     667             :         p->ClearFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
     668             :         AddEvacuationCandidate(p);
     669             :       }
     670             :     }
     671      147394 :   } else if (FLAG_stress_compaction_random) {
     672           0 :     double fraction = isolate()->fuzzer_rng()->NextDouble();
     673             :     size_t pages_to_mark_count =
     674           0 :         static_cast<size_t>(fraction * (pages.size() + 1));
     675           0 :     for (uint64_t i : isolate()->fuzzer_rng()->NextSample(
     676           0 :              pages.size(), pages_to_mark_count)) {
     677           0 :       candidate_count++;
     678           0 :       total_live_bytes += pages[i].first;
     679           0 :       AddEvacuationCandidate(pages[i].second);
     680             :     }
     681      147394 :   } else if (FLAG_stress_compaction) {
     682       17654 :     for (size_t i = 0; i < pages.size(); i++) {
     683        7845 :       Page* p = pages[i].second;
     684        7845 :       if (i % 2 == 0) {
     685        4416 :         candidate_count++;
     686        4416 :         total_live_bytes += pages[i].first;
     687             :         AddEvacuationCandidate(p);
     688             :       }
     689             :     }
     690             :   } else {
     691             :     // The following approach determines the pages that should be evacuated.
     692             :     //
      693             :     // We use two conditions to decide whether a page qualifies as an
      694             :     // evacuation candidate or not:
      695             :     // * Target fragmentation: How fragmented a page is, i.e., the ratio
      696             :     //   between live bytes and the capacity of the page (= area).
      697             :     // * Evacuation quota: A global quota determining how many bytes should be
      698             :     //   compacted.
      699             :     //
      700             :     // The algorithm sorts all pages by live bytes and then iterates through
      701             :     // them, starting with the page with the most free memory, adding them to
      702             :     // the set of evacuation candidates as long as both conditions
      703             :     // (fragmentation and quota) hold (see the sketch after this function).
     704             :     size_t max_evacuated_bytes;
     705             :     int target_fragmentation_percent;
     706             :     ComputeEvacuationHeuristics(area_size, &target_fragmentation_percent,
     707      145430 :                                 &max_evacuated_bytes);
     708             : 
     709             :     const size_t free_bytes_threshold =
     710      145430 :         target_fragmentation_percent * (area_size / 100);
     711             : 
     712             :     // Sort pages from the most free to the least free, then select
     713             :     // the first n pages for evacuation such that:
     714             :     // - the total size of evacuated objects does not exceed the specified
     715             :     // limit.
      716             :     // - the fragmentation of the (n+1)-th page does not exceed the specified limit.
     717             :     std::sort(pages.begin(), pages.end(),
     718             :               [](const LiveBytesPagePair& a, const LiveBytesPagePair& b) {
     719             :                 return a.first < b.first;
     720             :               });
     721      419398 :     for (size_t i = 0; i < pages.size(); i++) {
     722      136984 :       size_t live_bytes = pages[i].first;
     723             :       DCHECK_GE(area_size, live_bytes);
     724      136984 :       size_t free_bytes = area_size - live_bytes;
     725      136984 :       if (FLAG_always_compact ||
     726       30638 :           ((free_bytes >= free_bytes_threshold) &&
     727       30638 :            ((total_live_bytes + live_bytes) <= max_evacuated_bytes))) {
     728       30691 :         candidate_count++;
     729       30691 :         total_live_bytes += live_bytes;
     730             :       }
     731      136984 :       if (FLAG_trace_fragmentation_verbose) {
     732           0 :         PrintIsolate(isolate(),
     733             :                      "compaction-selection-page: space=%s free_bytes_page=%zu "
     734             :                      "fragmentation_limit_kb=%" PRIuS
     735             :                      " fragmentation_limit_percent=%d sum_compaction_kb=%zu "
     736             :                      "compaction_limit_kb=%zu\n",
     737             :                      space->name(), free_bytes / KB, free_bytes_threshold / KB,
     738             :                      target_fragmentation_percent, total_live_bytes / KB,
     739           0 :                      max_evacuated_bytes / KB);
     740             :       }
     741             :     }
      742             :     // How many pages we will allocate for the evacuated objects
      743             :     // in the worst case: ceil(total_live_bytes / area_size).
     744             :     int estimated_new_pages =
     745      145430 :         static_cast<int>((total_live_bytes + area_size - 1) / area_size);
     746             :     DCHECK_LE(estimated_new_pages, candidate_count);
     747             :     int estimated_released_pages = candidate_count - estimated_new_pages;
     748             :     // Avoid (compact -> expand) cycles.
     749      145430 :     if ((estimated_released_pages == 0) && !FLAG_always_compact) {
     750             :       candidate_count = 0;
     751             :     }
     752      155878 :     for (int i = 0; i < candidate_count; i++) {
     753       10448 :       AddEvacuationCandidate(pages[i].second);
     754             :     }
     755             :   }
     756             : 
     757      147802 :   if (FLAG_trace_fragmentation) {
     758           0 :     PrintIsolate(isolate(),
     759             :                  "compaction-selection: space=%s reduce_memory=%d pages=%d "
     760             :                  "total_live_bytes=%zu\n",
     761             :                  space->name(), reduce_memory, candidate_count,
     762           0 :                  total_live_bytes / KB);
     763             :   }
     764      147802 : }
     765             : 
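
The free-bytes threshold in the selection loop above is target_fragmentation_percent * (area_size / 100); a page is counted as a candidate only while that threshold is met and the running total of live bytes stays within max_evacuated_bytes. A self-contained sketch of that selection with hypothetical page sizes, assuming FLAG_always_compact is off:

// Sketch only: the two candidate-selection conditions from
// CollectEvacuationCandidates, applied to hypothetical live-byte counts for
// pages already sorted from most free to least free.
#include <cstdio>
#include <vector>

int main() {
  const size_t area_size = 500 * 1024;                  // 500 KB usable per page
  const int target_fragmentation_percent = 70;
  const size_t max_evacuated_bytes = 4 * 1024 * 1024;   // 4 MB quota
  const size_t free_bytes_threshold =
      target_fragmentation_percent * (area_size / 100);  // 350 KB

  std::vector<size_t> live_bytes_per_page = {50 * 1024, 120 * 1024, 400 * 1024};
  int candidate_count = 0;
  size_t total_live_bytes = 0;
  for (size_t live_bytes : live_bytes_per_page) {
    size_t free_bytes = area_size - live_bytes;
    // Condition 1: the page is fragmented enough (enough free bytes).
    // Condition 2: the evacuation quota is not yet exhausted.
    if (free_bytes >= free_bytes_threshold &&
        total_live_bytes + live_bytes <= max_evacuated_bytes) {
      candidate_count++;
      total_live_bytes += live_bytes;
    }
  }
  // The pages with 50 KB and 120 KB live qualify (450 KB / 380 KB free);
  // the 400 KB page does not (only 100 KB free, below the 350 KB threshold).
  std::printf("candidates: %d, live bytes to move: %zu\n", candidate_count,
              total_live_bytes);
  return 0;
}
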
     766             : 
     767       62423 : void MarkCompactCollector::AbortCompaction() {
     768       62423 :   if (compacting_) {
     769          38 :     RememberedSet<OLD_TO_OLD>::ClearAll(heap());
     770         116 :     for (Page* p : evacuation_candidates_) {
     771             :       p->ClearEvacuationCandidate();
     772             :     }
     773          38 :     compacting_ = false;
     774             :     evacuation_candidates_.clear();
     775             :   }
     776             :   DCHECK(evacuation_candidates_.empty());
     777       62423 : }
     778             : 
     779             : 
     780       68846 : void MarkCompactCollector::Prepare() {
     781       68846 :   was_marked_incrementally_ = heap()->incremental_marking()->IsMarking();
     782             : 
     783             : #ifdef DEBUG
     784             :   DCHECK(state_ == IDLE);
     785             :   state_ = PREPARE_GC;
     786             : #endif
     787             : 
     788             :   DCHECK(!FLAG_never_compact || !FLAG_always_compact);
     789             : 
     790             :   // Instead of waiting we could also abort the sweeper threads here.
     791       68846 :   EnsureSweepingCompleted();
     792             : 
     793       68846 :   if (heap()->incremental_marking()->IsSweeping()) {
     794        2941 :     heap()->incremental_marking()->Stop();
     795             :   }
     796             : 
     797       68846 :   if (!was_marked_incrementally_) {
     798      186312 :     TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_EMBEDDER_PROLOGUE);
     799       93156 :     heap_->local_embedder_heap_tracer()->TracePrologue();
     800             :   }
     801             : 
      802             :   // Don't start compaction if we are in the middle of an incremental
      803             :   // marking cycle. We did not collect any slots.
     804       68846 :   if (!FLAG_never_compact && !was_marked_incrementally_) {
     805       46578 :     StartCompaction();
     806             :   }
     807             : 
     808             :   PagedSpaces spaces(heap());
     809      275384 :   for (PagedSpace* space = spaces.next(); space != nullptr;
     810             :        space = spaces.next()) {
     811      206538 :     space->PrepareForMarkCompact();
     812             :   }
     813             :   heap()->account_external_memory_concurrently_freed();
     814             : 
     815             : #ifdef VERIFY_HEAP
     816             :   if (!was_marked_incrementally_ && FLAG_verify_heap) {
     817             :     VerifyMarkbitsAreClean();
     818             :   }
     819             : #endif
     820       68846 : }
     821             : 
     822      206577 : void MarkCompactCollector::FinishConcurrentMarking(
     823             :     ConcurrentMarking::StopRequest stop_request) {
      824             :   // FinishConcurrentMarking is called for both concurrent and parallel
      825             :   // marking. It is safe to call this function when tasks are already finished.
     826      206577 :   if (FLAG_parallel_marking || FLAG_concurrent_marking) {
     827      204174 :     heap()->concurrent_marking()->Stop(stop_request);
     828             :     heap()->concurrent_marking()->FlushMemoryChunkData(
     829      204174 :         non_atomic_marking_state());
     830             :   }
     831      206577 : }
     832             : 
     833       68846 : void MarkCompactCollector::VerifyMarking() {
     834       68846 :   CHECK(marking_worklist()->IsEmpty());
     835             :   DCHECK(heap_->incremental_marking()->IsStopped());
     836             : #ifdef VERIFY_HEAP
     837             :   if (FLAG_verify_heap) {
     838             :     FullMarkingVerifier verifier(heap());
     839             :     verifier.Run();
     840             :   }
     841             : #endif
     842             : #ifdef VERIFY_HEAP
     843             :   if (FLAG_verify_heap) {
     844             :     heap()->old_space()->VerifyLiveBytes();
     845             :     heap()->map_space()->VerifyLiveBytes();
     846             :     heap()->code_space()->VerifyLiveBytes();
     847             :   }
     848             : #endif
     849       68846 : }
     850             : 
     851       68846 : void MarkCompactCollector::Finish() {
     852      275384 :   TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_FINISH);
     853             : 
     854       68846 :   epoch_++;
     855             : 
     856             : #ifdef DEBUG
     857             :   heap()->VerifyCountersBeforeConcurrentSweeping();
     858             : #endif
     859             : 
     860       68846 :   CHECK(weak_objects_.current_ephemerons.IsEmpty());
     861       68846 :   CHECK(weak_objects_.discovered_ephemerons.IsEmpty());
     862             :   weak_objects_.next_ephemerons.Clear();
     863             : 
     864       68846 :   sweeper()->StartSweeperTasks();
     865       68846 :   sweeper()->StartIterabilityTasks();
     866             : 
     867             :   // Clear the marking state of live large objects.
     868       68846 :   heap_->lo_space()->ClearMarkingStateOfLiveObjects();
     869       68846 :   heap_->code_lo_space()->ClearMarkingStateOfLiveObjects();
     870             : 
     871             : #ifdef DEBUG
     872             :   DCHECK(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS);
     873             :   state_ = IDLE;
     874             : #endif
     875       68846 :   heap_->isolate()->inner_pointer_to_code_cache()->Flush();
     876             : 
     877             :   // The stub caches are not traversed during GC; clear them to force
     878             :   // their lazy re-initialization. This must be done after the
      879             :   // GC, because it relies on the new addresses of certain old space
     880             :   // objects (empty string, illegal builtin).
     881       68846 :   isolate()->load_stub_cache()->Clear();
     882       68846 :   isolate()->store_stub_cache()->Clear();
     883             : 
     884       68846 :   if (have_code_to_deoptimize_) {
     885             :     // Some code objects were marked for deoptimization during the GC.
     886          66 :     Deoptimizer::DeoptimizeMarkedCode(isolate());
     887          66 :     have_code_to_deoptimize_ = false;
     888             :   }
     889       68846 : }
     890             : 
     891      137692 : class MarkCompactCollector::RootMarkingVisitor final : public RootVisitor {
     892             :  public:
     893             :   explicit RootMarkingVisitor(MarkCompactCollector* collector)
     894       68846 :       : collector_(collector) {}
     895             : 
     896   212485422 :   void VisitRootPointer(Root root, const char* description,
     897             :                         FullObjectSlot p) final {
     898             :     MarkObjectByPointer(root, p);
     899   212485406 :   }
     900             : 
     901     1791996 :   void VisitRootPointers(Root root, const char* description,
     902             :                          FullObjectSlot start, FullObjectSlot end) final {
     903    34897656 :     for (FullObjectSlot p = start; p < end; ++p) MarkObjectByPointer(root, p);
     904     1791996 :   }
     905             : 
     906             :  private:
     907             :   V8_INLINE void MarkObjectByPointer(Root root, FullObjectSlot p) {
     908   243799086 :     if (!(*p)->IsHeapObject()) return;
     909             : 
     910   239478645 :     collector_->MarkRootObject(root, HeapObject::cast(*p));
     911             :   }
     912             : 
     913             :   MarkCompactCollector* const collector_;
     914             : };
     915             : 
     916             : // This visitor is used to visit the body of special objects held alive by
     917             : // other roots.
     918             : //
      919             : // It is currently used for:
      920             : // - Code held alive by the top optimized frame. This code cannot be deoptimized
      921             : //   and thus has to be kept alive in an isolated way, i.e., it should not keep
      922             : //   alive other code objects reachable through the weak list, but it should
      923             : //   keep alive its embedded pointers (which would otherwise be dropped).
      924             : // - The prefix of the string table.
     925      137692 : class MarkCompactCollector::CustomRootBodyMarkingVisitor final
     926             :     : public ObjectVisitor {
     927             :  public:
     928             :   explicit CustomRootBodyMarkingVisitor(MarkCompactCollector* collector)
     929       68846 :       : collector_(collector) {}
     930             : 
     931           0 :   void VisitPointer(HeapObject host, ObjectSlot p) final {
     932             :     MarkObject(host, *p);
     933           0 :   }
     934             : 
     935       69764 :   void VisitPointers(HeapObject host, ObjectSlot start, ObjectSlot end) final {
     936      486952 :     for (ObjectSlot p = start; p < end; ++p) {
     937             :       DCHECK(!HasWeakHeapObjectTag(*p));
     938             :       MarkObject(host, *p);
     939             :     }
     940       69764 :   }
     941             : 
     942           0 :   void VisitPointers(HeapObject host, MaybeObjectSlot start,
     943             :                      MaybeObjectSlot end) final {
     944             :     // At the moment, custom roots cannot contain weak pointers.
     945           0 :     UNREACHABLE();
     946             :   }
     947             : 
     948             :   // VisitEmbedderPointer is defined by ObjectVisitor to call VisitPointers.
     949           0 :   void VisitCodeTarget(Code host, RelocInfo* rinfo) override {
     950           0 :     Code target = Code::GetCodeFromTargetAddress(rinfo->target_address());
     951             :     MarkObject(host, target);
     952           0 :   }
     953        7342 :   void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) override {
     954             :     MarkObject(host, rinfo->target_object());
     955        7342 :   }
     956             : 
     957             :  private:
     958             :   V8_INLINE void MarkObject(HeapObject host, Object object) {
     959      354766 :     if (!object->IsHeapObject()) return;
     960       81294 :     collector_->MarkObject(host, HeapObject::cast(object));
     961             :   }
     962             : 
     963             :   MarkCompactCollector* const collector_;
     964             : };
     965             : 
     966      137692 : class InternalizedStringTableCleaner : public ObjectVisitor {
     967             :  public:
     968             :   InternalizedStringTableCleaner(Heap* heap, HeapObject table)
     969       68846 :       : heap_(heap), pointers_removed_(0), table_(table) {}
     970             : 
     971       68846 :   void VisitPointers(HeapObject host, ObjectSlot start,
     972             :                      ObjectSlot end) override {
     973             :     // Visit all HeapObject pointers in [start, end).
     974       68846 :     Object the_hole = ReadOnlyRoots(heap_).the_hole_value();
     975             :     MarkCompactCollector::NonAtomicMarkingState* marking_state =
     976             :         heap_->mark_compact_collector()->non_atomic_marking_state();
     977   172526044 :     for (ObjectSlot p = start; p < end; ++p) {
     978             :       Object o = *p;
     979   172388352 :       if (o->IsHeapObject()) {
     980             :         HeapObject heap_object = HeapObject::cast(o);
     981   172388352 :         if (marking_state->IsWhite(heap_object)) {
     982     4702564 :           pointers_removed_++;
     983             :           // Set the entry to the_hole_value (as deleted).
     984             :           p.store(the_hole);
     985             :         } else {
     986             :           // StringTable contains only old space strings.
     987             :           DCHECK(!Heap::InYoungGeneration(o));
     988             :           MarkCompactCollector::RecordSlot(table_, p, heap_object);
     989             :         }
     990             :       }
     991             :     }
     992       68846 :   }
     993             : 
     994           0 :   void VisitPointers(HeapObject host, MaybeObjectSlot start,
     995             :                      MaybeObjectSlot end) final {
     996           0 :     UNREACHABLE();
     997             :   }
     998             : 
     999           0 :   void VisitCodeTarget(Code host, RelocInfo* rinfo) final { UNREACHABLE(); }
    1000             : 
    1001           0 :   void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) final {
    1002           0 :     UNREACHABLE();
    1003             :   }
    1004             : 
    1005             :   int PointersRemoved() {
    1006             :     return pointers_removed_;
    1007             :   }
    1008             : 
    1009             :  private:
    1010             :   Heap* heap_;
    1011             :   int pointers_removed_;
    1012             :   HeapObject table_;
    1013             : };
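
// The cleaner above is the generic "sweep a weak table" pattern: after marking,
// any slot whose string is still white is dead, so it is overwritten with
// the_hole and counted, while slots holding survivors are re-recorded via
// RecordSlot for the compaction phase. A minimal std-only sketch of that sweep
// (SweepWeakTable, kDeleted and is_alive are illustrative stand-ins, not V8
// API):
#include <vector>

constexpr int kDeleted = -1;  // plays the role of the_hole sentinel

// Clears dead entries in place and returns how many were removed.
inline int SweepWeakTable(std::vector<int>* table, bool (*is_alive)(int)) {
  int removed = 0;
  for (int& slot : *table) {
    if (slot != kDeleted && !is_alive(slot)) {
      slot = kDeleted;  // dead entry: overwrite with the sentinel
      ++removed;        // the owner later shrinks its element count by this
    }
  }
  return removed;
}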
    1014             : 
    1015      137692 : class ExternalStringTableCleaner : public RootVisitor {
    1016             :  public:
    1017       68846 :   explicit ExternalStringTableCleaner(Heap* heap) : heap_(heap) {}
    1018             : 
    1019       68819 :   void VisitRootPointers(Root root, const char* description,
    1020             :                          FullObjectSlot start, FullObjectSlot end) override {
    1021             :     // Visit all HeapObject pointers in [start, end).
    1022             :     MarkCompactCollector::NonAtomicMarkingState* marking_state =
    1023       68819 :         heap_->mark_compact_collector()->non_atomic_marking_state();
    1024             :     Object the_hole = ReadOnlyRoots(heap_).the_hole_value();
    1025      241697 :     for (FullObjectSlot p = start; p < end; ++p) {
    1026             :       Object o = *p;
    1027      104059 :       if (o->IsHeapObject()) {
    1028             :         HeapObject heap_object = HeapObject::cast(o);
    1029      104059 :         if (marking_state->IsWhite(heap_object)) {
    1030        1638 :           if (o->IsExternalString()) {
    1031        1638 :             heap_->FinalizeExternalString(String::cast(o));
    1032             :           } else {
    1033             :             // The original external string may have been internalized.
    1034             :             DCHECK(o->IsThinString());
    1035             :           }
    1036             :           // Set the entry to the_hole_value (as deleted).
    1037             :           p.store(the_hole);
    1038             :         }
    1039             :       }
    1040             :     }
    1041       68819 :   }
    1042             : 
    1043             :  private:
    1044             :   Heap* heap_;
    1045             : };
    1046             : 
    1047             : // Implementation of WeakObjectRetainer for mark compact GCs. All marked objects
    1048             : // are retained.
    1049      137692 : class MarkCompactWeakObjectRetainer : public WeakObjectRetainer {
    1050             :  public:
    1051             :   explicit MarkCompactWeakObjectRetainer(
    1052             :       MarkCompactCollector::NonAtomicMarkingState* marking_state)
    1053       68846 :       : marking_state_(marking_state) {}
    1054             : 
    1055     1900888 :   Object RetainAs(Object object) override {
    1056             :     HeapObject heap_object = HeapObject::cast(object);
    1057             :     DCHECK(!marking_state_->IsGrey(heap_object));
    1058     1900888 :     if (marking_state_->IsBlack(heap_object)) {
    1059     1656545 :       return object;
    1060      386063 :     } else if (object->IsAllocationSite() &&
    1061             :                !(AllocationSite::cast(object)->IsZombie())) {
    1062             :       // "dead" AllocationSites need to live long enough for a traversal of new
    1063             :       // space. These sites get a one-time reprieve.
    1064             : 
    1065             :       Object nested = object;
    1066      230668 :       while (nested->IsAllocationSite()) {
    1067       78009 :         AllocationSite current_site = AllocationSite::cast(nested);
     1068             :         // MarkZombie will overwrite the nested_site, so read it first
     1069             :         // before marking.
    1070             :         nested = current_site->nested_site();
    1071             :         current_site->MarkZombie();
    1072             :         marking_state_->WhiteToBlack(current_site);
    1073             :       }
    1074             : 
    1075       74650 :       return object;
    1076             :     } else {
    1077      169693 :       return Object();
    1078             :     }
    1079             :   }
    1080             : 
    1081             :  private:
    1082             :   MarkCompactCollector::NonAtomicMarkingState* marking_state_;
    1083             : };
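
// RetainAs() above is the policy hook used by heap()->ProcessAllWeakReferences:
// black (marked) objects are kept, and anything else is dropped by returning
// the null Object(), with one exception. A not-yet-zombie AllocationSite that
// is otherwise dead gets a one-time reprieve: it and its nested sites are
// marked as zombies and force-blackened, so the pretenuring feedback they
// carry can still be traversed once before the sites are finally collected.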
    1084             : 
    1085      159945 : class RecordMigratedSlotVisitor : public ObjectVisitor {
    1086             :  public:
    1087             :   explicit RecordMigratedSlotVisitor(
    1088             :       MarkCompactCollector* collector,
    1089             :       EphemeronRememberedSet* ephemeron_remembered_set)
    1090             :       : collector_(collector),
    1091       80005 :         ephemeron_remembered_set_(ephemeron_remembered_set) {}
    1092             : 
    1093           0 :   inline void VisitPointer(HeapObject host, ObjectSlot p) final {
    1094             :     DCHECK(!HasWeakHeapObjectTag(*p));
    1095  1099744582 :     RecordMigratedSlot(host, MaybeObject::FromObject(*p), p.address());
    1096           0 :   }
    1097             : 
    1098           0 :   inline void VisitPointer(HeapObject host, MaybeObjectSlot p) final {
    1099    50652842 :     RecordMigratedSlot(host, *p, p.address());
    1100           0 :   }
    1101             : 
    1102      533047 :   inline void VisitPointers(HeapObject host, ObjectSlot start,
    1103             :                             ObjectSlot end) final {
    1104   557591674 :     while (start < end) {
    1105             :       VisitPointer(host, start);
    1106             :       ++start;
    1107             :     }
    1108      533025 :   }
    1109             : 
    1110           0 :   inline void VisitPointers(HeapObject host, MaybeObjectSlot start,
    1111             :                             MaybeObjectSlot end) final {
    1112    26320381 :     while (start < end) {
    1113             :       VisitPointer(host, start);
    1114             :       ++start;
    1115             :     }
    1116           0 :   }
    1117             : 
    1118        3332 :   inline void VisitEphemeron(HeapObject host, int index, ObjectSlot key,
    1119             :                              ObjectSlot value) override {
    1120             :     DCHECK(host->IsEphemeronHashTable());
    1121             :     DCHECK(!Heap::InYoungGeneration(host));
    1122             : 
    1123             :     VisitPointer(host, value);
    1124             : 
    1125        6664 :     if (ephemeron_remembered_set_ && Heap::InYoungGeneration(*key)) {
    1126             :       auto table = EphemeronHashTable::unchecked_cast(host);
    1127             :       auto insert_result =
    1128         504 :           ephemeron_remembered_set_->insert({table, std::unordered_set<int>()});
    1129             :       insert_result.first->second.insert(index);
    1130             :     } else {
    1131             :       VisitPointer(host, key);
    1132             :     }
    1133        3332 :   }
    1134             : 
    1135         292 :   inline void VisitCodeTarget(Code host, RelocInfo* rinfo) override {
    1136             :     DCHECK_EQ(host, rinfo->host());
    1137             :     DCHECK(RelocInfo::IsCodeTargetMode(rinfo->rmode()));
    1138         292 :     Code target = Code::GetCodeFromTargetAddress(rinfo->target_address());
    1139             :     // The target is always in old space, we don't have to record the slot in
    1140             :     // the old-to-new remembered set.
    1141             :     DCHECK(!Heap::InYoungGeneration(target));
    1142         292 :     collector_->RecordRelocSlot(host, rinfo, target);
    1143         292 :   }
    1144             : 
    1145       57858 :   inline void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) override {
    1146             :     DCHECK_EQ(host, rinfo->host());
    1147             :     DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
    1148             :     HeapObject object = HeapObject::cast(rinfo->target_object());
    1149             :     GenerationalBarrierForCode(host, rinfo, object);
    1150       57858 :     collector_->RecordRelocSlot(host, rinfo, object);
    1151       57858 :   }
    1152             : 
    1153             :   // Entries that are skipped for recording.
    1154           0 :   inline void VisitExternalReference(Code host, RelocInfo* rinfo) final {}
    1155           0 :   inline void VisitExternalReference(Foreign host, Address* p) final {}
    1156       39084 :   inline void VisitRuntimeEntry(Code host, RelocInfo* rinfo) final {}
    1157           0 :   inline void VisitInternalReference(Code host, RelocInfo* rinfo) final {}
    1158             : 
    1159             :  protected:
    1160   572386876 :   inline virtual void RecordMigratedSlot(HeapObject host, MaybeObject value,
    1161             :                                          Address slot) {
    1162   572386876 :     if (value->IsStrongOrWeak()) {
    1163             :       MemoryChunk* p = MemoryChunk::FromAddress(value.ptr());
    1164   365659204 :       if (p->InYoungGeneration()) {
    1165             :         DCHECK_IMPLIES(
    1166             :             p->IsToPage(),
    1167             :             p->IsFlagSet(Page::PAGE_NEW_NEW_PROMOTION) || p->IsLargePage());
    1168             :         RememberedSet<OLD_TO_NEW>::Insert<AccessMode::NON_ATOMIC>(
    1169     5906080 :             MemoryChunk::FromHeapObject(host), slot);
    1170   359753124 :       } else if (p->IsEvacuationCandidate()) {
    1171             :         RememberedSet<OLD_TO_OLD>::Insert<AccessMode::NON_ATOMIC>(
    1172    10492149 :             MemoryChunk::FromHeapObject(host), slot);
    1173             :       }
    1174             :     }
    1175   572383541 :   }
    1176             : 
    1177             :   MarkCompactCollector* collector_;
    1178             :   EphemeronRememberedSet* ephemeron_remembered_set_;
    1179             : };
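
// RecordMigratedSlot() above rebuilds the remembered sets for an object that
// has just been copied: a slot whose target lives in the young generation is
// added to the OLD_TO_NEW set of the host's page, a slot pointing into an
// evacuation candidate is added to OLD_TO_OLD, and every other slot needs no
// remembering. A compact sketch of that three-way classification, with an
// illustrative enum standing in for the real RememberedSet templates:
enum class SlotKind { kNone, kOldToNew, kOldToOld };

// Hypothetical helper: classifies a slot purely from properties of its target.
inline SlotKind ClassifyMigratedSlot(bool target_is_strong_or_weak,
                                     bool target_in_young_generation,
                                     bool target_on_evacuation_candidate) {
  if (!target_is_strong_or_weak) return SlotKind::kNone;       // Smi or cleared
  if (target_in_young_generation) return SlotKind::kOldToNew;  // old -> new
  if (target_on_evacuation_candidate) return SlotKind::kOldToOld;
  return SlotKind::kNone;
}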
    1180             : 
    1181             : class MigrationObserver {
    1182             :  public:
    1183       57895 :   explicit MigrationObserver(Heap* heap) : heap_(heap) {}
    1184             : 
    1185       57895 :   virtual ~MigrationObserver() = default;
    1186             :   virtual void Move(AllocationSpace dest, HeapObject src, HeapObject dst,
    1187             :                     int size) = 0;
    1188             : 
    1189             :  protected:
    1190             :   Heap* heap_;
    1191             : };
    1192             : 
    1193       57895 : class ProfilingMigrationObserver final : public MigrationObserver {
    1194             :  public:
    1195       57895 :   explicit ProfilingMigrationObserver(Heap* heap) : MigrationObserver(heap) {}
    1196             : 
    1197      868328 :   inline void Move(AllocationSpace dest, HeapObject src, HeapObject dst,
    1198             :                    int size) final {
    1199     1006037 :     if (dest == CODE_SPACE || (dest == OLD_SPACE && dst->IsBytecodeArray())) {
    1200         590 :       PROFILE(heap_->isolate(),
    1201             :               CodeMoveEvent(AbstractCode::cast(src), AbstractCode::cast(dst)));
    1202             :     }
    1203      868328 :     heap_->OnMoveEvent(dst, src, size);
    1204      873890 :   }
    1205             : };
    1206             : 
    1207      319785 : class HeapObjectVisitor {
    1208             :  public:
    1209      319785 :   virtual ~HeapObjectVisitor() = default;
    1210             :   virtual bool Visit(HeapObject object, int size) = 0;
    1211             : };
    1212             : 
    1213      319760 : class EvacuateVisitorBase : public HeapObjectVisitor {
    1214             :  public:
    1215             :   void AddObserver(MigrationObserver* observer) {
    1216        1960 :     migration_function_ = RawMigrateObject<MigrationMode::kObserved>;
    1217        1960 :     observers_.push_back(observer);
    1218             :   }
    1219             : 
    1220             :  protected:
    1221             :   enum MigrationMode { kFast, kObserved };
    1222             : 
    1223             :   using MigrateFunction = void (*)(EvacuateVisitorBase* base, HeapObject dst,
    1224             :                                    HeapObject src, int size,
    1225             :                                    AllocationSpace dest);
    1226             : 
    1227             :   template <MigrationMode mode>
    1228    56988306 :   static void RawMigrateObject(EvacuateVisitorBase* base, HeapObject dst,
    1229             :                                HeapObject src, int size, AllocationSpace dest) {
    1230             :     Address dst_addr = dst->address();
    1231             :     Address src_addr = src->address();
    1232             :     DCHECK(base->heap_->AllowedToBeMigrated(src, dest));
    1233             :     DCHECK_NE(dest, LO_SPACE);
    1234             :     DCHECK_NE(dest, CODE_LO_SPACE);
    1235    56988306 :     if (dest == OLD_SPACE) {
    1236             :       DCHECK_OBJECT_SIZE(size);
    1237             :       DCHECK(IsAligned(size, kTaggedSize));
    1238             :       base->heap_->CopyBlock(dst_addr, src_addr, size);
    1239             :       if (mode != MigrationMode::kFast)
    1240             :         base->ExecuteMigrationObservers(dest, src, dst, size);
    1241    33097974 :       dst->IterateBodyFast(dst->map(), size, base->record_visitor_);
    1242    24285376 :     } else if (dest == CODE_SPACE) {
    1243             :       DCHECK_CODEOBJECT_SIZE(size, base->heap_->code_space());
    1244             :       base->heap_->CopyBlock(dst_addr, src_addr, size);
    1245        1784 :       Code::cast(dst)->Relocate(dst_addr - src_addr);
    1246             :       if (mode != MigrationMode::kFast)
    1247             :         base->ExecuteMigrationObservers(dest, src, dst, size);
    1248        1784 :       dst->IterateBodyFast(dst->map(), size, base->record_visitor_);
    1249             :     } else {
    1250             :       DCHECK_OBJECT_SIZE(size);
    1251             :       DCHECK(dest == NEW_SPACE);
    1252             :       base->heap_->CopyBlock(dst_addr, src_addr, size);
    1253             :       if (mode != MigrationMode::kFast)
    1254             :         base->ExecuteMigrationObservers(dest, src, dst, size);
    1255             :     }
    1256             :     src->set_map_word(MapWord::FromForwardingAddress(dst));
    1257    57502641 :   }
    1258             : 
    1259             :   EvacuateVisitorBase(Heap* heap, LocalAllocator* local_allocator,
    1260             :                       RecordMigratedSlotVisitor* record_visitor)
    1261             :       : heap_(heap),
    1262             :         local_allocator_(local_allocator),
    1263      159880 :         record_visitor_(record_visitor) {
    1264      159880 :     migration_function_ = RawMigrateObject<MigrationMode::kFast>;
    1265             :   }
    1266             : 
    1267    32682702 :   inline bool TryEvacuateObject(AllocationSpace target_space, HeapObject object,
    1268             :                                 int size, HeapObject* target_object) {
    1269             : #ifdef VERIFY_HEAP
    1270             :     if (AbortCompactionForTesting(object)) return false;
    1271             : #endif  // VERIFY_HEAP
    1272             :     AllocationAlignment alignment =
    1273             :         HeapObject::RequiredAlignment(object->map());
    1274             :     AllocationResult allocation =
    1275    32682702 :         local_allocator_->Allocate(target_space, size, alignment);
    1276    32876813 :     if (allocation.To(target_object)) {
    1277             :       MigrateObject(*target_object, object, size, target_space);
    1278    33071881 :       return true;
    1279             :     }
    1280             :     return false;
    1281             :   }
    1282             : 
    1283             :   inline void ExecuteMigrationObservers(AllocationSpace dest, HeapObject src,
    1284             :                                         HeapObject dst, int size) {
    1285     1743538 :     for (MigrationObserver* obs : observers_) {
    1286      868745 :       obs->Move(dest, src, dst, size);
    1287             :     }
    1288             :   }
    1289             : 
    1290             :   inline void MigrateObject(HeapObject dst, HeapObject src, int size,
    1291             :                             AllocationSpace dest) {
    1292    57206502 :     migration_function_(this, dst, src, size, dest);
    1293             :   }
    1294             : 
    1295             : #ifdef VERIFY_HEAP
    1296             :   bool AbortCompactionForTesting(HeapObject object) {
    1297             :     if (FLAG_stress_compaction) {
    1298             :       const uintptr_t mask = static_cast<uintptr_t>(FLAG_random_seed) &
    1299             :                              kPageAlignmentMask & ~kObjectAlignmentMask;
    1300             :       if ((object->ptr() & kPageAlignmentMask) == mask) {
    1301             :         Page* page = Page::FromHeapObject(object);
    1302             :         if (page->IsFlagSet(Page::COMPACTION_WAS_ABORTED_FOR_TESTING)) {
    1303             :           page->ClearFlag(Page::COMPACTION_WAS_ABORTED_FOR_TESTING);
    1304             :         } else {
    1305             :           page->SetFlag(Page::COMPACTION_WAS_ABORTED_FOR_TESTING);
    1306             :           return true;
    1307             :         }
    1308             :       }
    1309             :     }
    1310             :     return false;
    1311             :   }
    1312             : #endif  // VERIFY_HEAP
    1313             : 
    1314             :   Heap* heap_;
    1315             :   LocalAllocator* local_allocator_;
    1316             :   RecordMigratedSlotVisitor* record_visitor_;
    1317             :   std::vector<MigrationObserver*> observers_;
    1318             :   MigrateFunction migration_function_;
    1319             : };
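
// EvacuateVisitorBase avoids a per-object "are there observers?" branch by
// compiling RawMigrateObject twice (kFast and kObserved) and keeping one of
// the two instantiations in migration_function_; AddObserver() simply swaps
// the pointer. A toy version of that dispatch pattern (Copier, Mode and
// MigrateFn are illustrative names, not V8 types):
enum class Mode { kFast, kObserved };

struct Copier {
  using MigrateFn = void (*)(Copier* self, int bytes);

  template <Mode mode>
  static void RawMigrate(Copier* self, int bytes) {
    // ... copy `bytes` from source to destination here ...
    if (mode != Mode::kFast) self->NotifyObservers(bytes);  // folded away in kFast
  }

  void AddObserver() { ++observers_; migrate_ = RawMigrate<Mode::kObserved>; }
  void NotifyObservers(int bytes) { (void)bytes; /* fan out to observers_ */ }

  MigrateFn migrate_ = RawMigrate<Mode::kFast>;  // fast path by default
  int observers_ = 0;
};
// Callers always go through migrate_, so the common no-observer case pays for
// no check at all; the observed variant is only installed by AddObserver().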
    1320             : 
    1321      159880 : class EvacuateNewSpaceVisitor final : public EvacuateVisitorBase {
    1322             :  public:
    1323       79940 :   explicit EvacuateNewSpaceVisitor(
    1324             :       Heap* heap, LocalAllocator* local_allocator,
    1325             :       RecordMigratedSlotVisitor* record_visitor,
    1326             :       Heap::PretenuringFeedbackMap* local_pretenuring_feedback)
    1327             :       : EvacuateVisitorBase(heap, local_allocator, record_visitor),
    1328             :         buffer_(LocalAllocationBuffer::InvalidBuffer()),
    1329             :         promoted_size_(0),
    1330             :         semispace_copied_size_(0),
    1331             :         local_pretenuring_feedback_(local_pretenuring_feedback),
    1332      239820 :         is_incremental_marking_(heap->incremental_marking()->IsMarking()) {}
    1333             : 
    1334    32049015 :   inline bool Visit(HeapObject object, int size) override {
    1335    32049015 :     if (TryEvacuateWithoutCopy(object)) return true;
    1336    31012071 :     HeapObject target_object;
    1337    68609083 :     if (heap_->ShouldBePromoted(object->address()) &&
    1338     6584628 :         TryEvacuateObject(OLD_SPACE, object, size, &target_object)) {
    1339     6573166 :       promoted_size_ += size;
    1340     6573166 :       return true;
    1341             :     }
    1342    48878436 :     heap_->UpdateAllocationSite(object->map(), object,
    1343    24439218 :                                 local_pretenuring_feedback_);
    1344    24394836 :     HeapObject target;
    1345    24394836 :     AllocationSpace space = AllocateTargetObject(object, size, &target);
    1346    24289668 :     MigrateObject(HeapObject::cast(target), object, size, space);
    1347    24329731 :     semispace_copied_size_ += size;
    1348    24329731 :     return true;
    1349             :   }
    1350             : 
    1351             :   intptr_t promoted_size() { return promoted_size_; }
    1352             :   intptr_t semispace_copied_size() { return semispace_copied_size_; }
    1353             : 
    1354             :  private:
    1355    32039657 :   inline bool TryEvacuateWithoutCopy(HeapObject object) {
    1356    32039657 :     if (is_incremental_marking_) return false;
    1357             : 
    1358             :     Map map = object->map();
    1359             : 
    1360             :     // Some objects can be evacuated without creating a copy.
    1361    32056729 :     if (map->visitor_id() == kVisitThinString) {
    1362             :       HeapObject actual = ThinString::cast(object)->unchecked_actual();
    1363     1382240 :       if (MarkCompactCollector::IsOnEvacuationCandidate(actual)) return false;
    1364             :       object->map_slot().Relaxed_Store(
    1365             :           MapWord::FromForwardingAddress(actual).ToMap());
    1366     1377421 :       return true;
    1367             :     }
    1368             :     // TODO(mlippautz): Handle ConsString.
    1369             : 
    1370             :     return false;
    1371             :   }
    1372             : 
    1373    24390509 :   inline AllocationSpace AllocateTargetObject(HeapObject old_object, int size,
    1374             :                                               HeapObject* target_object) {
    1375             :     AllocationAlignment alignment =
    1376             :         HeapObject::RequiredAlignment(old_object->map());
    1377             :     AllocationSpace space_allocated_in = NEW_SPACE;
    1378             :     AllocationResult allocation =
    1379    24390509 :         local_allocator_->Allocate(NEW_SPACE, size, alignment);
    1380    24294184 :     if (allocation.IsRetry()) {
    1381        1999 :       allocation = AllocateInOldSpace(size, alignment);
    1382             :       space_allocated_in = OLD_SPACE;
    1383             :     }
    1384             :     bool ok = allocation.To(target_object);
    1385             :     DCHECK(ok);
    1386             :     USE(ok);
    1387    24294184 :     return space_allocated_in;
    1388             :   }
    1389             : 
    1390        1999 :   inline AllocationResult AllocateInOldSpace(int size_in_bytes,
    1391             :                                              AllocationAlignment alignment) {
    1392             :     AllocationResult allocation =
    1393        1999 :         local_allocator_->Allocate(OLD_SPACE, size_in_bytes, alignment);
    1394        1999 :     if (allocation.IsRetry()) {
    1395           0 :       heap_->FatalProcessOutOfMemory(
    1396           0 :           "MarkCompactCollector: semi-space copy, fallback in old gen");
    1397             :     }
    1398        1999 :     return allocation;
    1399             :   }
    1400             : 
    1401             :   LocalAllocationBuffer buffer_;
    1402             :   intptr_t promoted_size_;
    1403             :   intptr_t semispace_copied_size_;
    1404             :   Heap::PretenuringFeedbackMap* local_pretenuring_feedback_;
    1405             :   bool is_incremental_marking_;
    1406             : };
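
// Visit() above tries three strategies in order: (1) evacuation without a
// copy, currently only for ThinString, whose map word is forwarded straight to
// the actual string; (2) promotion into OLD_SPACE when heap_->ShouldBePromoted
// says the object has survived long enough; (3) a plain semispace copy, which
// falls back to old space (AllocateInOldSpace) only if the new-space
// allocation fails and treats a failure of that fallback as fatal OOM.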
    1407             : 
    1408             : template <PageEvacuationMode mode>
    1409      239820 : class EvacuateNewSpacePageVisitor final : public HeapObjectVisitor {
    1410             :  public:
    1411             :   explicit EvacuateNewSpacePageVisitor(
    1412             :       Heap* heap, RecordMigratedSlotVisitor* record_visitor,
    1413             :       Heap::PretenuringFeedbackMap* local_pretenuring_feedback)
    1414             :       : heap_(heap),
    1415             :         record_visitor_(record_visitor),
    1416             :         moved_bytes_(0),
    1417      159880 :         local_pretenuring_feedback_(local_pretenuring_feedback) {}
    1418             : 
    1419        4193 :   static void Move(Page* page) {
    1420             :     switch (mode) {
    1421             :       case NEW_TO_NEW:
    1422             :         page->heap()->new_space()->MovePageFromSpaceToSpace(page);
    1423             :         page->SetFlag(Page::PAGE_NEW_NEW_PROMOTION);
    1424             :         break;
    1425             :       case NEW_TO_OLD: {
    1426        1486 :         page->heap()->new_space()->from_space().RemovePage(page);
    1427        1486 :         Page* new_page = Page::ConvertNewToOld(page);
    1428             :         DCHECK(!new_page->InYoungGeneration());
    1429             :         new_page->SetFlag(Page::PAGE_NEW_OLD_PROMOTION);
    1430             :         break;
    1431             :       }
    1432             :     }
    1433        4193 :   }
    1434             : 
    1435     3724272 :   inline bool Visit(HeapObject object, int size) override {
    1436             :     if (mode == NEW_TO_NEW) {
    1437     7448544 :       heap_->UpdateAllocationSite(object->map(), object,
    1438             :                                   local_pretenuring_feedback_);
    1439             :     } else if (mode == NEW_TO_OLD) {
    1440     6308654 :       object->IterateBodyFast(record_visitor_);
    1441             :     }
    1442     3730442 :     return true;
    1443             :   }
    1444             : 
    1445             :   intptr_t moved_bytes() { return moved_bytes_; }
    1446        5274 :   void account_moved_bytes(intptr_t bytes) { moved_bytes_ += bytes; }
    1447             : 
    1448             :  private:
    1449             :   Heap* heap_;
    1450             :   RecordMigratedSlotVisitor* record_visitor_;
    1451             :   intptr_t moved_bytes_;
    1452             :   Heap::PretenuringFeedbackMap* local_pretenuring_feedback_;
    1453             : };
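
// This page visitor implements whole-page promotion, which avoids copying the
// objects individually: in NEW_TO_OLD mode the page is converted into an old
// space page and each live object is revisited only to rebuild remembered sets
// (record_visitor_), while in NEW_TO_NEW mode the page just moves within new
// space and only pretenuring feedback is updated.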
    1454             : 
    1455      159880 : class EvacuateOldSpaceVisitor final : public EvacuateVisitorBase {
    1456             :  public:
    1457             :   EvacuateOldSpaceVisitor(Heap* heap, LocalAllocator* local_allocator,
    1458             :                           RecordMigratedSlotVisitor* record_visitor)
    1459       79940 :       : EvacuateVisitorBase(heap, local_allocator, record_visitor) {}
    1460             : 
    1461    26750204 :   inline bool Visit(HeapObject object, int size) override {
    1462    26750204 :     HeapObject target_object;
    1463    26750204 :     if (TryEvacuateObject(Page::FromHeapObject(object)->owner()->identity(),
    1464             :                           object, size, &target_object)) {
    1465             :       DCHECK(object->map_word().IsForwardingAddress());
    1466             :       return true;
    1467             :     }
    1468          25 :     return false;
    1469             :   }
    1470             : };
    1471             : 
    1472          50 : class EvacuateRecordOnlyVisitor final : public HeapObjectVisitor {
    1473             :  public:
    1474          25 :   explicit EvacuateRecordOnlyVisitor(Heap* heap) : heap_(heap) {}
    1475             : 
    1476           0 :   inline bool Visit(HeapObject object, int size) override {
    1477             :     RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector(),
    1478          65 :                                       &heap_->ephemeron_remembered_set_);
    1479          65 :     object->IterateBodyFast(&visitor);
    1480           0 :     return true;
    1481             :   }
    1482             : 
    1483             :  private:
    1484             :   Heap* heap_;
    1485             : };
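
// EvacuateRecordOnlyVisitor is used when objects are not moved (for example on
// evacuation candidates whose compaction was aborted): it leaves every object
// in place and merely re-walks its body with a RecordMigratedSlotVisitor so
// that the remembered sets stay correct.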
    1486             : 
    1487    10715056 : bool MarkCompactCollector::IsUnmarkedHeapObject(Heap* heap, FullObjectSlot p) {
    1488             :   Object o = *p;
    1489    10715056 :   if (!o->IsHeapObject()) return false;
    1490             :   HeapObject heap_object = HeapObject::cast(o);
    1491             :   return heap->mark_compact_collector()->non_atomic_marking_state()->IsWhite(
    1492    10715056 :       heap_object);
    1493             : }
    1494             : 
    1495       68846 : void MarkCompactCollector::MarkStringTable(
    1496             :     ObjectVisitor* custom_root_body_visitor) {
    1497       68846 :   StringTable string_table = heap()->string_table();
    1498             :   // Mark the string table itself.
    1499       68846 :   if (marking_state()->WhiteToBlack(string_table)) {
    1500             :     // Explicitly mark the prefix.
    1501       68368 :     string_table->IteratePrefix(custom_root_body_visitor);
    1502             :   }
    1503       68846 : }
    1504             : 
    1505       68846 : void MarkCompactCollector::MarkRoots(RootVisitor* root_visitor,
    1506             :                                      ObjectVisitor* custom_root_body_visitor) {
    1507             :   // Mark the heap roots including global variables, stack variables,
    1508             :   // etc., and all objects reachable from them.
    1509       68846 :   heap()->IterateStrongRoots(root_visitor, VISIT_ONLY_STRONG);
    1510             : 
    1511             :   // Custom marking for string table and top optimized frame.
    1512       68846 :   MarkStringTable(custom_root_body_visitor);
    1513       68846 :   ProcessTopOptimizedFrame(custom_root_body_visitor);
    1514       68846 : }
    1515             : 
    1516      137692 : void MarkCompactCollector::ProcessEphemeronsUntilFixpoint() {
    1517             :   bool work_to_do = true;
    1518             :   int iterations = 0;
    1519      137692 :   int max_iterations = FLAG_ephemeron_fixpoint_iterations;
    1520             : 
    1521      413154 :   while (work_to_do) {
    1522      137731 :     PerformWrapperTracing();
    1523             : 
    1524      137731 :     if (iterations >= max_iterations) {
    1525             :       // Give up fixpoint iteration and switch to linear algorithm.
    1526           0 :       ProcessEphemeronsLinear();
    1527           0 :       break;
    1528             :     }
    1529             : 
    1530             :     // Move ephemerons from next_ephemerons into current_ephemerons to
    1531             :     // drain them in this iteration.
    1532      137731 :     weak_objects_.current_ephemerons.Swap(weak_objects_.next_ephemerons);
    1533             :     heap()->concurrent_marking()->set_ephemeron_marked(false);
    1534             : 
    1535             :     {
    1536      550924 :       TRACE_GC(heap()->tracer(),
    1537             :                GCTracer::Scope::MC_MARK_WEAK_CLOSURE_EPHEMERON_MARKING);
    1538             : 
    1539      137731 :       if (FLAG_parallel_marking) {
    1540      272258 :         heap_->concurrent_marking()->RescheduleTasksIfNeeded();
    1541             :       }
    1542             : 
    1543      137731 :       work_to_do = ProcessEphemerons();
    1544             :       FinishConcurrentMarking(
    1545      137731 :           ConcurrentMarking::StopRequest::COMPLETE_ONGOING_TASKS);
    1546             :     }
    1547             : 
    1548      137731 :     CHECK(weak_objects_.current_ephemerons.IsEmpty());
    1549      137731 :     CHECK(weak_objects_.discovered_ephemerons.IsEmpty());
    1550             : 
    1551      275440 :     work_to_do = work_to_do || !marking_worklist()->IsEmpty() ||
    1552      137692 :                  heap()->concurrent_marking()->ephemeron_marked() ||
    1553      413115 :                  !marking_worklist()->IsEmbedderEmpty() ||
    1554      137692 :                  !heap()->local_embedder_heap_tracer()->IsRemoteTracingDone();
    1555      137731 :     ++iterations;
    1556             :   }
    1557             : 
    1558      137692 :   CHECK(marking_worklist()->IsEmpty());
    1559      137692 :   CHECK(weak_objects_.current_ephemerons.IsEmpty());
    1560      137692 :   CHECK(weak_objects_.discovered_ephemerons.IsEmpty());
    1561      137692 : }
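
// The loop above interleaves wrapper tracing, worklist draining and ephemeron
// processing until a full pass marks nothing new (or gives up and switches to
// the linear algorithm). Stripped of the concurrent-marking plumbing, the
// ephemeron fixpoint itself looks like the following std-only sketch (Node,
// Ephemeron and MarkTransitively are illustrative types, not the V8 ones):
#include <unordered_set>
#include <vector>

namespace ephemeron_sketch {

struct Node { std::vector<Node*> children; };
struct Ephemeron { Node* key; Node* value; };

// Marks `start` and everything strongly reachable from it.
inline void MarkTransitively(Node* start, std::unordered_set<Node*>* marked) {
  std::vector<Node*> worklist{start};
  while (!worklist.empty()) {
    Node* n = worklist.back();
    worklist.pop_back();
    if (!marked->insert(n).second) continue;  // already marked
    for (Node* child : n->children) worklist.push_back(child);
  }
}

// An ephemeron's value is marked iff its key is marked; marking a value can
// make further keys reachable, so iterate until a pass marks nothing new.
inline void FixpointOverEphemerons(std::vector<Ephemeron> pending,
                                   std::unordered_set<Node*>* marked) {
  bool work_to_do = true;
  while (work_to_do) {
    work_to_do = false;
    std::vector<Ephemeron> still_pending;
    for (const Ephemeron& e : pending) {
      if (marked->count(e.key) && !marked->count(e.value)) {
        MarkTransitively(e.value, marked);  // value newly reachable via key
        work_to_do = true;
      } else if (!marked->count(e.value)) {
        still_pending.push_back(e);  // both still unreachable: retry later
      }
    }
    pending.swap(still_pending);
  }
}

}  // namespace ephemeron_sketch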
    1562             : 
    1563      137731 : bool MarkCompactCollector::ProcessEphemerons() {
    1564      137731 :   Ephemeron ephemeron;
    1565             :   bool ephemeron_marked = false;
    1566             : 
     1567             :   // Drain current_ephemerons and push ephemerons whose key and value are still
    1568             :   // unreachable into next_ephemerons.
    1569      137743 :   while (weak_objects_.current_ephemerons.Pop(kMainThread, &ephemeron)) {
    1570          12 :     if (ProcessEphemeron(ephemeron.key, ephemeron.value)) {
    1571             :       ephemeron_marked = true;
    1572             :     }
    1573             :   }
    1574             : 
    1575             :   // Drain marking worklist and push discovered ephemerons into
    1576             :   // discovered_ephemerons.
    1577             :   ProcessMarkingWorklist();
    1578             : 
     1579             :   // Drain discovered_ephemerons (filled while draining the marking worklist
     1580             :   // above) and push ephemerons whose key and value are still unreachable into
     1581             :   // next_ephemerons.
    1582      137812 :   while (weak_objects_.discovered_ephemerons.Pop(kMainThread, &ephemeron)) {
    1583          81 :     if (ProcessEphemeron(ephemeron.key, ephemeron.value)) {
    1584             :       ephemeron_marked = true;
    1585             :     }
    1586             :   }
    1587             : 
    1588             :   // Flush local ephemerons for main task to global pool.
    1589      137731 :   weak_objects_.ephemeron_hash_tables.FlushToGlobal(kMainThread);
    1590      137731 :   weak_objects_.next_ephemerons.FlushToGlobal(kMainThread);
    1591             : 
    1592      137731 :   return ephemeron_marked;
    1593             : }
    1594             : 
    1595           0 : void MarkCompactCollector::ProcessEphemeronsLinear() {
    1596           0 :   TRACE_GC(heap()->tracer(),
    1597             :            GCTracer::Scope::MC_MARK_WEAK_CLOSURE_EPHEMERON_LINEAR);
    1598           0 :   CHECK(heap()->concurrent_marking()->IsStopped());
    1599             :   std::unordered_multimap<HeapObject, HeapObject, Object::Hasher> key_to_values;
    1600           0 :   Ephemeron ephemeron;
    1601             : 
    1602             :   DCHECK(weak_objects_.current_ephemerons.IsEmpty());
    1603           0 :   weak_objects_.current_ephemerons.Swap(weak_objects_.next_ephemerons);
    1604             : 
    1605           0 :   while (weak_objects_.current_ephemerons.Pop(kMainThread, &ephemeron)) {
    1606           0 :     ProcessEphemeron(ephemeron.key, ephemeron.value);
    1607             : 
    1608           0 :     if (non_atomic_marking_state()->IsWhite(ephemeron.value)) {
    1609           0 :       key_to_values.insert(std::make_pair(ephemeron.key, ephemeron.value));
    1610             :     }
    1611             :   }
    1612             : 
    1613           0 :   ephemeron_marking_.newly_discovered_limit = key_to_values.size();
    1614             :   bool work_to_do = true;
    1615             : 
    1616           0 :   while (work_to_do) {
    1617           0 :     PerformWrapperTracing();
    1618             : 
    1619             :     ResetNewlyDiscovered();
    1620           0 :     ephemeron_marking_.newly_discovered_limit = key_to_values.size();
    1621             : 
    1622             :     {
    1623           0 :       TRACE_GC(heap()->tracer(),
    1624             :                GCTracer::Scope::MC_MARK_WEAK_CLOSURE_EPHEMERON_MARKING);
    1625             :       // Drain marking worklist and push all discovered objects into
    1626             :       // newly_discovered.
    1627             :       ProcessMarkingWorklistInternal<
    1628             :           MarkCompactCollector::MarkingWorklistProcessingMode::
    1629           0 :               kTrackNewlyDiscoveredObjects>();
    1630             :     }
    1631             : 
    1632           0 :     while (weak_objects_.discovered_ephemerons.Pop(kMainThread, &ephemeron)) {
    1633           0 :       ProcessEphemeron(ephemeron.key, ephemeron.value);
    1634             : 
    1635           0 :       if (non_atomic_marking_state()->IsWhite(ephemeron.value)) {
    1636           0 :         key_to_values.insert(std::make_pair(ephemeron.key, ephemeron.value));
    1637             :       }
    1638             :     }
    1639             : 
    1640           0 :     if (ephemeron_marking_.newly_discovered_overflowed) {
    1641             :       // If newly_discovered was overflowed just visit all ephemerons in
    1642             :       // next_ephemerons.
    1643           0 :       weak_objects_.next_ephemerons.Iterate([&](Ephemeron ephemeron) {
    1644           0 :         if (non_atomic_marking_state()->IsBlackOrGrey(ephemeron.key) &&
    1645             :             non_atomic_marking_state()->WhiteToGrey(ephemeron.value)) {
    1646             :           marking_worklist()->Push(ephemeron.value);
    1647             :         }
    1648           0 :       });
    1649             : 
    1650             :     } else {
    1651             :       // This is the good case: newly_discovered stores all discovered
    1652             :       // objects. Now use key_to_values to see if discovered objects keep more
    1653             :       // objects alive due to ephemeron semantics.
    1654           0 :       for (HeapObject object : ephemeron_marking_.newly_discovered) {
    1655             :         auto range = key_to_values.equal_range(object);
    1656           0 :         for (auto it = range.first; it != range.second; ++it) {
    1657           0 :           HeapObject value = it->second;
    1658             :           MarkObject(object, value);
    1659             :         }
    1660             :       }
    1661             :     }
    1662             : 
     1663             :     // Do NOT drain the marking worklist here; otherwise the checks for
     1664             :     // work_to_do below are not sufficient to determine whether another
     1665             :     // iteration is necessary.
    1666             : 
    1667           0 :     work_to_do = !marking_worklist()->IsEmpty() ||
    1668           0 :                  !marking_worklist()->IsEmbedderEmpty() ||
    1669           0 :                  !heap()->local_embedder_heap_tracer()->IsRemoteTracingDone();
    1670           0 :     CHECK(weak_objects_.discovered_ephemerons.IsEmpty());
    1671             :   }
    1672             : 
    1673             :   ResetNewlyDiscovered();
    1674             :   ephemeron_marking_.newly_discovered.shrink_to_fit();
    1675             : 
    1676           0 :   CHECK(marking_worklist()->IsEmpty());
    1677           0 : }
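
// The linear fallback above bounds the cost when the fixpoint does not
// converge within FLAG_ephemeron_fixpoint_iterations: it records every
// unresolved ephemeron in a key -> values multimap and tracks newly discovered
// objects while draining the worklist, so each newly marked key needs only one
// multimap lookup to find the values it keeps alive, instead of rescanning
// every pending ephemeron on every iteration; if the newly_discovered buffer
// overflows it degrades to re-examining all of next_ephemerons.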
    1678             : 
    1679      206577 : void MarkCompactCollector::PerformWrapperTracing() {
    1680      413154 :   if (heap_->local_embedder_heap_tracer()->InUse()) {
    1681         540 :     TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_EMBEDDER_TRACING);
    1682             :     {
    1683             :       LocalEmbedderHeapTracer::ProcessingScope scope(
    1684         405 :           heap_->local_embedder_heap_tracer());
    1685         135 :       HeapObject object;
    1686         205 :       while (marking_worklist()->embedder()->Pop(kMainThread, &object)) {
    1687          35 :         scope.TracePossibleWrapper(JSObject::cast(object));
    1688             :       }
    1689             :     }
    1690         135 :     heap_->local_embedder_heap_tracer()->Trace(
    1691         135 :         std::numeric_limits<double>::infinity());
    1692             :   }
    1693      206577 : }
    1694             : 
    1695           0 : void MarkCompactCollector::ProcessMarkingWorklist() {
    1696             :   ProcessMarkingWorklistInternal<
    1697      481961 :       MarkCompactCollector::MarkingWorklistProcessingMode::kDefault>();
    1698           0 : }
    1699             : 
    1700             : template <MarkCompactCollector::MarkingWorklistProcessingMode mode>
    1701      481958 : void MarkCompactCollector::ProcessMarkingWorklistInternal() {
    1702             :   HeapObject object;
    1703             :   MarkCompactMarkingVisitor visitor(this, marking_state());
    1704   151737521 :   while (!(object = marking_worklist()->Pop()).is_null()) {
    1705             :     // Left trimming may result in grey or black filler objects on the marking
    1706             :     // worklist. Ignore these objects.
    1707   151255563 :     if (object->IsFiller()) {
    1708             :       // Due to copying mark bits and the fact that grey and black have their
    1709             :       // first bit set, one word fillers are always black.
    1710             :       DCHECK_IMPLIES(
    1711             :           object->map() == ReadOnlyRoots(heap()).one_pointer_filler_map(),
    1712             :           marking_state()->IsBlack(object));
    1713             :       // Other fillers may be black or grey depending on the color of the object
    1714             :       // that was trimmed.
    1715             :       DCHECK_IMPLIES(
    1716             :           object->map() != ReadOnlyRoots(heap()).one_pointer_filler_map(),
    1717             :           marking_state()->IsBlackOrGrey(object));
    1718             :       continue;
    1719             :     }
    1720             :     DCHECK(object->IsHeapObject());
    1721             :     DCHECK(heap()->Contains(object));
    1722             :     DCHECK(!(marking_state()->IsWhite(object)));
    1723             :     marking_state()->GreyToBlack(object);
    1724             :     if (mode == MarkCompactCollector::MarkingWorklistProcessingMode::
    1725             :                     kTrackNewlyDiscoveredObjects) {
    1726           0 :       AddNewlyDiscovered(object);
    1727             :     }
    1728             :     Map map = object->map();
    1729             :     MarkObject(object, map);
    1730             :     visitor.Visit(map, object);
    1731             :   }
    1732      481961 : }
    1733             : 
    1734          93 : bool MarkCompactCollector::ProcessEphemeron(HeapObject key, HeapObject value) {
    1735          93 :   if (marking_state()->IsBlackOrGrey(key)) {
    1736          16 :     if (marking_state()->WhiteToGrey(value)) {
    1737             :       marking_worklist()->Push(value);
    1738          16 :       return true;
    1739             :     }
    1740             : 
    1741          77 :   } else if (marking_state()->IsWhite(value)) {
    1742          77 :     weak_objects_.next_ephemerons.Push(kMainThread, Ephemeron{key, value});
    1743             :   }
    1744             : 
    1745             :   return false;
    1746             : }
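
// ProcessEphemeron() above is the single-pair step shared by both strategies:
// if the key is already black or grey, a still-white value is greyed and
// pushed onto the marking worklist (progress is reported via the return
// value); if key and value are both still white, the pair is deferred onto
// next_ephemerons so a later iteration can retry it once the key may have
// become reachable.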
    1747             : 
    1748      137692 : void MarkCompactCollector::ProcessEphemeronMarking() {
    1749             :   DCHECK(marking_worklist()->IsEmpty());
    1750             : 
    1751             :   // Incremental marking might leave ephemerons in main task's local
    1752             :   // buffer, flush it into global pool.
    1753      137692 :   weak_objects_.next_ephemerons.FlushToGlobal(kMainThread);
    1754             : 
    1755      137692 :   ProcessEphemeronsUntilFixpoint();
    1756             : 
    1757      137692 :   CHECK(marking_worklist()->IsEmpty());
    1758      137692 :   CHECK(heap()->local_embedder_heap_tracer()->IsRemoteTracingDone());
    1759      137692 : }
    1760             : 
    1761       68846 : void MarkCompactCollector::ProcessTopOptimizedFrame(ObjectVisitor* visitor) {
    1762      142729 :   for (StackFrameIterator it(isolate(), isolate()->thread_local_top());
    1763       73883 :        !it.done(); it.Advance()) {
    1764      114836 :     if (it.frame()->type() == StackFrame::INTERPRETED) {
    1765             :       return;
    1766             :     }
    1767       81488 :     if (it.frame()->type() == StackFrame::OPTIMIZED) {
    1768        7605 :       Code code = it.frame()->LookupCode();
    1769        7605 :       if (!code->CanDeoptAt(it.frame()->pc())) {
    1770        1396 :         Code::BodyDescriptor::IterateBody(code->map(), code, visitor);
    1771             :       }
    1772             :       return;
    1773             :     }
    1774             :   }
    1775             : }
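
// ProcessTopOptimizedFrame() above walks the stack from the top and returns at
// the first interpreted frame; for the topmost optimized frame, if its code
// cannot deoptimize at the current pc, the code body (and thus its embedded
// objects) is marked through the custom visitor, since such code must stay
// fully alive while it is executing.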
    1776             : 
    1777       68846 : void MarkCompactCollector::RecordObjectStats() {
    1778       68846 :   if (V8_UNLIKELY(TracingFlags::is_gc_stats_enabled())) {
    1779           0 :     heap()->CreateObjectStats();
    1780             :     ObjectStatsCollector collector(heap(), heap()->live_object_stats_.get(),
    1781             :                                    heap()->dead_object_stats_.get());
    1782           0 :     collector.Collect();
    1783           0 :     if (V8_UNLIKELY(TracingFlags::gc_stats.load(std::memory_order_relaxed) &
    1784             :                     v8::tracing::TracingCategoryObserver::ENABLED_BY_TRACING)) {
    1785           0 :       std::stringstream live, dead;
    1786           0 :       heap()->live_object_stats_->Dump(live);
    1787           0 :       heap()->dead_object_stats_->Dump(dead);
    1788           0 :       TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("v8.gc_stats"),
    1789             :                            "V8.GC_Objects_Stats", TRACE_EVENT_SCOPE_THREAD,
    1790             :                            "live", TRACE_STR_COPY(live.str().c_str()), "dead",
    1791             :                            TRACE_STR_COPY(dead.str().c_str()));
    1792             :     }
    1793           0 :     if (FLAG_trace_gc_object_stats) {
    1794           0 :       heap()->live_object_stats_->PrintJSON("live");
    1795           0 :       heap()->dead_object_stats_->PrintJSON("dead");
    1796             :     }
    1797           0 :     heap()->live_object_stats_->CheckpointObjectStats();
    1798           0 :     heap()->dead_object_stats_->ClearObjectStats();
    1799             :   }
    1800       68846 : }
    1801             : 
    1802       68846 : void MarkCompactCollector::MarkLiveObjects() {
    1803      275384 :   TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK);
    1804             :   // The recursive GC marker detects when it is nearing stack overflow,
    1805             :   // and switches to a different marking system.  JS interrupts interfere
    1806             :   // with the C stack limit check.
    1807             :   PostponeInterruptsScope postpone(isolate());
    1808             : 
    1809             :   {
    1810      275384 :     TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_FINISH_INCREMENTAL);
    1811       68846 :     IncrementalMarking* incremental_marking = heap_->incremental_marking();
    1812       68846 :     if (was_marked_incrementally_) {
    1813       22268 :       incremental_marking->Finalize();
    1814             :     } else {
    1815       46578 :       CHECK(incremental_marking->IsStopped());
    1816             :     }
    1817             :   }
    1818             : 
    1819             : #ifdef DEBUG
    1820             :   DCHECK(state_ == PREPARE_GC);
    1821             :   state_ = MARK_LIVE_OBJECTS;
    1822             : #endif
    1823             : 
    1824      137692 :   heap_->local_embedder_heap_tracer()->EnterFinalPause();
    1825             : 
    1826             :   RootMarkingVisitor root_visitor(this);
    1827             : 
    1828             :   {
    1829      275384 :     TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_ROOTS);
    1830             :     CustomRootBodyMarkingVisitor custom_root_body_visitor(this);
    1831       68846 :     MarkRoots(&root_visitor, &custom_root_body_visitor);
    1832             :   }
    1833             : 
    1834             :   {
    1835      275384 :     TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_MAIN);
    1836       68846 :     if (FLAG_parallel_marking) {
    1837      136090 :       heap_->concurrent_marking()->RescheduleTasksIfNeeded();
    1838             :     }
    1839             :     ProcessMarkingWorklist();
    1840             : 
    1841             :     FinishConcurrentMarking(
    1842       68846 :         ConcurrentMarking::StopRequest::COMPLETE_ONGOING_TASKS);
    1843             :     ProcessMarkingWorklist();
    1844             :   }
    1845             : 
    1846             :   {
    1847      275384 :     TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WEAK_CLOSURE);
    1848             : 
    1849             :     DCHECK(marking_worklist()->IsEmpty());
    1850             : 
    1851             :     // Mark objects reachable through the embedder heap. This phase is
    1852             :     // opportunistic as it may not discover graphs that are only reachable
    1853             :     // through ephemerons.
    1854             :     {
    1855      275384 :       TRACE_GC(heap()->tracer(),
    1856             :                GCTracer::Scope::MC_MARK_EMBEDDER_TRACING_CLOSURE);
    1857       68846 :       do {
     1858             :         // PerformWrapperTracing() also empties the work items collected by
     1859             :         // concurrent markers. As a result, this call needs to happen at
     1860             :         // least once.
    1861       68846 :         PerformWrapperTracing();
    1862             :         ProcessMarkingWorklist();
    1863      206538 :       } while (!heap_->local_embedder_heap_tracer()->IsRemoteTracingDone() ||
    1864       68846 :                !marking_worklist()->IsEmbedderEmpty());
    1865             :       DCHECK(marking_worklist()->IsEmbedderEmpty());
    1866             :       DCHECK(marking_worklist()->IsEmpty());
    1867             :     }
    1868             : 
    1869             :     // The objects reachable from the roots are marked, yet unreachable objects
    1870             :     // are unmarked. Mark objects reachable due to embedder heap tracing or
    1871             :     // harmony weak maps.
    1872             :     {
    1873      275384 :       TRACE_GC(heap()->tracer(),
    1874             :                GCTracer::Scope::MC_MARK_WEAK_CLOSURE_EPHEMERON);
    1875       68846 :       ProcessEphemeronMarking();
    1876             :       DCHECK(marking_worklist()->IsEmpty());
    1877             :     }
    1878             : 
    1879             :     // The objects reachable from the roots, weak maps, and embedder heap
    1880             :     // tracing are marked. Objects pointed to only by weak global handles cannot
    1881             :     // be immediately reclaimed. Instead, we have to mark them as pending and
    1882             :     // mark objects reachable from them.
    1883             :     //
    1884             :     // First we identify nonlive weak handles and mark them as pending
    1885             :     // destruction.
    1886             :     {
    1887      275384 :       TRACE_GC(heap()->tracer(),
    1888             :                GCTracer::Scope::MC_MARK_WEAK_CLOSURE_WEAK_HANDLES);
    1889             :       heap()->isolate()->global_handles()->IterateWeakRootsIdentifyFinalizers(
    1890       68846 :           &IsUnmarkedHeapObject);
    1891             :       ProcessMarkingWorklist();
    1892             :     }
    1893             : 
    1894             :     // Process finalizers, effectively keeping them alive until the next
    1895             :     // garbage collection.
    1896             :     {
    1897      275384 :       TRACE_GC(heap()->tracer(),
    1898             :                GCTracer::Scope::MC_MARK_WEAK_CLOSURE_WEAK_ROOTS);
    1899             :       heap()->isolate()->global_handles()->IterateWeakRootsForFinalizers(
    1900       68846 :           &root_visitor);
    1901             :       ProcessMarkingWorklist();
    1902             :     }
    1903             : 
    1904             :     // Repeat ephemeron processing from the newly marked objects.
    1905             :     {
    1906      275384 :       TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WEAK_CLOSURE_HARMONY);
    1907       68846 :       ProcessEphemeronMarking();
    1908             :       DCHECK(marking_worklist()->IsEmbedderEmpty());
    1909             :       DCHECK(marking_worklist()->IsEmpty());
    1910             :     }
    1911             : 
    1912             :     {
    1913             :       heap()->isolate()->global_handles()->IterateWeakRootsForPhantomHandles(
    1914       68846 :           &IsUnmarkedHeapObject);
    1915             :     }
    1916             :   }
    1917             : 
    1918       68846 :   if (was_marked_incrementally_) {
    1919       22268 :     heap()->incremental_marking()->Deactivate();
    1920             :   }
    1921       68846 : }
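
// MarkLiveObjects() above drives the whole marking phase. In order: finish
// incremental marking if it was running, mark the strong roots (plus the
// string table prefix and the top optimized frame), drain the worklist
// together with the concurrent markers, close over the embedder/wrapper graph,
// run the ephemeron fixpoint, then process weak global handles in two rounds
// (identify finalizers, then keep finalizer targets alive) with another
// ephemeron pass in between, and finally clear phantom handles whose targets
// stayed white.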
    1922             : 
    1923       68846 : void MarkCompactCollector::ClearNonLiveReferences() {
    1924      275384 :   TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR);
    1925             : 
    1926             :   {
    1927      275384 :     TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_STRING_TABLE);
    1928             : 
     1929             :     // Prune the string table, removing all strings that are only pointed to
     1930             :     // by the string table itself.  Cannot use string_table() here because
     1931             :     // the string table is marked.
    1932       68846 :     StringTable string_table = heap()->string_table();
    1933             :     InternalizedStringTableCleaner internalized_visitor(heap(), string_table);
    1934       68846 :     string_table->IterateElements(&internalized_visitor);
    1935       68846 :     string_table->ElementsRemoved(internalized_visitor.PointersRemoved());
    1936             : 
    1937             :     ExternalStringTableCleaner external_visitor(heap());
    1938       68846 :     heap()->external_string_table_.IterateAll(&external_visitor);
    1939       68846 :     heap()->external_string_table_.CleanUpAll();
    1940             :   }
    1941             : 
    1942             :   {
    1943      275384 :     TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_FLUSHABLE_BYTECODE);
    1944       68846 :     ClearOldBytecodeCandidates();
    1945             :   }
    1946             : 
    1947             :   {
    1948      275384 :     TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_FLUSHED_JS_FUNCTIONS);
    1949       68846 :     ClearFlushedJsFunctions();
    1950             :   }
    1951             : 
    1952             :   {
    1953      275384 :     TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_WEAK_LISTS);
    1954             :     // Process the weak references.
    1955             :     MarkCompactWeakObjectRetainer mark_compact_object_retainer(
    1956             :         non_atomic_marking_state());
    1957       68846 :     heap()->ProcessAllWeakReferences(&mark_compact_object_retainer);
    1958             :   }
    1959             : 
    1960             :   {
    1961      275384 :     TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_MAPS);
    1962             :     // ClearFullMapTransitions must be called before weak references are
    1963             :     // cleared.
    1964       68846 :     ClearFullMapTransitions();
    1965             :   }
    1966             :   {
    1967      275384 :     TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_WEAK_REFERENCES);
    1968       68846 :     ClearWeakReferences();
    1969       68846 :     ClearWeakCollections();
    1970       68846 :     ClearJSWeakRefs();
    1971             :   }
    1972             : 
    1973       68846 :   MarkDependentCodeForDeoptimization();
    1974             : 
    1975             :   DCHECK(weak_objects_.transition_arrays.IsEmpty());
    1976             :   DCHECK(weak_objects_.weak_references.IsEmpty());
    1977             :   DCHECK(weak_objects_.weak_objects_in_code.IsEmpty());
    1978             :   DCHECK(weak_objects_.js_weak_refs.IsEmpty());
    1979             :   DCHECK(weak_objects_.weak_cells.IsEmpty());
    1980             :   DCHECK(weak_objects_.bytecode_flushing_candidates.IsEmpty());
    1981             :   DCHECK(weak_objects_.flushed_js_functions.IsEmpty());
    1982       68846 : }
    1983             : 
    1984       68846 : void MarkCompactCollector::MarkDependentCodeForDeoptimization() {
    1985       68846 :   std::pair<HeapObject, Code> weak_object_in_code;
    1986      221300 :   while (weak_objects_.weak_objects_in_code.Pop(kMainThread,
    1987             :                                                 &weak_object_in_code)) {
    1988       76227 :     HeapObject object = weak_object_in_code.first;
    1989       76227 :     Code code = weak_object_in_code.second;
    1990       77050 :     if (!non_atomic_marking_state()->IsBlackOrGrey(object) &&
    1991         823 :         !code->embedded_objects_cleared()) {
    1992         264 :       if (!code->marked_for_deoptimization()) {
    1993          90 :         code->SetMarkedForDeoptimization("weak objects");
    1994          90 :         have_code_to_deoptimize_ = true;
    1995             :       }
    1996         264 :       code->ClearEmbeddedObjects(heap_);
    1997             :       DCHECK(code->embedded_objects_cleared());
    1998             :     }
    1999             :   }
    2000       68846 : }
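                     : 
                     : // The clearing functions in this phase share a simple drain pattern: items
                     : // deferred onto a worklist during marking are popped and processed on the
                     : // main thread until the worklist is empty. A minimal standalone sketch of
                     : // that pattern (illustrative only, not part of this file; a std::queue
                     : // stands in for the real worklist):
                     : //
                     : //   #include <queue>
                     : //
                     : //   template <typename T, typename Fn>
                     : //   void DrainWorklist(std::queue<T>& worklist, Fn process) {
                     : //     while (!worklist.empty()) {
                     : //       process(worklist.front());  // e.g. clear or deoptimize the item
                     : //       worklist.pop();
                     : //     }
                     : //   }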
    2001             : 
    2002      412894 : void MarkCompactCollector::ClearPotentialSimpleMapTransition(Map dead_target) {
    2003             :   DCHECK(non_atomic_marking_state()->IsWhite(dead_target));
    2004             :   Object potential_parent = dead_target->constructor_or_backpointer();
    2005      412894 :   if (potential_parent->IsMap()) {
    2006             :     Map parent = Map::cast(potential_parent);
    2007             :     DisallowHeapAllocation no_gc_obviously;
    2008      998955 :     if (non_atomic_marking_state()->IsBlackOrGrey(parent) &&
    2009             :         TransitionsAccessor(isolate(), parent, &no_gc_obviously)
    2010      494533 :             .HasSimpleTransitionTo(dead_target)) {
    2011       16628 :       ClearPotentialSimpleMapTransition(parent, dead_target);
    2012             :     }
    2013             :   }
    2014      412894 : }
    2015             : 
    2016       16628 : void MarkCompactCollector::ClearPotentialSimpleMapTransition(Map map,
    2017             :                                                              Map dead_target) {
    2018             :   DCHECK(!map->is_prototype_map());
    2019             :   DCHECK(!dead_target->is_prototype_map());
    2020             :   DCHECK_EQ(map->raw_transitions(), HeapObjectReference::Weak(dead_target));
    2021             :   // Take ownership of the descriptor array.
    2022             :   int number_of_own_descriptors = map->NumberOfOwnDescriptors();
    2023             :   DescriptorArray descriptors = map->instance_descriptors();
    2024       16628 :   if (descriptors == dead_target->instance_descriptors() &&
    2025             :       number_of_own_descriptors > 0) {
    2026        3961 :     TrimDescriptorArray(map, descriptors);
    2027             :     DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
    2028             :   }
    2029       16628 : }
    2030             : 
    2031       92723 : void MarkCompactCollector::FlushBytecodeFromSFI(
    2032             :     SharedFunctionInfo shared_info) {
    2033             :   DCHECK(shared_info->HasBytecodeArray());
    2034             : 
    2035             :   // Retain objects required for uncompiled data.
    2036       92723 :   String inferred_name = shared_info->inferred_name();
    2037       92723 :   int start_position = shared_info->StartPosition();
    2038       92723 :   int end_position = shared_info->EndPosition();
    2039             : 
    2040       92723 :   shared_info->DiscardCompiledMetadata(
    2041       92723 :       isolate(), [](HeapObject object, ObjectSlot slot, HeapObject target) {
    2042             :         RecordSlot(object, slot, target);
    2043      185446 :       });
    2044             : 
    2045             :   // The size of the bytecode array should always be at least as large as an
    2046             :   // UncompiledData object.
    2047             :   STATIC_ASSERT(BytecodeArray::SizeFor(0) >=
    2048             :                 UncompiledDataWithoutPreparseData::kSize);
    2049             : 
    2050             :   // Replace bytecode array with an uncompiled data array.
    2051       92723 :   HeapObject compiled_data = shared_info->GetBytecodeArray();
    2052             :   Address compiled_data_start = compiled_data->address();
    2053       92723 :   int compiled_data_size = compiled_data->Size();
    2054             :   MemoryChunk* chunk = MemoryChunk::FromAddress(compiled_data_start);
    2055             : 
    2056             :   // Clear any recorded slots for the compiled data, since they are now invalid.
    2057       92723 :   RememberedSet<OLD_TO_NEW>::RemoveRange(
    2058             :       chunk, compiled_data_start, compiled_data_start + compiled_data_size,
    2059       92723 :       SlotSet::PREFREE_EMPTY_BUCKETS);
    2060             :   RememberedSet<OLD_TO_OLD>::RemoveRange(
    2061             :       chunk, compiled_data_start, compiled_data_start + compiled_data_size,
    2062       92723 :       SlotSet::PREFREE_EMPTY_BUCKETS);
    2063             : 
    2064             :   // Swap the map, using set_map_after_allocation to avoid heap-verification
    2065             :   // checks, which are unnecessary since this happens during the GC atomic pause.
    2066             :   compiled_data->set_map_after_allocation(
    2067             :       ReadOnlyRoots(heap()).uncompiled_data_without_preparse_data_map(),
    2068             :       SKIP_WRITE_BARRIER);
    2069             : 
    2070             :   // Create a filler object for any leftover space in the bytecode array.
    2071       92723 :   if (!heap()->IsLargeObject(compiled_data)) {
    2072             :     heap()->CreateFillerObjectAt(
    2073             :         compiled_data->address() + UncompiledDataWithoutPreparseData::kSize,
    2074             :         compiled_data_size - UncompiledDataWithoutPreparseData::kSize,
    2075      185446 :         ClearRecordedSlots::kNo);
    2076             :   }
    2077             : 
    2078             :   // Initialize the uncompiled data.
    2079             :   UncompiledData uncompiled_data = UncompiledData::cast(compiled_data);
    2080       92723 :   UncompiledData::Initialize(
    2081             :       uncompiled_data, inferred_name, start_position, end_position,
    2082             :       kFunctionLiteralIdInvalid,
    2083       92723 :       [](HeapObject object, ObjectSlot slot, HeapObject target) {
    2084             :         RecordSlot(object, slot, target);
    2085      185446 :       });
    2086             : 
    2087             :   // Mark the uncompiled data as black, and ensure all fields have already been
    2088             :   // marked.
    2089             :   DCHECK(non_atomic_marking_state()->IsBlackOrGrey(inferred_name));
    2090             :   non_atomic_marking_state()->WhiteToBlack(uncompiled_data);
    2091             : 
    2092             :   // Use the raw function data setter to avoid validity checks, since we're
    2093             :   // performing the unusual task of decompiling.
    2094       92723 :   shared_info->set_function_data(uncompiled_data);
    2095             :   DCHECK(!shared_info->is_compiled());
    2096       92723 : }
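                     : 
                     : // FlushBytecodeFromSFI above replaces a dead BytecodeArray with an
                     : // UncompiledData object in place: the map word is overwritten and the unused
                     : // tail of the old object becomes a filler, so heap iteration still sees
                     : // valid objects. A sketch of the size bookkeeping (illustrative only,
                     : // reusing names from the function above; old_size/new_size/filler_size are
                     : // hypothetical locals):
                     : //
                     : //   int old_size = compiled_data_size;                        // dead BytecodeArray
                     : //   int new_size = UncompiledDataWithoutPreparseData::kSize;  // replacement object
                     : //   int filler_size = old_size - new_size;  // >= 0 per the STATIC_ASSERT above;
                     : //                                            // becomes a filler object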
    2097             : 
    2098       68846 : void MarkCompactCollector::ClearOldBytecodeCandidates() {
    2099             :   DCHECK(FLAG_flush_bytecode ||
    2100             :          weak_objects_.bytecode_flushing_candidates.IsEmpty());
    2101       68846 :   SharedFunctionInfo flushing_candidate;
    2102      372010 :   while (weak_objects_.bytecode_flushing_candidates.Pop(kMainThread,
    2103             :                                                         &flushing_candidate)) {
    2104             :     // If the BytecodeArray is dead, flush it, which will replace the field with
    2105             :     // an uncompiled data object.
    2106      303164 :     if (!non_atomic_marking_state()->IsBlackOrGrey(
    2107      606328 :             flushing_candidate->GetBytecodeArray())) {
    2108       92723 :       FlushBytecodeFromSFI(flushing_candidate);
    2109             :     }
    2110             : 
    2111             :     // Now record the slot, which either has been updated to an UncompiledData
    2112             :     // object or still holds the live BytecodeArray.
    2113             :     ObjectSlot slot =
    2114             :         flushing_candidate.RawField(SharedFunctionInfo::kFunctionDataOffset);
    2115             :     RecordSlot(flushing_candidate, slot, HeapObject::cast(*slot));
    2116             :   }
    2117       68846 : }
    2118             : 
    2119       68846 : void MarkCompactCollector::ClearFlushedJsFunctions() {
    2120             :   DCHECK(FLAG_flush_bytecode || weak_objects_.flushed_js_functions.IsEmpty());
    2121       68846 :   JSFunction flushed_js_function;
    2122       83656 :   while (weak_objects_.flushed_js_functions.Pop(kMainThread,
    2123             :                                                 &flushed_js_function)) {
    2124        7405 :     flushed_js_function->ResetIfBytecodeFlushed();
    2125             :   }
    2126       68846 : }
    2127             : 
    2128       68846 : void MarkCompactCollector::ClearFullMapTransitions() {
    2129       68846 :   TransitionArray array;
    2130      750572 :   while (weak_objects_.transition_arrays.Pop(kMainThread, &array)) {
    2131             :     int num_transitions = array->number_of_entries();
    2132      681726 :     if (num_transitions > 0) {
    2133      514506 :       Map map;
    2134             :       // The array might contain "undefined" elements because it's not yet
    2135             :       // filled. Allow it.
    2136      514506 :       if (array->GetTargetIfExists(0, isolate(), &map)) {
    2137             :         DCHECK(!map.is_null());  // Weak pointers aren't cleared yet.
    2138             :         Map parent = Map::cast(map->constructor_or_backpointer());
    2139             :         bool parent_is_alive =
    2140             :             non_atomic_marking_state()->IsBlackOrGrey(parent);
    2141             :         DescriptorArray descriptors = parent_is_alive
    2142             :                                           ? parent->instance_descriptors()
    2143     1029012 :                                           : DescriptorArray();
    2144             :         bool descriptors_owner_died =
    2145      514506 :             CompactTransitionArray(parent, array, descriptors);
    2146      514506 :         if (descriptors_owner_died) {
    2147        2324 :           TrimDescriptorArray(parent, descriptors);
    2148             :         }
    2149             :       }
    2150             :     }
    2151             :   }
    2152       68846 : }
    2153             : 
    2154      514506 : bool MarkCompactCollector::CompactTransitionArray(Map map,
    2155             :                                                   TransitionArray transitions,
    2156             :                                                   DescriptorArray descriptors) {
    2157             :   DCHECK(!map->is_prototype_map());
    2158             :   int num_transitions = transitions->number_of_entries();
    2159             :   bool descriptors_owner_died = false;
    2160             :   int transition_index = 0;
    2161             :   // Compact all live transitions to the left.
    2162     1958408 :   for (int i = 0; i < num_transitions; ++i) {
    2163             :     Map target = transitions->GetTarget(i);
    2164             :     DCHECK_EQ(target->constructor_or_backpointer(), map);
    2165      721951 :     if (non_atomic_marking_state()->IsWhite(target)) {
    2166      146220 :       if (!descriptors.is_null() &&
    2167             :           target->instance_descriptors() == descriptors) {
    2168             :         DCHECK(!target->is_prototype_map());
    2169             :         descriptors_owner_died = true;
    2170             :       }
    2171             :     } else {
    2172      648841 :       if (i != transition_index) {
    2173       14945 :         Name key = transitions->GetKey(i);
    2174             :         transitions->SetKey(transition_index, key);
    2175             :         HeapObjectSlot key_slot = transitions->GetKeySlot(transition_index);
    2176             :         RecordSlot(transitions, key_slot, key);
    2177       14945 :         MaybeObject raw_target = transitions->GetRawTarget(i);
    2178             :         transitions->SetRawTarget(transition_index, raw_target);
    2179             :         HeapObjectSlot target_slot =
    2180             :             transitions->GetTargetSlot(transition_index);
    2181             :         RecordSlot(transitions, target_slot, raw_target->GetHeapObject());
    2182             :       }
    2183      648841 :       transition_index++;
    2184             :     }
    2185             :   }
    2186             :   // If there are no transitions to be cleared, return.
    2187      514506 :   if (transition_index == num_transitions) {
    2188             :     DCHECK(!descriptors_owner_died);
    2189             :     return false;
    2190             :   }
    2191             :   // Note that we never eliminate a transition array, though we might right-trim
    2192             :   // such that number_of_transitions() == 0. If this assumption changes,
    2193             :   // TransitionArray::Insert() will need to deal with the case that a transition
    2194             :   // array disappeared during GC.
    2195       17399 :   int trim = transitions->Capacity() - transition_index;
    2196       17399 :   if (trim > 0) {
    2197       17399 :     heap_->RightTrimWeakFixedArray(transitions,
    2198       17399 :                                    trim * TransitionArray::kEntrySize);
    2199             :     transitions->SetNumberOfTransitions(transition_index);
    2200             :   }
    2201             :   return descriptors_owner_died;
    2202             : }
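                     : 
                     : // CompactTransitionArray above uses the usual compact-left pattern: live
                     : // entries are moved over dead ones and the dead tail is right-trimmed. A
                     : // minimal standalone sketch of the same pattern on a std::vector
                     : // (illustrative only; is_live is a hypothetical predicate, not a V8 API):
                     : //
                     : //   #include <utility>
                     : //   #include <vector>
                     : //
                     : //   template <typename T, typename Pred>
                     : //   size_t CompactLeft(std::vector<T>& entries, Pred is_live) {
                     : //     size_t live = 0;
                     : //     for (size_t i = 0; i < entries.size(); ++i) {
                     : //       if (!is_live(entries[i])) continue;  // skip dead entries
                     : //       if (i != live) entries[live] = std::move(entries[i]);
                     : //       ++live;
                     : //     }
                     : //     entries.resize(live);  // analogous to RightTrimWeakFixedArray above
                     : //     return live;
                     : //   }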
    2203             : 
    2204        5427 : void MarkCompactCollector::RightTrimDescriptorArray(DescriptorArray array,
    2205             :                                                     int descriptors_to_trim) {
    2206        5427 :   int old_nof_all_descriptors = array->number_of_all_descriptors();
    2207        5427 :   int new_nof_all_descriptors = old_nof_all_descriptors - descriptors_to_trim;
    2208             :   DCHECK_LT(0, descriptors_to_trim);
    2209             :   DCHECK_LE(0, new_nof_all_descriptors);
    2210             :   Address start = array->GetDescriptorSlot(new_nof_all_descriptors).address();
    2211             :   Address end = array->GetDescriptorSlot(old_nof_all_descriptors).address();
    2212             :   RememberedSet<OLD_TO_NEW>::RemoveRange(MemoryChunk::FromHeapObject(array),
    2213             :                                          start, end,
    2214        5427 :                                          SlotSet::PREFREE_EMPTY_BUCKETS);
    2215             :   RememberedSet<OLD_TO_OLD>::RemoveRange(MemoryChunk::FromHeapObject(array),
    2216             :                                          start, end,
    2217        5427 :                                          SlotSet::PREFREE_EMPTY_BUCKETS);
    2218             :   heap()->CreateFillerObjectAt(start, static_cast<int>(end - start),
    2219        5427 :                                ClearRecordedSlots::kNo);
    2220             :   array->set_number_of_all_descriptors(new_nof_all_descriptors);
    2221        5427 : }
    2222             : 
    2223        6285 : void MarkCompactCollector::TrimDescriptorArray(Map map,
    2224             :                                                DescriptorArray descriptors) {
    2225             :   int number_of_own_descriptors = map->NumberOfOwnDescriptors();
    2226        6285 :   if (number_of_own_descriptors == 0) {
    2227             :     DCHECK(descriptors == ReadOnlyRoots(heap_).empty_descriptor_array());
    2228             :     return;
    2229             :   }
    2230             :   // TODO(ulan): Trim only if slack is greater than some percentage threshold.
    2231             :   int to_trim =
    2232        6241 :       descriptors->number_of_all_descriptors() - number_of_own_descriptors;
    2233        6241 :   if (to_trim > 0) {
    2234             :     descriptors->set_number_of_descriptors(number_of_own_descriptors);
    2235        5427 :     RightTrimDescriptorArray(descriptors, to_trim);
    2236             : 
    2237        5427 :     TrimEnumCache(map, descriptors);
    2238        5427 :     descriptors->Sort();
    2239             : 
    2240             :     if (FLAG_unbox_double_fields) {
    2241        5427 :       LayoutDescriptor layout_descriptor = map->layout_descriptor();
    2242             :       layout_descriptor = layout_descriptor->Trim(heap_, map, descriptors,
    2243        5427 :                                                   number_of_own_descriptors);
    2244             :       SLOW_DCHECK(layout_descriptor->IsConsistentWithMap(map, true));
    2245             :     }
    2246             :   }
    2247             :   DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
    2248        6241 :   map->set_owns_descriptors(true);
    2249             : }
    2250             : 
    2251        5427 : void MarkCompactCollector::TrimEnumCache(Map map, DescriptorArray descriptors) {
    2252             :   int live_enum = map->EnumLength();
    2253        5427 :   if (live_enum == kInvalidEnumCacheSentinel) {
    2254        5373 :     live_enum = map->NumberOfEnumerableProperties();
    2255             :   }
    2256        5427 :   if (live_enum == 0) return descriptors->ClearEnumCache();
    2257             :   EnumCache enum_cache = descriptors->enum_cache();
    2258             : 
    2259             :   FixedArray keys = enum_cache->keys();
    2260        5381 :   int to_trim = keys->length() - live_enum;
    2261        5381 :   if (to_trim <= 0) return;
    2262          99 :   heap_->RightTrimFixedArray(keys, to_trim);
    2263             : 
    2264             :   FixedArray indices = enum_cache->indices();
    2265          99 :   to_trim = indices->length() - live_enum;
    2266          99 :   if (to_trim <= 0) return;
    2267          90 :   heap_->RightTrimFixedArray(indices, to_trim);
    2268             : }
    2269             : 
    2270       68846 : void MarkCompactCollector::ClearWeakCollections() {
    2271      275384 :   TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_WEAK_COLLECTIONS);
    2272       68846 :   EphemeronHashTable table;
    2273             : 
    2274       77186 :   while (weak_objects_.ephemeron_hash_tables.Pop(kMainThread, &table)) {
    2275       87196 :     for (int i = 0; i < table->Capacity(); i++) {
    2276             :       HeapObject key = HeapObject::cast(table->KeyAt(i));
    2277             : #ifdef VERIFY_HEAP
    2278             :       Object value = table->ValueAt(i);
    2279             : 
    2280             :       if (value->IsHeapObject()) {
    2281             :         CHECK_IMPLIES(
    2282             :             non_atomic_marking_state()->IsBlackOrGrey(key),
    2283             :             non_atomic_marking_state()->IsBlackOrGrey(HeapObject::cast(value)));
    2284             :       }
    2285             : #endif
    2286       39428 :       if (!non_atomic_marking_state()->IsBlackOrGrey(key)) {
    2287         114 :         table->RemoveEntry(i);
    2288             :       }
    2289             :     }
    2290             :   }
    2291      137699 :   for (auto it = heap_->ephemeron_remembered_set_.begin();
    2292       68853 :        it != heap_->ephemeron_remembered_set_.end();) {
    2293           7 :     if (!non_atomic_marking_state()->IsBlackOrGrey(it->first)) {
    2294             :       it = heap_->ephemeron_remembered_set_.erase(it);
    2295             :     } else {
    2296             :       ++it;
    2297             :     }
    2298             :   }
    2299       68846 : }
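                     : 
                     : // ClearWeakCollections above applies ephemeron semantics to each weak
                     : // collection backing store: an entry survives only if its key is marked.
                     : // A minimal sketch of the same rule over a plain std::map (illustrative
                     : // only; is_marked is a hypothetical predicate, not a V8 API):
                     : //
                     : //   #include <map>
                     : //
                     : //   template <typename K, typename V, typename Pred>
                     : //   void ClearDeadEntries(std::map<K, V>& table, Pred is_marked) {
                     : //     for (auto it = table.begin(); it != table.end();) {
                     : //       if (!is_marked(it->first)) it = table.erase(it);  // dead key: drop entry
                     : //       else ++it;                                         // live key: keep entry
                     : //     }
                     : //   }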
    2300             : 
    2301       68846 : void MarkCompactCollector::ClearWeakReferences() {
    2302      275384 :   TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_WEAK_REFERENCES);
    2303             :   std::pair<HeapObject, HeapObjectSlot> slot;
    2304             :   HeapObjectReference cleared_weak_ref =
    2305             :       HeapObjectReference::ClearedValue(isolate());
    2306    18417023 :   while (weak_objects_.weak_references.Pop(kMainThread, &slot)) {
    2307             :     HeapObject value;
    2308             :     // The slot could have been overwritten, so we have to treat it
    2309             :     // as MaybeObjectSlot.
    2310             :     MaybeObjectSlot location(slot.second);
    2311    18348177 :     if ((*location)->GetHeapObjectIfWeak(&value)) {
    2312             :       DCHECK(!value->IsCell());
    2313    18322050 :       if (non_atomic_marking_state()->IsBlackOrGrey(value)) {
    2314             :         // The value of the weak reference is alive.
    2315             :         RecordSlot(slot.first, HeapObjectSlot(location), value);
    2316             :       } else {
    2317     2985719 :         if (value->IsMap()) {
    2318             :           // The map is non-live.
    2319      412894 :           ClearPotentialSimpleMapTransition(Map::cast(value));
    2320             :         }
    2321             :         location.store(cleared_weak_ref);
    2322             :       }
    2323             :     }
    2324             :   }
    2325       68846 : }
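                     : 
                     : // The loop above follows the standard weak-slot protocol: a weak slot either
                     : // keeps pointing at a live (marked) object, in which case the slot is
                     : // re-recorded for compaction, or it is overwritten with the cleared-reference
                     : // sentinel. A minimal sketch of that decision (illustrative only; Obj,
                     : // IsMarked, RecordSlotForCompaction and kClearedWeakValue are hypothetical
                     : // stand-ins, not V8 types or APIs):
                     : //
                     : //   void ClearWeakSlot(Obj** slot) {
                     : //     Obj* value = *slot;
                     : //     if (IsMarked(value)) {
                     : //       RecordSlotForCompaction(slot);  // referent survived; keep the reference
                     : //     } else {
                     : //       *slot = kClearedWeakValue;      // referent is dead; clear the reference
                     : //     }
                     : //   }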
    2326             : 
    2327       68846 : void MarkCompactCollector::ClearJSWeakRefs() {
    2328       68846 :   if (!FLAG_harmony_weak_refs) {
    2329       68441 :     return;
    2330             :   }
    2331         405 :   JSWeakRef weak_ref;
    2332         516 :   while (weak_objects_.js_weak_refs.Pop(kMainThread, &weak_ref)) {
    2333             :     HeapObject target = HeapObject::cast(weak_ref->target());
    2334         111 :     if (!non_atomic_marking_state()->IsBlackOrGrey(target)) {
    2335          74 :       weak_ref->set_target(ReadOnlyRoots(isolate()).undefined_value());
    2336             :     } else {
    2337             :       // The value of the JSWeakRef is alive.
    2338             :       ObjectSlot slot = weak_ref.RawField(JSWeakRef::kTargetOffset);
    2339             :       RecordSlot(weak_ref, slot, target);
    2340             :     }
    2341             :   }
    2342         405 :   WeakCell weak_cell;
    2343         684 :   while (weak_objects_.weak_cells.Pop(kMainThread, &weak_cell)) {
    2344             :     HeapObject target = HeapObject::cast(weak_cell->target());
    2345         279 :     if (!non_atomic_marking_state()->IsBlackOrGrey(target)) {
    2346             :       DCHECK(!target->IsUndefined());
    2347             :       // The value of the WeakCell is dead.
    2348             :       JSFinalizationGroup finalization_group =
    2349             :           JSFinalizationGroup::cast(weak_cell->finalization_group());
    2350         262 :       if (!finalization_group->scheduled_for_cleanup()) {
    2351         208 :         heap()->AddDirtyJSFinalizationGroup(
    2352             :             finalization_group,
    2353         208 :             [](HeapObject object, ObjectSlot slot, Object target) {
    2354         208 :               if (target->IsHeapObject()) {
    2355             :                 RecordSlot(object, slot, HeapObject::cast(target));
    2356             :               }
    2357         416 :             });
    2358             :       }
    2359             :       // We're modifying the pointers in WeakCell and JSFinalizationGroup during
    2360             :       // GC; thus we need to record the slots we write. The normal write
    2361             :       // barrier is not enough, since it is disabled before GC.
    2362         262 :       weak_cell->Nullify(isolate(),
    2363         852 :                          [](HeapObject object, ObjectSlot slot, Object target) {
    2364         852 :                            if (target->IsHeapObject()) {
    2365             :                              RecordSlot(object, slot, HeapObject::cast(target));
    2366             :                            }
    2367        1114 :                          });
    2368             :       DCHECK(finalization_group->NeedsCleanup());
    2369             :       DCHECK(finalization_group->scheduled_for_cleanup());
    2370             :     } else {
    2371             :       // The value of the WeakCell is alive.
    2372             :       ObjectSlot slot = weak_cell.RawField(WeakCell::kTargetOffset);
    2373             :       RecordSlot(weak_cell, slot, HeapObject::cast(*slot));
    2374             :     }
    2375             :   }
    2376             : }
    2377             : 
    2378       62426 : void MarkCompactCollector::AbortWeakObjects() {
    2379             :   weak_objects_.transition_arrays.Clear();
    2380             :   weak_objects_.ephemeron_hash_tables.Clear();
    2381             :   weak_objects_.current_ephemerons.Clear();
    2382             :   weak_objects_.next_ephemerons.Clear();
    2383             :   weak_objects_.discovered_ephemerons.Clear();
    2384             :   weak_objects_.weak_references.Clear();
    2385             :   weak_objects_.weak_objects_in_code.Clear();
    2386             :   weak_objects_.js_weak_refs.Clear();
    2387             :   weak_objects_.weak_cells.Clear();
    2388             :   weak_objects_.bytecode_flushing_candidates.Clear();
    2389             :   weak_objects_.flushed_js_functions.Clear();
    2390       62427 : }
    2391             : 
    2392           0 : bool MarkCompactCollector::IsOnEvacuationCandidate(MaybeObject obj) {
    2393           0 :   return Page::FromAddress(obj.ptr())->IsEvacuationCandidate();
    2394             : }
    2395             : 
    2396             : MarkCompactCollector::RecordRelocSlotInfo
    2397     4539639 : MarkCompactCollector::PrepareRecordRelocSlot(Code host, RelocInfo* rinfo,
    2398             :                                              HeapObject target) {
    2399             :   RecordRelocSlotInfo result;
    2400     4539639 :   result.should_record = false;
    2401             :   Page* target_page = Page::FromHeapObject(target);
    2402             :   Page* source_page = Page::FromHeapObject(host);
    2403     9079278 :   if (target_page->IsEvacuationCandidate() &&
    2404      202829 :       (rinfo->host().is_null() ||
    2405             :        !source_page->ShouldSkipEvacuationSlotRecording())) {
    2406             :     RelocInfo::Mode rmode = rinfo->rmode();
    2407             :     Address addr = rinfo->pc();
    2408             :     SlotType slot_type = SlotTypeForRelocInfoMode(rmode);
    2409      180799 :     if (rinfo->IsInConstantPool()) {
    2410             :       addr = rinfo->constant_pool_entry_address();
    2411             :       if (RelocInfo::IsCodeTargetMode(rmode)) {
    2412             :         slot_type = CODE_ENTRY_SLOT;
    2413             :       } else {
    2414             :         DCHECK(RelocInfo::IsEmbeddedObject(rmode));
    2415             :         slot_type = OBJECT_SLOT;
    2416             :       }
    2417             :     }
    2418      361570 :     uintptr_t offset = addr - source_page->address();
    2419             :     DCHECK_LT(offset, static_cast<uintptr_t>(TypedSlotSet::kMaxOffset));
    2420      180785 :     result.should_record = true;
    2421      180785 :     result.memory_chunk = source_page;
    2422      180785 :     result.slot_type = slot_type;
    2423      180785 :     result.offset = static_cast<uint32_t>(offset);
    2424             :   }
    2425     4539625 :   return result;
    2426             : }
    2427             : 
    2428     1951769 : void MarkCompactCollector::RecordRelocSlot(Code host, RelocInfo* rinfo,
    2429             :                                            HeapObject target) {
    2430     1951769 :   RecordRelocSlotInfo info = PrepareRecordRelocSlot(host, rinfo, target);
    2431     1951769 :   if (info.should_record) {
    2432      131546 :     RememberedSet<OLD_TO_OLD>::InsertTyped(info.memory_chunk, info.slot_type,
    2433      131546 :                                            info.offset);
    2434             :   }
    2435     1951769 : }
    2436             : 
    2437             : namespace {
    2438             : 
    2439             : // Missing specialization MakeSlotValue<FullObjectSlot, WEAK>() turns an attempt
    2440             : // to store a weak reference into a strong-only slot into a compilation error.
    2441             : template <typename TSlot, HeapObjectReferenceType reference_type>
    2442             : typename TSlot::TObject MakeSlotValue(HeapObject heap_object);
    2443             : 
    2444             : template <>
    2445             : Object MakeSlotValue<ObjectSlot, HeapObjectReferenceType::STRONG>(
    2446             :     HeapObject heap_object) {
    2447             :   return heap_object;
    2448             : }
    2449             : 
    2450             : template <>
    2451             : MaybeObject MakeSlotValue<MaybeObjectSlot, HeapObjectReferenceType::STRONG>(
    2452             :     HeapObject heap_object) {
    2453             :   return HeapObjectReference::Strong(heap_object);
    2454             : }
    2455             : 
    2456             : template <>
    2457             : MaybeObject MakeSlotValue<MaybeObjectSlot, HeapObjectReferenceType::WEAK>(
    2458             :     HeapObject heap_object) {
    2459             :   return HeapObjectReference::Weak(heap_object);
    2460             : }
    2461             : 
    2462             : #ifdef V8_COMPRESS_POINTERS
    2463             : template <>
    2464             : Object MakeSlotValue<FullObjectSlot, HeapObjectReferenceType::STRONG>(
    2465             :     HeapObject heap_object) {
    2466             :   return heap_object;
    2467             : }
    2468             : 
    2469             : template <>
    2470             : MaybeObject MakeSlotValue<FullMaybeObjectSlot, HeapObjectReferenceType::STRONG>(
    2471             :     HeapObject heap_object) {
    2472             :   return HeapObjectReference::Strong(heap_object);
    2473             : }
    2474             : 
    2475             : // The following specialization
    2476             : //   MakeSlotValue<FullMaybeObjectSlot, HeapObjectReferenceType::WEAK>()
    2477             : // is not used.
    2478             : #endif
    2479             : 
    2480             : template <AccessMode access_mode, HeapObjectReferenceType reference_type,
    2481             :           typename TSlot>
    2482     6895816 : static inline SlotCallbackResult UpdateSlot(TSlot slot,
    2483             :                                             typename TSlot::TObject old,
    2484             :                                             HeapObject heap_obj) {
    2485             :   static_assert(
    2486             :       std::is_same<TSlot, FullObjectSlot>::value ||
    2487             :           std::is_same<TSlot, ObjectSlot>::value ||
    2488             :           std::is_same<TSlot, FullMaybeObjectSlot>::value ||
    2489             :           std::is_same<TSlot, MaybeObjectSlot>::value,
    2490             :       "Only [Full]ObjectSlot and [Full]MaybeObjectSlot are expected here");
    2491             :   MapWord map_word = heap_obj->map_word();
    2492   405725459 :   if (map_word.IsForwardingAddress()) {
    2493             :     DCHECK_IMPLIES(!Heap::InFromPage(heap_obj),
    2494             :                    MarkCompactCollector::IsOnEvacuationCandidate(heap_obj) ||
    2495             :                        Page::FromHeapObject(heap_obj)->IsFlagSet(
    2496             :                            Page::COMPACTION_WAS_ABORTED));
    2497             :     typename TSlot::TObject target =
    2498             :         MakeSlotValue<TSlot, reference_type>(map_word.ToForwardingAddress());
    2499             :     if (access_mode == AccessMode::NON_ATOMIC) {
    2500             :       slot.store(target);
    2501             :     } else {
    2502             :       slot.Release_CompareAndSwap(old, target);
    2503             :     }
    2504             :     DCHECK(!Heap::InFromPage(target));
    2505             :     DCHECK(!MarkCompactCollector::IsOnEvacuationCandidate(target));
    2506             :   } else {
    2507             :     DCHECK(heap_obj->map()->IsMap());
    2508             :   }
    2509             :   // OLD_TO_OLD slots are always removed after updating.
    2510     6895816 :   return REMOVE_SLOT;
    2511             : }
    2512             : 
    2513             : template <AccessMode access_mode, typename TSlot>
    2514    99425187 : static inline SlotCallbackResult UpdateSlot(TSlot slot) {
    2515             :   typename TSlot::TObject obj = slot.Relaxed_Load();
    2516             :   HeapObject heap_obj;
    2517    99425187 :   if (TSlot::kCanBeWeak && obj->GetHeapObjectIfWeak(&heap_obj)) {
    2518     6896103 :     UpdateSlot<access_mode, HeapObjectReferenceType::WEAK>(slot, obj, heap_obj);
    2519    92529084 :   } else if (obj->GetHeapObjectIfStrong(&heap_obj)) {
    2520             :     return UpdateSlot<access_mode, HeapObjectReferenceType::STRONG>(slot, obj,
    2521             :                                                                     heap_obj);
    2522             :   }
    2523             :   return REMOVE_SLOT;
    2524             : }
    2525             : 
    2526             : template <AccessMode access_mode, typename TSlot>
    2527   370914612 : static inline SlotCallbackResult UpdateStrongSlot(TSlot slot) {
    2528             :   DCHECK(!HasWeakHeapObjectTag((*slot).ptr()));
    2529             :   typename TSlot::TObject obj = slot.Relaxed_Load();
    2530             :   HeapObject heap_obj;
    2531   370914612 :   if (obj.GetHeapObject(&heap_obj)) {
    2532             :     return UpdateSlot<access_mode, HeapObjectReferenceType::STRONG>(slot, obj,
    2533             :                                                                     heap_obj);
    2534             :   }
    2535             :   return REMOVE_SLOT;
    2536             : }
    2537             : 
    2538             : }  // namespace
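                     : 
                     : // The UpdateSlot/UpdateStrongSlot helpers above follow forwarding pointers
                     : // installed during evacuation: if the referenced object was moved, its old
                     : // map word holds the new address and the slot is rewritten to point there.
                     : // A minimal sketch of the idea on a toy object model (illustrative only;
                     : // ToyObject is not a V8 type):
                     : //
                     : //   struct ToyObject {
                     : //     ToyObject* forwarding = nullptr;  // set when the object is evacuated
                     : //   };
                     : //
                     : //   void UpdateToySlot(ToyObject** slot) {
                     : //     ToyObject* obj = *slot;
                     : //     if (obj != nullptr && obj->forwarding != nullptr) {
                     : //       *slot = obj->forwarding;  // redirect to the object's new location
                     : //     }
                     : //   }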
    2539             : 
    2540             : // Visitor for updating root pointers and to-space pointers.
    2541             : // It does not expect to encounter pointers to dead objects.
    2542      283168 : class PointersUpdatingVisitor : public ObjectVisitor, public RootVisitor {
    2543             :  public:
    2544      142838 :   PointersUpdatingVisitor() {}
    2545             : 
    2546      120505 :   void VisitPointer(HeapObject host, ObjectSlot p) override {
    2547             :     UpdateStrongSlotInternal(p);
    2548      120492 :   }
    2549             : 
    2550          12 :   void VisitPointer(HeapObject host, MaybeObjectSlot p) override {
    2551             :     UpdateSlotInternal(p);
    2552          12 :   }
    2553             : 
    2554    12373926 :   void VisitPointers(HeapObject host, ObjectSlot start,
    2555             :                      ObjectSlot end) override {
    2556   149324327 :     for (ObjectSlot p = start; p < end; ++p) {
    2557             :       UpdateStrongSlotInternal(p);
    2558             :     }
    2559    12438538 :   }
    2560             : 
    2561           0 :   void VisitPointers(HeapObject host, MaybeObjectSlot start,
    2562             :                      MaybeObjectSlot end) final {
    2563    66647758 :     for (MaybeObjectSlot p = start; p < end; ++p) {
    2564             :       UpdateSlotInternal(p);
    2565             :     }
    2566           0 :   }
    2567             : 
    2568   214836098 :   void VisitRootPointer(Root root, const char* description,
    2569             :                         FullObjectSlot p) override {
    2570             :     UpdateRootSlotInternal(p);
    2571   214836101 :   }
    2572             : 
    2573     1791996 :   void VisitRootPointers(Root root, const char* description,
    2574             :                          FullObjectSlot start, FullObjectSlot end) override {
    2575    34897656 :     for (FullObjectSlot p = start; p < end; ++p) {
    2576             :       UpdateRootSlotInternal(p);
    2577             :     }
    2578     1791996 :   }
    2579             : 
    2580           0 :   void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) override {
    2581             :     // This visitor never visits code objects.
    2582           0 :     UNREACHABLE();
    2583             :   }
    2584             : 
    2585           0 :   void VisitCodeTarget(Code host, RelocInfo* rinfo) override {
    2586             :     // This visitor never visits code objects.
    2587           0 :     UNREACHABLE();
    2588             :   }
    2589             : 
    2590             :  private:
    2591             :   static inline SlotCallbackResult UpdateRootSlotInternal(FullObjectSlot slot) {
    2592   246149762 :     return UpdateStrongSlot<AccessMode::NON_ATOMIC>(slot);
    2593             :   }
    2594             : 
    2595             :   static inline SlotCallbackResult UpdateStrongMaybeObjectSlotInternal(
    2596             :       MaybeObjectSlot slot) {
    2597             :     return UpdateStrongSlot<AccessMode::NON_ATOMIC>(slot);
    2598             :   }
    2599             : 
    2600             :   static inline SlotCallbackResult UpdateStrongSlotInternal(ObjectSlot slot) {
    2601   124632368 :     return UpdateStrongSlot<AccessMode::NON_ATOMIC>(slot);
    2602             :   }
    2603             : 
    2604             :   static inline SlotCallbackResult UpdateSlotInternal(MaybeObjectSlot slot) {
    2605    63259239 :     return UpdateSlot<AccessMode::NON_ATOMIC>(slot);
    2606             :   }
    2607             : };
    2608             : 
    2609      102421 : static String UpdateReferenceInExternalStringTableEntry(Heap* heap,
    2610             :                                                         FullObjectSlot p) {
    2611             :   MapWord map_word = HeapObject::cast(*p)->map_word();
    2612             : 
    2613      102421 :   if (map_word.IsForwardingAddress()) {
    2614             :     String new_string = String::cast(map_word.ToForwardingAddress());
    2615             : 
    2616         371 :     if (new_string->IsExternalString()) {
    2617         371 :       MemoryChunk::MoveExternalBackingStoreBytes(
    2618             :           ExternalBackingStoreType::kExternalString,
    2619             :           Page::FromAddress((*p).ptr()), Page::FromHeapObject(new_string),
    2620         742 :           ExternalString::cast(new_string)->ExternalPayloadSize());
    2621             :     }
    2622         371 :     return new_string;
    2623             :   }
    2624             : 
    2625             :   return String::cast(*p);
    2626             : }
    2627             : 
    2628       68846 : void MarkCompactCollector::EvacuatePrologue() {
    2629             :   // New space.
    2630             :   NewSpace* new_space = heap()->new_space();
    2631             :   // Append the list of new space pages to be processed.
    2632      161012 :   for (Page* p :
    2633       92166 :        PageRange(new_space->first_allocatable_address(), new_space->top())) {
    2634       92166 :     new_space_evacuation_pages_.push_back(p);
    2635             :   }
    2636       68846 :   new_space->Flip();
    2637       68846 :   new_space->ResetLinearAllocationArea();
    2638             : 
    2639       68846 :   heap()->new_lo_space()->Flip();
    2640             :   heap()->new_lo_space()->ResetPendingObject();
    2641             : 
    2642             :   // Old space.
    2643             :   DCHECK(old_space_evacuation_pages_.empty());
    2644       68846 :   old_space_evacuation_pages_ = std::move(evacuation_candidates_);
    2645             :   evacuation_candidates_.clear();
    2646             :   DCHECK(evacuation_candidates_.empty());
    2647       68846 : }
    2648             : 
    2649       68846 : void MarkCompactCollector::EvacuateEpilogue() {
    2650             :   aborted_evacuation_candidates_.clear();
    2651             :   // New space.
    2652             :   heap()->new_space()->set_age_mark(heap()->new_space()->top());
    2653             :   // Deallocate unmarked large objects.
    2654       68846 :   heap()->lo_space()->FreeUnmarkedObjects();
    2655       68846 :   heap()->code_lo_space()->FreeUnmarkedObjects();
    2656       68846 :   heap()->new_lo_space()->FreeUnmarkedObjects();
    2657             :   // Old space. Deallocate evacuated candidate pages.
    2658       68846 :   ReleaseEvacuationCandidates();
    2659             :   // Give pages that are queued to be freed back to the OS.
    2660       68846 :   heap()->memory_allocator()->unmapper()->FreeQueuedChunks();
    2661             : #ifdef DEBUG
    2662             :   // Old-to-old slot sets must be empty after evacuation.
    2663             :   for (Page* p : *heap()->old_space()) {
    2664             :     DCHECK_NULL((p->slot_set<OLD_TO_OLD, AccessMode::ATOMIC>()));
    2665             :     DCHECK_NULL((p->typed_slot_set<OLD_TO_OLD, AccessMode::ATOMIC>()));
    2666             :     DCHECK_NULL(p->invalidated_slots());
    2667             :   }
    2668             : #endif
    2669       68846 : }
    2670             : 
    2671             : class Evacuator : public Malloced {
    2672             :  public:
    2673             :   enum EvacuationMode {
    2674             :     kObjectsNewToOld,
    2675             :     kPageNewToOld,
    2676             :     kObjectsOldToOld,
    2677             :     kPageNewToNew,
    2678             :   };
    2679             : 
    2680             :   static inline EvacuationMode ComputeEvacuationMode(MemoryChunk* chunk) {
    2681             :     // Note: The order of checks is important in this function.
    2682      187017 :     if (chunk->IsFlagSet(MemoryChunk::PAGE_NEW_OLD_PROMOTION))
    2683             :       return kPageNewToOld;
    2684      182961 :     if (chunk->IsFlagSet(MemoryChunk::PAGE_NEW_NEW_PROMOTION))
    2685             :       return kPageNewToNew;
    2686      177565 :     if (chunk->InYoungGeneration()) return kObjectsNewToOld;
    2687             :     return kObjectsOldToOld;
    2688             :   }
    2689             : 
    2690             :   // NewSpacePages with more live bytes than this threshold qualify for fast
    2691             :   // evacuation.
    2692       68040 :   static intptr_t NewSpacePageEvacuationThreshold() {
    2693       68040 :     if (FLAG_page_promotion)
    2694      136040 :       return FLAG_page_promotion_threshold *
    2695      136040 :              MemoryChunkLayout::AllocatableMemoryInDataPage() / 100;
    2696          20 :     return MemoryChunkLayout::AllocatableMemoryInDataPage() + kTaggedSize;
    2697             :   }
    2698             : 
    2699       79940 :   Evacuator(Heap* heap, RecordMigratedSlotVisitor* record_visitor)
    2700             :       : heap_(heap),
    2701             :         local_allocator_(heap_),
    2702             :         local_pretenuring_feedback_(kInitialLocalPretenuringFeedbackCapacity),
    2703             :         new_space_visitor_(heap_, &local_allocator_, record_visitor,
    2704             :                            &local_pretenuring_feedback_),
    2705             :         new_to_new_page_visitor_(heap_, record_visitor,
    2706             :                                  &local_pretenuring_feedback_),
    2707             :         new_to_old_page_visitor_(heap_, record_visitor,
    2708             :                                  &local_pretenuring_feedback_),
    2709             : 
    2710             :         old_space_visitor_(heap_, &local_allocator_, record_visitor),
    2711             :         duration_(0.0),
    2712      239820 :         bytes_compacted_(0) {}
    2713             : 
    2714      239820 :   virtual ~Evacuator() = default;
    2715             : 
    2716             :   void EvacuatePage(MemoryChunk* chunk);
    2717             : 
    2718         980 :   void AddObserver(MigrationObserver* observer) {
    2719             :     new_space_visitor_.AddObserver(observer);
    2720             :     old_space_visitor_.AddObserver(observer);
    2721         980 :   }
    2722             : 
    2723             :   // Merge back locally cached info sequentially. Note that this method needs
    2724             :   // to be called from the main thread.
    2725             :   inline void Finalize();
    2726             : 
    2727             :   virtual GCTracer::BackgroundScope::ScopeId GetBackgroundTracingScope() = 0;
    2728             : 
    2729             :  protected:
    2730             :   static const int kInitialLocalPretenuringFeedbackCapacity = 256;
    2731             : 
    2732             :   // |saved_live_bytes| returns the live bytes of the page that was processed.
    2733             :   virtual void RawEvacuatePage(MemoryChunk* chunk,
    2734             :                                intptr_t* saved_live_bytes) = 0;
    2735             : 
    2736             :   inline Heap* heap() { return heap_; }
    2737             : 
    2738             :   void ReportCompactionProgress(double duration, intptr_t bytes_compacted) {
    2739       85135 :     duration_ += duration;
    2740       85135 :     bytes_compacted_ += bytes_compacted;
    2741             :   }
    2742             : 
    2743             :   Heap* heap_;
    2744             : 
    2745             :   // Locally cached collector data.
    2746             :   LocalAllocator local_allocator_;
    2747             :   Heap::PretenuringFeedbackMap local_pretenuring_feedback_;
    2748             : 
    2749             :   // Visitors for the corresponding spaces.
    2750             :   EvacuateNewSpaceVisitor new_space_visitor_;
    2751             :   EvacuateNewSpacePageVisitor<PageEvacuationMode::NEW_TO_NEW>
    2752             :       new_to_new_page_visitor_;
    2753             :   EvacuateNewSpacePageVisitor<PageEvacuationMode::NEW_TO_OLD>
    2754             :       new_to_old_page_visitor_;
    2755             :   EvacuateOldSpaceVisitor old_space_visitor_;
    2756             : 
    2757             :   // Bookkeeping info.
    2758             :   double duration_;
    2759             :   intptr_t bytes_compacted_;
    2760             : };
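                     : 
                     : // NewSpacePageEvacuationThreshold above is a plain percentage of a page's
                     : // allocatable memory. A worked example with hypothetical numbers (not the
                     : // actual V8 defaults): with a 70% promotion threshold and 500 KB of
                     : // allocatable memory per data page, a new-space page qualifies for wholesale
                     : // promotion once it holds more than 70 * 500 KB / 100 = 350 KB of live
                     : // bytes. With FLAG_page_promotion disabled, the threshold is one tagged word
                     : // beyond the page's allocatable memory, so no page ever qualifies.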
    2761             : 
    2762       85095 : void Evacuator::EvacuatePage(MemoryChunk* chunk) {
    2763      255325 :   TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.gc"), "Evacuator::EvacuatePage");
    2764             :   DCHECK(chunk->SweepingDone());
    2765       85095 :   intptr_t saved_live_bytes = 0;
    2766       85095 :   double evacuation_time = 0.0;
    2767             :   {
    2768             :     AlwaysAllocateScope always_allocate(heap()->isolate());
    2769             :     TimedScope timed_scope(&evacuation_time);
    2770       85069 :     RawEvacuatePage(chunk, &saved_live_bytes);
    2771             :   }
    2772       85135 :   ReportCompactionProgress(evacuation_time, saved_live_bytes);
    2773       85135 :   if (FLAG_trace_evacuation) {
    2774           0 :     PrintIsolate(heap()->isolate(),
    2775             :                  "evacuation[%p]: page=%p new_space=%d "
    2776             :                  "page_evacuation=%d executable=%d contains_age_mark=%d "
    2777             :                  "live_bytes=%" V8PRIdPTR " time=%f success=%d\n",
    2778             :                  static_cast<void*>(this), static_cast<void*>(chunk),
    2779             :                  chunk->InNewSpace(),
    2780           0 :                  chunk->IsFlagSet(Page::PAGE_NEW_OLD_PROMOTION) ||
    2781             :                      chunk->IsFlagSet(Page::PAGE_NEW_NEW_PROMOTION),
    2782             :                  chunk->IsFlagSet(MemoryChunk::IS_EXECUTABLE),
    2783             :                  chunk->Contains(heap()->new_space()->age_mark()),
    2784             :                  saved_live_bytes, evacuation_time,
    2785           0 :                  chunk->IsFlagSet(Page::COMPACTION_WAS_ABORTED));
    2786             :   }
    2787       85137 : }
    2788             : 
    2789       79940 : void Evacuator::Finalize() {
    2790       79940 :   local_allocator_.Finalize();
    2791      159880 :   heap()->tracer()->AddCompactionEvent(duration_, bytes_compacted_);
    2792       79940 :   heap()->IncrementPromotedObjectsSize(new_space_visitor_.promoted_size() +
    2793             :                                        new_to_old_page_visitor_.moved_bytes());
    2794       79940 :   heap()->IncrementSemiSpaceCopiedObjectSize(
    2795       79940 :       new_space_visitor_.semispace_copied_size() +
    2796             :       new_to_new_page_visitor_.moved_bytes());
    2797       79940 :   heap()->IncrementYoungSurvivorsCounter(
    2798       79940 :       new_space_visitor_.promoted_size() +
    2799       79940 :       new_space_visitor_.semispace_copied_size() +
    2800       79940 :       new_to_old_page_visitor_.moved_bytes() +
    2801             :       new_to_new_page_visitor_.moved_bytes());
    2802       79940 :   heap()->MergeAllocationSitePretenuringFeedback(local_pretenuring_feedback_);
    2803       79940 : }
    2804             : 
    2805      239820 : class FullEvacuator : public Evacuator {
    2806             :  public:
    2807       79940 :   explicit FullEvacuator(MarkCompactCollector* collector)
    2808             :       : Evacuator(collector->heap(), &record_visitor_),
    2809             :         record_visitor_(collector, &ephemeron_remembered_set_),
    2810      239820 :         collector_(collector) {}
    2811             : 
    2812       78223 :   GCTracer::BackgroundScope::ScopeId GetBackgroundTracingScope() override {
    2813       78223 :     return GCTracer::BackgroundScope::MC_BACKGROUND_EVACUATE_COPY;
    2814             :   }
    2815             : 
    2816       79940 :   inline void Finalize() {
    2817       79940 :     Evacuator::Finalize();
    2818             : 
    2819       79981 :     for (auto it = ephemeron_remembered_set_.begin();
    2820             :          it != ephemeron_remembered_set_.end(); ++it) {
    2821             :       auto insert_result =
    2822          41 :           heap()->ephemeron_remembered_set_.insert({it->first, it->second});
    2823          41 :       if (!insert_result.second) {
    2824             :         // Insertion didn't happen, there was already an item.
    2825             :         auto set = insert_result.first->second;
    2826           0 :         for (int entry : it->second) {
    2827             :           set.insert(entry);
    2828             :         }
    2829             :       }
    2830             :     }
    2831       79940 :   }
    2832             : 
    2833             :  protected:
    2834             :   void RawEvacuatePage(MemoryChunk* chunk, intptr_t* live_bytes) override;
    2835             :   EphemeronRememberedSet ephemeron_remembered_set_;
    2836             :   RecordMigratedSlotVisitor record_visitor_;
    2837             : 
    2838             :   MarkCompactCollector* collector_;
    2839             : };
    2840             : 
    2841       85100 : void FullEvacuator::RawEvacuatePage(MemoryChunk* chunk, intptr_t* live_bytes) {
    2842             :   const EvacuationMode evacuation_mode = ComputeEvacuationMode(chunk);
    2843      255344 :   TRACE_EVENT1(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
    2844             :                "FullEvacuator::RawEvacuatePage", "evacuation_mode",
    2845             :                evacuation_mode);
    2846             :   MarkCompactCollector::NonAtomicMarkingState* marking_state =
    2847       85102 :       collector_->non_atomic_marking_state();
    2848       85102 :   *live_bytes = marking_state->live_bytes(chunk);
    2849       85102 :   HeapObject failed_object;
    2850       85102 :   switch (evacuation_mode) {
    2851             :     case kObjectsNewToOld:
    2852       70094 :       LiveObjectVisitor::VisitBlackObjectsNoFail(
    2853             :           chunk, marking_state, &new_space_visitor_,
    2854       70094 :           LiveObjectVisitor::kClearMarkbits);
    2855             :       // ArrayBufferTracker will be updated during pointer updating.
    2856       70111 :       break;
    2857             :     case kPageNewToOld:
    2858        2570 :       LiveObjectVisitor::VisitBlackObjectsNoFail(
    2859             :           chunk, marking_state, &new_to_old_page_visitor_,
    2860        2570 :           LiveObjectVisitor::kKeepMarking);
    2861             :       new_to_old_page_visitor_.account_moved_bytes(
    2862             :           marking_state->live_bytes(chunk));
    2863             :       // ArrayBufferTracker will be updated during sweeping.
    2864             :       break;
    2865             :     case kPageNewToNew:
    2866        2703 :       LiveObjectVisitor::VisitBlackObjectsNoFail(
    2867             :           chunk, marking_state, &new_to_new_page_visitor_,
    2868        2703 :           LiveObjectVisitor::kKeepMarking);
    2869             :       new_to_new_page_visitor_.account_moved_bytes(
    2870             :           marking_state->live_bytes(chunk));
    2871             :       // ArrayBufferTracker will be updated during sweeping.
    2872             :       break;
    2873             :     case kObjectsOldToOld: {
    2874        9729 :       const bool success = LiveObjectVisitor::VisitBlackObjects(
    2875             :           chunk, marking_state, &old_space_visitor_,
    2876        9729 :           LiveObjectVisitor::kClearMarkbits, &failed_object);
    2877        9751 :       if (!success) {
    2878             :         // Aborted compaction page. Actual processing happens on the main
    2879             :         // thread for simplicity.
    2880          25 :         collector_->ReportAbortedEvacuationCandidate(failed_object, chunk);
    2881             :       } else {
    2882             :         // ArrayBufferTracker will be updated during pointer updating.
    2883             :       }
    2884             :       break;
    2885             :     }
    2886             :   }
    2887       85132 : }
    2888             : 
    2889             : class EvacuationItem : public ItemParallelJob::Item {
    2890             :  public:
    2891       85146 :   explicit EvacuationItem(MemoryChunk* chunk) : chunk_(chunk) {}
    2892      170292 :   ~EvacuationItem() override = default;
    2893             :   MemoryChunk* chunk() const { return chunk_; }
    2894             : 
    2895             :  private:
    2896             :   MemoryChunk* chunk_;
    2897             : };
    2898             : 
    2899      159849 : class PageEvacuationTask : public ItemParallelJob::Task {
    2900             :  public:
    2901             :   PageEvacuationTask(Isolate* isolate, Evacuator* evacuator)
    2902             :       : ItemParallelJob::Task(isolate),
    2903             :         evacuator_(evacuator),
    2904      159880 :         tracer_(isolate->heap()->tracer()) {}
    2905             : 
    2906       78252 :   void RunInParallel() override {
    2907      313008 :     TRACE_BACKGROUND_GC(tracer_, evacuator_->GetBackgroundTracingScope());
    2908             :     EvacuationItem* item = nullptr;
    2909      248520 :     while ((item = GetItem<EvacuationItem>()) != nullptr) {
    2910       85123 :       evacuator_->EvacuatePage(item->chunk());
    2911       85136 :       item->MarkFinished();
    2912             :     }
    2913       78295 :   }
    2914             : 
    2915             :  private:
    2916             :   Evacuator* evacuator_;
    2917             :   GCTracer* tracer_;
    2918             : };
    2919             : 
    2920             : template <class Evacuator, class Collector>
    2921       57895 : void MarkCompactCollectorBase::CreateAndExecuteEvacuationTasks(
    2922             :     Collector* collector, ItemParallelJob* job,
    2923             :     MigrationObserver* migration_observer, const intptr_t live_bytes) {
    2924             :   // Used for trace summary.
    2925             :   double compaction_speed = 0;
    2926       57895 :   if (FLAG_trace_evacuation) {
    2927           0 :     compaction_speed = heap()->tracer()->CompactionSpeedInBytesPerMillisecond();
    2928             :   }
    2929             : 
    2930       57895 :   const bool profiling = isolate()->LogObjectRelocation();
    2931             :   ProfilingMigrationObserver profiling_observer(heap());
    2932             : 
    2933             :   const int wanted_num_tasks =
    2934       57895 :       NumberOfParallelCompactionTasks(job->NumberOfItems());
    2935       57895 :   Evacuator** evacuators = new Evacuator*[wanted_num_tasks];
    2936      217775 :   for (int i = 0; i < wanted_num_tasks; i++) {
    2937       79940 :     evacuators[i] = new Evacuator(collector);
    2938       79940 :     if (profiling) evacuators[i]->AddObserver(&profiling_observer);
    2939       79940 :     if (migration_observer != nullptr)
    2940           0 :       evacuators[i]->AddObserver(migration_observer);
    2941      159880 :     job->AddTask(new PageEvacuationTask(heap()->isolate(), evacuators[i]));
    2942             :   }
    2943       57895 :   job->Run();
    2944      217775 :   for (int i = 0; i < wanted_num_tasks; i++) {
    2945       79940 :     evacuators[i]->Finalize();
    2946       79940 :     delete evacuators[i];
    2947             :   }
    2948       57895 :   delete[] evacuators;
    2949             : 
    2950       57895 :   if (FLAG_trace_evacuation) {
    2951           0 :     PrintIsolate(isolate(),
    2952             :                  "%8.0f ms: evacuation-summary: parallel=%s pages=%d "
    2953             :                  "wanted_tasks=%d tasks=%d cores=%d live_bytes=%" V8PRIdPTR
    2954             :                  " compaction_speed=%.f\n",
    2955             :                  isolate()->time_millis_since_init(),
    2956             :                  FLAG_parallel_compaction ? "yes" : "no", job->NumberOfItems(),
    2957             :                  wanted_num_tasks, job->NumberOfTasks(),
    2958           0 :                  V8::GetCurrentPlatform()->NumberOfWorkerThreads() + 1,
    2959             :                  live_bytes, compaction_speed);
    2960             :   }
    2961       57895 : }
    2962             : 
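                     : // Whole-page promotion policy for new-space pages: roughly, a page is moved
                     : // in its entirety (instead of evacuating its objects individually) when
                     : // memory is not being reduced, the page may be evacuated, its live bytes
                     : // exceed Evacuator::NewSpacePageEvacuationThreshold(), it does not contain
                     : // the new-space age mark, and the old generation can still grow by that
                     : // amount.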
    2963       74308 : bool MarkCompactCollectorBase::ShouldMovePage(Page* p, intptr_t live_bytes) {
    2964             :   const bool reduce_memory = heap()->ShouldReduceMemory();
    2965             :   const Address age_mark = heap()->new_space()->age_mark();
    2966      136080 :   return !reduce_memory && !p->NeverEvacuate() &&
    2967       73194 :          (live_bytes > Evacuator::NewSpacePageEvacuationThreshold()) &&
    2968       78521 :          !p->Contains(age_mark) && heap()->CanExpandOldGeneration(live_bytes);
    2969             : }
    2970             : 
    2971       68846 : void MarkCompactCollector::EvacuatePagesInParallel() {
    2972             :   ItemParallelJob evacuation_job(isolate()->cancelable_task_manager(),
    2973      195587 :                                  &page_parallel_job_semaphore_);
    2974             :   intptr_t live_bytes = 0;
    2975             : 
    2976       78597 :   for (Page* page : old_space_evacuation_pages_) {
    2977        9751 :     live_bytes += non_atomic_marking_state()->live_bytes(page);
    2978        9751 :     evacuation_job.AddItem(new EvacuationItem(page));
    2979             :   }
    2980             : 
    2981      161012 :   for (Page* page : new_space_evacuation_pages_) {
    2982             :     intptr_t live_bytes_on_page = non_atomic_marking_state()->live_bytes(page);
    2983       92166 :     if (live_bytes_on_page == 0 && !page->contains_array_buffers()) continue;
    2984       74308 :     live_bytes += live_bytes_on_page;
    2985       74308 :     if (ShouldMovePage(page, live_bytes_on_page)) {
    2986        4193 :       if (page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK)) {
    2987        1486 :         EvacuateNewSpacePageVisitor<NEW_TO_OLD>::Move(page);
    2988             :         DCHECK_EQ(heap()->old_space(), page->owner());
    2989             :         // The move added page->allocated_bytes to the old space, but we are
    2990             :         // going to sweep the page and add page->live_byte_count.
    2991             :         heap()->old_space()->DecreaseAllocatedBytes(page->allocated_bytes(),
    2992             :                                                     page);
    2993             :       } else {
    2994        2707 :         EvacuateNewSpacePageVisitor<NEW_TO_NEW>::Move(page);
    2995             :       }
    2996             :     }
    2997       74308 :     evacuation_job.AddItem(new EvacuationItem(page));
    2998             :   }
    2999             : 
    3000             :   // Promote young generation large objects.
    3001             :   IncrementalMarking::NonAtomicMarkingState* marking_state =
    3002             :       heap()->incremental_marking()->non_atomic_marking_state();
    3003             : 
    3004       70592 :   for (auto it = heap()->new_lo_space()->begin();
    3005             :        it != heap()->new_lo_space()->end();) {
    3006             :     LargePage* current = *it;
    3007             :     it++;
    3008             :     HeapObject object = current->GetObject();
    3009             :     DCHECK(!marking_state->IsGrey(object));
    3010        1746 :     if (marking_state->IsBlack(object)) {
    3011        1087 :       heap_->lo_space()->PromoteNewLargeObject(current);
    3012             :       current->SetFlag(Page::PAGE_NEW_OLD_PROMOTION);
    3013        1087 :       evacuation_job.AddItem(new EvacuationItem(current));
    3014             :     }
    3015             :   }
    3016             : 
    3017       79797 :   if (evacuation_job.NumberOfItems() == 0) return;
    3018             : 
    3019       57895 :   CreateAndExecuteEvacuationTasks<FullEvacuator>(this, &evacuation_job, nullptr,
    3020       57895 :                                                  live_bytes);
    3021       57895 :   PostProcessEvacuationCandidates();
    3022             : }
    3023             : 
    3024      137692 : class EvacuationWeakObjectRetainer : public WeakObjectRetainer {
    3025             :  public:
    3026      137692 :   Object RetainAs(Object object) override {
    3027      137692 :     if (object->IsHeapObject()) {
    3028             :       HeapObject heap_object = HeapObject::cast(object);
    3029             :       MapWord map_word = heap_object->map_word();
    3030      137692 :       if (map_word.IsForwardingAddress()) {
    3031        1254 :         return map_word.ToForwardingAddress();
    3032             :       }
    3033             :     }
    3034      136438 :     return object;
    3035             :   }
    3036             : };
    3037             : 
    3038           0 : void MarkCompactCollector::RecordLiveSlotsOnPage(Page* page) {
    3039             :   EvacuateRecordOnlyVisitor visitor(heap());
    3040             :   LiveObjectVisitor::VisitBlackObjectsNoFail(page, non_atomic_marking_state(),
    3041             :                                              &visitor,
    3042           0 :                                              LiveObjectVisitor::kKeepMarking);
    3043           0 : }
    3044             : 
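                     : // Visits every black object on the chunk with the given visitor. If the
                     : // visitor fails on an object, iteration stops: with kClearMarkbits the
                     : // markbits up to (but not including) that object are cleared, the object is
                     : // reported through |failed_object| so the caller can abort compaction of the
                     : // page, and false is returned.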
    3045             : template <class Visitor, typename MarkingState>
    3046        9708 : bool LiveObjectVisitor::VisitBlackObjects(MemoryChunk* chunk,
    3047             :                                           MarkingState* marking_state,
    3048             :                                           Visitor* visitor,
    3049             :                                           IterationMode iteration_mode,
    3050             :                                           HeapObject* failed_object) {
    3051       29165 :   TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
    3052             :                "LiveObjectVisitor::VisitBlackObjects");
    3053    26793309 :   for (auto object_and_size :
    3054             :        LiveObjectRange<kBlackObjects>(chunk, marking_state->bitmap(chunk))) {
    3055             :     HeapObject const object = object_and_size.first;
    3056    26783585 :     if (!visitor->Visit(object, object_and_size.second)) {
    3057          25 :       if (iteration_mode == kClearMarkbits) {
    3058          25 :         marking_state->bitmap(chunk)->ClearRange(
    3059             :             chunk->AddressToMarkbitIndex(chunk->area_start()),
    3060             :             chunk->AddressToMarkbitIndex(object->address()));
    3061          25 :         *failed_object = object;
    3062             :       }
    3063             :       return false;
    3064             :     }
    3065             :   }
    3066        9724 :   if (iteration_mode == kClearMarkbits) {
    3067             :     marking_state->ClearLiveness(chunk);
    3068             :   }
    3069             :   return true;
    3070             : }
    3071             : 
    3072             : template <class Visitor, typename MarkingState>
    3073       75356 : void LiveObjectVisitor::VisitBlackObjectsNoFail(MemoryChunk* chunk,
    3074             :                                                 MarkingState* marking_state,
    3075             :                                                 Visitor* visitor,
    3076             :                                                 IterationMode iteration_mode) {
    3077      226124 :   TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
    3078             :                "LiveObjectVisitor::VisitBlackObjectsNoFail");
    3079       75357 :   if (chunk->IsLargePage()) {
    3080        1087 :     HeapObject object = reinterpret_cast<LargePage*>(chunk)->GetObject();
    3081        1087 :     if (marking_state->IsBlack(object)) {
    3082        1087 :       const bool success = visitor->Visit(object, object->Size());
    3083             :       USE(success);
    3084             :       DCHECK(success);
    3085             :     }
    3086             :   } else {
    3087    42154655 :     for (auto object_and_size :
    3088             :          LiveObjectRange<kBlackObjects>(chunk, marking_state->bitmap(chunk))) {
    3089    35772696 :       HeapObject const object = object_and_size.first;
    3090             :       DCHECK(marking_state->IsBlack(object));
    3091    35772696 :       const bool success = visitor->Visit(object, object_and_size.second);
    3092             :       USE(success);
    3093             :       DCHECK(success);
    3094             :     }
    3095             :   }
    3096       75411 :   if (iteration_mode == kClearMarkbits) {
    3097             :     marking_state->ClearLiveness(chunk);
    3098             :   }
    3099       75412 : }
    3100             : 
    3101             : template <class Visitor, typename MarkingState>
    3102           0 : void LiveObjectVisitor::VisitGreyObjectsNoFail(MemoryChunk* chunk,
    3103             :                                                MarkingState* marking_state,
    3104             :                                                Visitor* visitor,
    3105             :                                                IterationMode iteration_mode) {
    3106           0 :   TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
    3107             :                "LiveObjectVisitor::VisitGreyObjectsNoFail");
    3108           0 :   if (chunk->IsLargePage()) {
    3109           0 :     HeapObject object = reinterpret_cast<LargePage*>(chunk)->GetObject();
    3110           0 :     if (marking_state->IsGrey(object)) {
    3111           0 :       const bool success = visitor->Visit(object, object->Size());
    3112             :       USE(success);
    3113             :       DCHECK(success);
    3114             :     }
    3115             :   } else {
    3116           0 :     for (auto object_and_size :
    3117             :          LiveObjectRange<kGreyObjects>(chunk, marking_state->bitmap(chunk))) {
    3118           0 :       HeapObject const object = object_and_size.first;
    3119             :       DCHECK(marking_state->IsGrey(object));
    3120           0 :       const bool success = visitor->Visit(object, object_and_size.second);
    3121             :       USE(success);
    3122             :       DCHECK(success);
    3123             :     }
    3124             :   }
    3125           0 :   if (iteration_mode == kClearMarkbits) {
    3126           0 :     marking_state->ClearLiveness(chunk);
    3127             :   }
    3128           0 : }
    3129             : 
    3130             : template <typename MarkingState>
    3131          25 : void LiveObjectVisitor::RecomputeLiveBytes(MemoryChunk* chunk,
    3132             :                                            MarkingState* marking_state) {
    3133             :   int new_live_size = 0;
    3134          90 :   for (auto object_and_size :
    3135             :        LiveObjectRange<kAllLiveObjects>(chunk, marking_state->bitmap(chunk))) {
    3136          65 :     new_live_size += object_and_size.second;
    3137             :   }
    3138          25 :   marking_state->SetLiveBytes(chunk, new_live_size);
    3139          25 : }
    3140             : 
    3141       68846 : void MarkCompactCollector::Evacuate() {
    3142      275384 :   TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_EVACUATE);
    3143             :   base::MutexGuard guard(heap()->relocation_mutex());
    3144             : 
    3145             :   {
    3146      275384 :     TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_EVACUATE_PROLOGUE);
    3147       68846 :     EvacuatePrologue();
    3148             :   }
    3149             : 
    3150             :   {
    3151      275384 :     TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_EVACUATE_COPY);
    3152             :     EvacuationScope evacuation_scope(this);
    3153       68846 :     EvacuatePagesInParallel();
    3154             :   }
    3155             : 
    3156       68846 :   UpdatePointersAfterEvacuation();
    3157             : 
    3158             :   {
    3159      275384 :     TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_EVACUATE_REBALANCE);
    3160       68846 :     if (!heap()->new_space()->Rebalance()) {
    3161           0 :       heap()->FatalProcessOutOfMemory("NewSpace::Rebalance");
    3162             :     }
    3163             :   }
    3164             : 
    3165             :   // Give pages that are queued to be freed back to the OS. Note that filtering
    3166             :   // slots only handles old space (for unboxed doubles), and thus map space can
    3167             :   // still contain stale pointers. We free the chunks only after pointer
    3168             :   // updating so that page headers remain accessible.
    3169       68846 :   heap()->memory_allocator()->unmapper()->FreeQueuedChunks();
    3170             : 
    3171             :   {
    3172      275384 :     TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_EVACUATE_CLEAN_UP);
    3173             : 
    3174      161012 :     for (Page* p : new_space_evacuation_pages_) {
    3175       92166 :       if (p->IsFlagSet(Page::PAGE_NEW_NEW_PROMOTION)) {
    3176             :         p->ClearFlag(Page::PAGE_NEW_NEW_PROMOTION);
    3177        2707 :         sweeper()->AddPageForIterability(p);
    3178       89459 :       } else if (p->IsFlagSet(Page::PAGE_NEW_OLD_PROMOTION)) {
    3179             :         p->ClearFlag(Page::PAGE_NEW_OLD_PROMOTION);
    3180             :         DCHECK_EQ(OLD_SPACE, p->owner()->identity());
    3181        1486 :         sweeper()->AddPage(OLD_SPACE, p, Sweeper::REGULAR);
    3182             :       }
    3183             :     }
    3184             :     new_space_evacuation_pages_.clear();
    3185             : 
    3186       78597 :     for (Page* p : old_space_evacuation_pages_) {
    3187             :       // Important: the skip list should be cleared only after roots were updated,
    3188             :       // because root iteration traverses the stack and might have to find code
    3189             :       // objects from a not-yet-updated pc pointing into an evacuation candidate.
    3190             :       SkipList* list = p->skip_list();
    3191        9751 :       if (list != nullptr) list->Clear();
    3192        9751 :       if (p->IsFlagSet(Page::COMPACTION_WAS_ABORTED)) {
    3193          25 :         sweeper()->AddPage(p->owner()->identity(), p, Sweeper::REGULAR);
    3194             :         p->ClearFlag(Page::COMPACTION_WAS_ABORTED);
    3195             :       }
    3196             :     }
    3197             :   }
    3198             : 
    3199             :   {
    3200      275384 :     TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_EVACUATE_EPILOGUE);
    3201       68846 :     EvacuateEpilogue();
    3202             :   }
    3203             : 
    3204             : #ifdef VERIFY_HEAP
    3205             :   if (FLAG_verify_heap && !sweeper()->sweeping_in_progress()) {
    3206             :     FullEvacuationVerifier verifier(heap());
    3207             :     verifier.Run();
    3208             :   }
    3209             : #endif
    3210       68846 : }
    3211             : 
    3212      541673 : class UpdatingItem : public ItemParallelJob::Item {
    3213             :  public:
    3214     1083346 :   ~UpdatingItem() override = default;
    3215             :   virtual void Process() = 0;
    3216             : };
    3217             : 
    3218      774521 : class PointersUpdatingTask : public ItemParallelJob::Task {
    3219             :  public:
    3220             :   explicit PointersUpdatingTask(Isolate* isolate,
    3221             :                                 GCTracer::BackgroundScope::ScopeId scope)
    3222             :       : ItemParallelJob::Task(isolate),
    3223             :         tracer_(isolate->heap()->tracer()),
    3224      776156 :         scope_(scope) {}
    3225             : 
    3226      308166 :   void RunInParallel() override {
    3227     1231838 :     TRACE_BACKGROUND_GC(tracer_, scope_);
    3228             :     UpdatingItem* item = nullptr;
    3229     1390179 :     while ((item = GetItem<UpdatingItem>()) != nullptr) {
    3230      541217 :       item->Process();
    3231      540489 :       item->MarkFinished();
    3232             :     }
    3233      308599 :   }
    3234             : 
    3235             :  private:
    3236             :   GCTracer* tracer_;
    3237             :   GCTracer::BackgroundScope::ScopeId scope_;
    3238             : };
    3239             : 
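                     : // Updates pointers inside a single to-space page. Ordinary pages are walked
                     : // linearly from |start| to |end|; pages promoted new->new still contain
                     : // garbage and are therefore iterated via the marking bitmap instead (see
                     : // ProcessVisitLive below).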
    3240             : template <typename MarkingState>
    3241             : class ToSpaceUpdatingItem : public UpdatingItem {
    3242             :  public:
    3243             :   explicit ToSpaceUpdatingItem(MemoryChunk* chunk, Address start, Address end,
    3244             :                                MarkingState* marking_state)
    3245             :       : chunk_(chunk),
    3246             :         start_(start),
    3247             :         end_(end),
    3248       72741 :         marking_state_(marking_state) {}
    3249      145482 :   ~ToSpaceUpdatingItem() override = default;
    3250             : 
    3251       72732 :   void Process() override {
    3252      145464 :     if (chunk_->IsFlagSet(Page::PAGE_NEW_NEW_PROMOTION)) {
    3253             :       // New->new promoted pages contain garbage so they require iteration using
    3254             :       // markbits.
    3255        2701 :       ProcessVisitLive();
    3256             :     } else {
    3257       70031 :       ProcessVisitAll();
    3258             :     }
    3259       72739 :   }
    3260             : 
    3261             :  private:
    3262       71290 :   void ProcessVisitAll() {
    3263      212612 :     TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
    3264             :                  "ToSpaceUpdatingItem::ProcessVisitAll");
    3265             :     PointersUpdatingVisitor visitor;
    3266    24507748 :     for (Address cur = start_; cur < end_;) {
    3267    24437716 :       HeapObject object = HeapObject::FromAddress(cur);
    3268             :       Map map = object->map();
    3269    24437716 :       int size = object->SizeFromMap(map);
    3270             :       object->IterateBodyFast(map, size, &visitor);
    3271    24436458 :       cur += size;
    3272             :     }
    3273       70032 :   }
    3274             : 
    3275        2702 :   void ProcessVisitLive() {
    3276        8110 :     TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
    3277             :                  "ToSpaceUpdatingItem::ProcessVisitLive");
    3278             :     // For young generation evacuations we want to visit grey objects; for
    3279             :     // full MC, we need to visit black objects.
    3280             :     PointersUpdatingVisitor visitor;
    3281     3826826 :     for (auto object_and_size : LiveObjectRange<kAllLiveObjects>(
    3282             :              chunk_, marking_state_->bitmap(chunk_))) {
    3283     3821418 :       object_and_size.first->IterateBodyFast(&visitor);
    3284             :     }
    3285        2707 :   }
    3286             : 
    3287             :   MemoryChunk* chunk_;
    3288             :   Address start_;
    3289             :   Address end_;
    3290             :   MarkingState* marking_state_;
    3291             : };
    3292             : 
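                     : // Updates the remembered sets of a single chunk after evacuation: OLD_TO_NEW
                     : // slots (untyped and typed) are always processed, while OLD_TO_OLD slots and
                     : // invalidated slots are only processed when the updating mode is
                     : // RememberedSetUpdatingMode::ALL.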
    3293             : template <typename MarkingState>
    3294             : class RememberedSetUpdatingItem : public UpdatingItem {
    3295             :  public:
    3296             :   explicit RememberedSetUpdatingItem(Heap* heap, MarkingState* marking_state,
    3297             :                                      MemoryChunk* chunk,
    3298             :                                      RememberedSetUpdatingMode updating_mode)
    3299             :       : heap_(heap),
    3300             :         marking_state_(marking_state),
    3301             :         chunk_(chunk),
    3302      313702 :         updating_mode_(updating_mode) {}
    3303      627404 :   ~RememberedSetUpdatingItem() override = default;
    3304             : 
    3305      313002 :   void Process() override {
    3306      939398 :     TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
    3307             :                  "RememberedSetUpdatingItem::Process");
    3308      313009 :     base::MutexGuard guard(chunk_->mutex());
    3309      313240 :     CodePageMemoryModificationScope memory_modification_scope(chunk_);
    3310      313136 :     UpdateUntypedPointers();
    3311      313223 :     UpdateTypedPointers();
    3312      313236 :   }
    3313             : 
    3314             :  private:
    3315             :   template <typename TSlot>
    3316    37221278 :   inline SlotCallbackResult CheckAndUpdateOldToNewSlot(TSlot slot) {
    3317             :     static_assert(
    3318             :         std::is_same<TSlot, FullMaybeObjectSlot>::value ||
    3319             :             std::is_same<TSlot, MaybeObjectSlot>::value,
    3320             :         "Only FullMaybeObjectSlot and MaybeObjectSlot are expected here");
    3321             :     using THeapObjectSlot = typename TSlot::THeapObjectSlot;
    3322             :     HeapObject heap_object;
    3323    37221278 :     if (!(*slot).GetHeapObject(&heap_object)) {
    3324             :       return REMOVE_SLOT;
    3325             :     }
    3326    38905619 :     if (Heap::InFromPage(heap_object)) {
    3327             :       MapWord map_word = heap_object->map_word();
    3328    32123178 :       if (map_word.IsForwardingAddress()) {
    3329             :         HeapObjectReference::Update(THeapObjectSlot(slot),
    3330             :                                     map_word.ToForwardingAddress());
    3331             :       }
    3332             :       bool success = (*slot).GetHeapObject(&heap_object);
    3333             :       USE(success);
    3334             :       DCHECK(success);
    3335             :       // If the object was in from space before and, after executing the
    3336             :       // callback, is now in to space, the object is still live.
    3337             :       // Unfortunately, we do not know anything about the slot itself; it
    3338             :       // could be inside a just-freed free-space object.
    3339    32123178 :       if (Heap::InToPage(heap_object)) {
    3340             :         return KEEP_SLOT;
    3341             :       }
    3342     6782441 :     } else if (Heap::InToPage(heap_object)) {
    3343             :       // Slots can point to "to" space if the page has been moved, or if the
    3344             :       // slot has been recorded multiple times in the remembered set, or
    3345             :       // if the slot was already updated during old->old updating.
    3346             :       // If the page has been moved, check markbits to determine liveness
    3347             :       // of the slot. In the other cases, the slot can just be kept.
    3348     1703287 :       if (Page::FromHeapObject(heap_object)
    3349             :               ->IsFlagSet(Page::PAGE_NEW_NEW_PROMOTION)) {
    3350             :         // IsBlackOrGrey is required because objects are marked as grey for
    3351             :         // the young generation collector while they are black for the full
    3352             :         // MC.
    3353     1704269 :         if (marking_state_->IsBlackOrGrey(heap_object)) {
    3354             :           return KEEP_SLOT;
    3355             :         } else {
    3356         747 :           return REMOVE_SLOT;
    3357             :         }
    3358             :       }
    3359             :       return KEEP_SLOT;
    3360             :     } else {
    3361             :       DCHECK(!Heap::InYoungGeneration(heap_object));
    3362             :     }
    3363             :     return REMOVE_SLOT;
    3364             :   }
    3365             : 
    3366      312837 :   void UpdateUntypedPointers() {
    3367      312837 :     if (chunk_->slot_set<OLD_TO_NEW, AccessMode::NON_ATOMIC>() != nullptr) {
    3368      305564 :       RememberedSet<OLD_TO_NEW>::Iterate(
    3369             :           chunk_,
    3370    37219010 :           [this](MaybeObjectSlot slot) {
    3371             :             return CheckAndUpdateOldToNewSlot(slot);
    3372    37219010 :           },
    3373             :           SlotSet::PREFREE_EMPTY_BUCKETS);
    3374             :     }
    3375      626476 :     if ((updating_mode_ == RememberedSetUpdatingMode::ALL) &&
    3376      313256 :         (chunk_->slot_set<OLD_TO_OLD, AccessMode::NON_ATOMIC>() != nullptr)) {
    3377       13594 :       InvalidatedSlotsFilter filter(chunk_);
    3378       13580 :       RememberedSet<OLD_TO_OLD>::Iterate(
    3379             :           chunk_,
    3380    72219446 :           [&filter](MaybeObjectSlot slot) {
    3381    36109723 :             if (!filter.IsValid(slot.address())) return REMOVE_SLOT;
    3382    36027713 :             return UpdateSlot<AccessMode::NON_ATOMIC>(slot);
    3383             :           },
    3384             :           SlotSet::PREFREE_EMPTY_BUCKETS);
    3385             :     }
    3386      626424 :     if ((updating_mode_ == RememberedSetUpdatingMode::ALL) &&
    3387      313181 :         chunk_->invalidated_slots() != nullptr) {
    3388             : #ifdef DEBUG
    3389             :       for (auto object_size : *chunk_->invalidated_slots()) {
    3390             :         HeapObject object = object_size.first;
    3391             :         int size = object_size.second;
    3392             :         DCHECK_LE(object->SizeFromMap(object->map()), size);
    3393             :       }
    3394             : #endif
    3395             :       // The invalidated slots are not needed after old-to-old slots were
    3396             :       // processed.
    3397         232 :       chunk_->ReleaseInvalidatedSlots();
    3398             :     }
    3399      313245 :   }
    3400             : 
    3401      313043 :   void UpdateTypedPointers() {
    3402      313043 :     if (chunk_->typed_slot_set<OLD_TO_NEW, AccessMode::NON_ATOMIC>() !=
    3403             :         nullptr) {
    3404        3262 :       CHECK_NE(chunk_->owner(), heap_->map_space());
    3405             :       const auto check_and_update_old_to_new_slot_fn =
    3406       31155 :           [this](FullMaybeObjectSlot slot) {
    3407             :             return CheckAndUpdateOldToNewSlot(slot);
    3408       31155 :           };
    3409        1631 :       RememberedSet<OLD_TO_NEW>::IterateTyped(
    3410             :           chunk_, [=](SlotType slot_type, Address slot) {
    3411       31085 :             return UpdateTypedSlotHelper::UpdateTypedSlot(
    3412       31085 :                 heap_, slot_type, slot, check_and_update_old_to_new_slot_fn);
    3413       31085 :           });
    3414             :     }
    3415      626046 :     if ((updating_mode_ == RememberedSetUpdatingMode::ALL) &&
    3416      313061 :         (chunk_->typed_slot_set<OLD_TO_OLD, AccessMode::NON_ATOMIC>() !=
    3417             :          nullptr)) {
    3418        1820 :       CHECK_NE(chunk_->owner(), heap_->map_space());
    3419         910 :       RememberedSet<OLD_TO_OLD>::IterateTyped(
    3420             :           chunk_, [=](SlotType slot_type, Address slot) {
    3421             :             // Using UpdateStrongSlot is OK here, because there are no weak
    3422             :             // typed slots.
    3423      180873 :             return UpdateTypedSlotHelper::UpdateTypedSlot(
    3424      180873 :                 heap_, slot_type, slot,
    3425             :                 UpdateStrongSlot<AccessMode::NON_ATOMIC, FullMaybeObjectSlot>);
    3426      180873 :           });
    3427             :     }
    3428      312985 :   }
    3429             : 
    3430             :   Heap* heap_;
    3431             :   MarkingState* marking_state_;
    3432             :   MemoryChunk* chunk_;
    3433             :   RememberedSetUpdatingMode updating_mode_;
    3434             : };
    3435             : 
    3436       72741 : UpdatingItem* MarkCompactCollector::CreateToSpaceUpdatingItem(
    3437             :     MemoryChunk* chunk, Address start, Address end) {
    3438             :   return new ToSpaceUpdatingItem<NonAtomicMarkingState>(
    3439      145482 :       chunk, start, end, non_atomic_marking_state());
    3440             : }
    3441             : 
    3442      313702 : UpdatingItem* MarkCompactCollector::CreateRememberedSetUpdatingItem(
    3443             :     MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode) {
    3444             :   return new RememberedSetUpdatingItem<NonAtomicMarkingState>(
    3445      627404 :       heap(), non_atomic_marking_state(), chunk, updating_mode);
    3446             : }
    3447             : 
    3448             : // Update array buffers on a page that has been evacuated by copying objects.
    3449             : // Target page exclusivity in old space is guaranteed by the fact that
    3450             : // evacuation tasks either (a) retrieved a fresh page, or (b) retrieved all
    3451             : // free list items of a given page. For new space the tracker will update
    3452             : // using a lock.
    3453             : class ArrayBufferTrackerUpdatingItem : public UpdatingItem {
    3454             :  public:
    3455             :   enum EvacuationState { kRegular, kAborted };
    3456             : 
    3457             :   explicit ArrayBufferTrackerUpdatingItem(Page* page, EvacuationState state)
    3458       86384 :       : page_(page), state_(state) {}
    3459      172768 :   ~ArrayBufferTrackerUpdatingItem() override = default;
    3460             : 
    3461       86328 :   void Process() override {
    3462      259031 :     TRACE_EVENT1(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
    3463             :                  "ArrayBufferTrackerUpdatingItem::Process", "EvacuationState",
    3464             :                  state_);
    3465       86331 :     switch (state_) {
    3466             :       case EvacuationState::kRegular:
    3467             :         ArrayBufferTracker::ProcessBuffers(
    3468       86317 :             page_, ArrayBufferTracker::kUpdateForwardedRemoveOthers);
    3469       86358 :         break;
    3470             :       case EvacuationState::kAborted:
    3471             :         ArrayBufferTracker::ProcessBuffers(
    3472           0 :             page_, ArrayBufferTracker::kUpdateForwardedKeepOthers);
    3473           0 :         break;
    3474             :     }
    3475       86353 :   }
    3476             : 
    3477             :  private:
    3478             :   Page* const page_;
    3479             :   const EvacuationState state_;
    3480             : };
    3481             : 
    3482       68846 : int MarkCompactCollectorBase::CollectToSpaceUpdatingItems(
    3483             :     ItemParallelJob* job) {
    3484             :   // Seed to space pages.
    3485             :   const Address space_start = heap()->new_space()->first_allocatable_address();
    3486             :   const Address space_end = heap()->new_space()->top();
    3487             :   int pages = 0;
    3488      141587 :   for (Page* page : PageRange(space_start, space_end)) {
    3489             :     Address start =
    3490       72741 :         page->Contains(space_start) ? space_start : page->area_start();
    3491       72741 :     Address end = page->Contains(space_end) ? space_end : page->area_end();
    3492       72741 :     job->AddItem(CreateToSpaceUpdatingItem(page, start, end));
    3493       72741 :     pages++;
    3494             :   }
    3495       68846 :   if (pages == 0) return 0;
    3496       68846 :   return NumberOfParallelToSpacePointerUpdateTasks(pages);
    3497             : }
    3498             : 
    3499             : template <typename IterateableSpace>
    3500      344230 : int MarkCompactCollectorBase::CollectRememberedSetUpdatingItems(
    3501             :     ItemParallelJob* job, IterateableSpace* space,
    3502             :     RememberedSetUpdatingMode mode) {
    3503             :   int pages = 0;
    3504      878855 :   for (MemoryChunk* chunk : *space) {
    3505             :     const bool contains_old_to_old_slots =
    3506             :         chunk->slot_set<OLD_TO_OLD>() != nullptr ||
    3507     1055619 :         chunk->typed_slot_set<OLD_TO_OLD>() != nullptr;
    3508             :     const bool contains_old_to_new_slots =
    3509             :         chunk->slot_set<OLD_TO_NEW>() != nullptr ||
    3510      762815 :         chunk->typed_slot_set<OLD_TO_NEW>() != nullptr;
    3511             :     const bool contains_invalidated_slots =
    3512             :         chunk->invalidated_slots() != nullptr;
    3513      534625 :     if (!contains_old_to_new_slots && !contains_old_to_old_slots &&
    3514             :         !contains_invalidated_slots)
    3515             :       continue;
    3516      313702 :     if (mode == RememberedSetUpdatingMode::ALL || contains_old_to_new_slots ||
    3517             :         contains_invalidated_slots) {
    3518      313702 :       job->AddItem(CreateRememberedSetUpdatingItem(chunk, mode));
    3519      313702 :       pages++;
    3520             :     }
    3521             :   }
    3522      344230 :   return pages;
    3523             : }
    3524             : 
    3525       68846 : int MarkCompactCollector::CollectNewSpaceArrayBufferTrackerItems(
    3526             :     ItemParallelJob* job) {
    3527             :   int pages = 0;
    3528      161012 :   for (Page* p : new_space_evacuation_pages_) {
    3529       92166 :     if (Evacuator::ComputeEvacuationMode(p) == Evacuator::kObjectsNewToOld) {
    3530       87973 :       if (p->local_tracker() == nullptr) continue;
    3531             : 
    3532       85671 :       pages++;
    3533       85671 :       job->AddItem(new ArrayBufferTrackerUpdatingItem(
    3534             :           p, ArrayBufferTrackerUpdatingItem::kRegular));
    3535             :     }
    3536             :   }
    3537       68846 :   return pages;
    3538             : }
    3539             : 
    3540       68846 : int MarkCompactCollector::CollectOldSpaceArrayBufferTrackerItems(
    3541             :     ItemParallelJob* job) {
    3542             :   int pages = 0;
    3543       78597 :   for (Page* p : old_space_evacuation_pages_) {
    3544       19502 :     if (Evacuator::ComputeEvacuationMode(p) == Evacuator::kObjectsOldToOld &&
    3545             :         p->IsEvacuationCandidate()) {
    3546        9726 :       if (p->local_tracker() == nullptr) continue;
    3547             : 
    3548         713 :       pages++;
    3549         713 :       job->AddItem(new ArrayBufferTrackerUpdatingItem(
    3550             :           p, ArrayBufferTrackerUpdatingItem::kRegular));
    3551             :     }
    3552             :   }
    3553       68871 :   for (auto object_and_page : aborted_evacuation_candidates_) {
    3554             :     Page* p = object_and_page.second;
    3555          25 :     if (p->local_tracker() == nullptr) continue;
    3556             : 
    3557           0 :     pages++;
    3558           0 :     job->AddItem(new ArrayBufferTrackerUpdatingItem(
    3559             :         p, ArrayBufferTrackerUpdatingItem::kAborted));
    3560             :   }
    3561       68846 :   return pages;
    3562             : }
    3563             : 
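                     : // Updates Heap::ephemeron_remembered_set_ after evacuation: entries for
                     : // tables that moved are dropped (the migrated table is re-recorded by
                     : // RecordMigratedSlotVisitor::VisitEphemeron), surviving keys are rewritten
                     : // through their forwarding addresses, and indices whose keys are no longer
                     : // in the young generation are removed.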
    3564             : class EphemeronTableUpdatingItem : public UpdatingItem {
    3565             :  public:
    3566             :   enum EvacuationState { kRegular, kAborted };
    3567             : 
    3568       68846 :   explicit EphemeronTableUpdatingItem(Heap* heap) : heap_(heap) {}
    3569      137692 :   ~EphemeronTableUpdatingItem() override = default;
    3570             : 
    3571       68846 :   void Process() override {
    3572      206538 :     TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
    3573             :                  "EphemeronTableUpdatingItem::Process");
    3574             : 
    3575       68846 :     for (auto it = heap_->ephemeron_remembered_set_.begin();
    3576             :          it != heap_->ephemeron_remembered_set_.end();) {
    3577          46 :       EphemeronHashTable table = it->first;
    3578             :       auto& indices = it->second;
    3579          46 :       if (table.map_word().IsForwardingAddress()) {
    3580             :         // The table has moved, and RecordMigratedSlotVisitor::VisitEphemeron
    3581             :         // inserts entries for the moved table into ephemeron_remembered_set_.
    3582           0 :         it = heap_->ephemeron_remembered_set_.erase(it);
    3583             :         continue;
    3584             :       }
    3585             :       DCHECK(table.map().IsMap());
    3586             :       DCHECK(table.Object::IsEphemeronHashTable());
    3587         177 :       for (auto iti = indices.begin(); iti != indices.end();) {
    3588             :         // EphemeronHashTable keys must be heap objects.
    3589             :         HeapObjectSlot key_slot(
    3590         131 :             table->RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(*iti)));
    3591             :         HeapObject key = key_slot.ToHeapObject();
    3592             :         MapWord map_word = key->map_word();
    3593         131 :         if (map_word.IsForwardingAddress()) {
    3594             :           key = map_word.ToForwardingAddress();
    3595             :           key_slot.StoreHeapObject(key);
    3596             :         }
    3597         131 :         if (!heap_->InYoungGeneration(key)) {
    3598             :           iti = indices.erase(iti);
    3599             :         } else {
    3600             :           ++iti;
    3601             :         }
    3602             :       }
    3603          46 :       if (indices.size() == 0) {
    3604          41 :         it = heap_->ephemeron_remembered_set_.erase(it);
    3605             :       } else {
    3606             :         ++it;
    3607             :       }
    3608             :     }
    3609       68846 :   }
    3610             : 
    3611             :  private:
    3612             :   Heap* const heap_;
    3613             : };
    3614             : 
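                     : // Pointer updating proceeds in phases, roughly: (1) roots are visited on the
                     : // main thread; (2) a parallel job updates to-space pages, the remembered sets
                     : // of old, code and large-object spaces, and the ephemeron tables; (3) a
                     : // second parallel job handles the map space and the array buffer trackers;
                     : // (4) finally, the external string table and weak list roots are updated.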
    3615       68846 : void MarkCompactCollector::UpdatePointersAfterEvacuation() {
    3616      275384 :   TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS);
    3617             : 
    3618             :   PointersUpdatingVisitor updating_visitor;
    3619             : 
    3620             :   {
    3621      275384 :     TRACE_GC(heap()->tracer(),
    3622             :              GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_TO_NEW_ROOTS);
    3623       68846 :     heap_->IterateRoots(&updating_visitor, VISIT_ALL_IN_SWEEP_NEWSPACE);
    3624             :   }
    3625             : 
    3626             :   {
    3627      275384 :     TRACE_GC(heap()->tracer(),
    3628             :              GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_SLOTS_MAIN);
    3629             :     ItemParallelJob updating_job(isolate()->cancelable_task_manager(),
    3630      206538 :                                  &page_parallel_job_semaphore_);
    3631             : 
    3632             :     int remembered_set_pages = 0;
    3633       68846 :     remembered_set_pages += CollectRememberedSetUpdatingItems(
    3634       68846 :         &updating_job, heap()->old_space(), RememberedSetUpdatingMode::ALL);
    3635             :     remembered_set_pages += CollectRememberedSetUpdatingItems(
    3636       68846 :         &updating_job, heap()->code_space(), RememberedSetUpdatingMode::ALL);
    3637             :     remembered_set_pages += CollectRememberedSetUpdatingItems(
    3638       68846 :         &updating_job, heap()->lo_space(), RememberedSetUpdatingMode::ALL);
    3639             :     remembered_set_pages += CollectRememberedSetUpdatingItems(
    3640       68846 :         &updating_job, heap()->code_lo_space(), RememberedSetUpdatingMode::ALL);
    3641             :     const int remembered_set_tasks =
    3642             :         remembered_set_pages == 0
    3643             :             ? 0
    3644       68650 :             : NumberOfParallelPointerUpdateTasks(remembered_set_pages,
    3645      137496 :                                                  old_to_new_slots_);
    3646       68846 :     const int to_space_tasks = CollectToSpaceUpdatingItems(&updating_job);
    3647             :     const int num_ephemeron_table_updating_tasks = 1;
    3648             :     const int num_tasks =
    3649       68846 :         Max(to_space_tasks,
    3650             :             remembered_set_tasks + num_ephemeron_table_updating_tasks);
    3651      615628 :     for (int i = 0; i < num_tasks; i++) {
    3652      273391 :       updating_job.AddTask(new PointersUpdatingTask(
    3653             :           isolate(),
    3654      273391 :           GCTracer::BackgroundScope::MC_BACKGROUND_EVACUATE_UPDATE_POINTERS));
    3655             :     }
    3656       68846 :     updating_job.AddItem(new EphemeronTableUpdatingItem(heap()));
    3657       68846 :     updating_job.Run();
    3658             :   }
    3659             : 
    3660             :   {
    3661             :     // - Update pointers in map space in a separate phase to avoid data races
    3662             :     //   with the Map->LayoutDescriptor edge.
    3663             :     // - Update array buffer trackers in the second phase to have access to
    3664             :     //   the byte length, which is potentially a HeapNumber.
    3665      275384 :     TRACE_GC(heap()->tracer(),
    3666             :              GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_SLOTS_MAP_SPACE);
    3667             :     ItemParallelJob updating_job(isolate()->cancelable_task_manager(),
    3668      137692 :                                  &page_parallel_job_semaphore_);
    3669             : 
    3670             :     int array_buffer_pages = 0;
    3671       68846 :     array_buffer_pages += CollectNewSpaceArrayBufferTrackerItems(&updating_job);
    3672       68846 :     array_buffer_pages += CollectOldSpaceArrayBufferTrackerItems(&updating_job);
    3673             : 
    3674             :     int remembered_set_pages = 0;
    3675             :     remembered_set_pages += CollectRememberedSetUpdatingItems(
    3676       68846 :         &updating_job, heap()->map_space(), RememberedSetUpdatingMode::ALL);
    3677             :     const int remembered_set_tasks =
    3678             :         remembered_set_pages == 0
    3679             :             ? 0
    3680       68677 :             : NumberOfParallelPointerUpdateTasks(remembered_set_pages,
    3681      137523 :                                                  old_to_new_slots_);
    3682             :     const int num_tasks = Max(array_buffer_pages, remembered_set_tasks);
    3683       68846 :     if (num_tasks > 0) {
    3684      298220 :       for (int i = 0; i < num_tasks; i++) {
    3685      114687 :         updating_job.AddTask(new PointersUpdatingTask(
    3686             :             isolate(),
    3687      114687 :             GCTracer::BackgroundScope::MC_BACKGROUND_EVACUATE_UPDATE_POINTERS));
    3688             :       }
    3689       68846 :       updating_job.Run();
    3690       68846 :       heap()->array_buffer_collector()->FreeAllocations();
    3691             :     }
    3692             :   }
    3693             : 
    3694             :   {
    3695      275384 :     TRACE_GC(heap()->tracer(),
    3696             :              GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_WEAK);
    3697             :     // Update pointers from external string table.
    3698       68846 :     heap_->UpdateReferencesInExternalStringTable(
    3699       68846 :         &UpdateReferenceInExternalStringTableEntry);
    3700             : 
    3701       68846 :     EvacuationWeakObjectRetainer evacuation_object_retainer;
    3702       68846 :     heap()->ProcessWeakListRoots(&evacuation_object_retainer);
    3703             :   }
    3704       68846 : }
    3705             : 
    3706          25 : void MarkCompactCollector::ReportAbortedEvacuationCandidate(
    3707             :     HeapObject failed_object, MemoryChunk* chunk) {
    3708          25 :   base::MutexGuard guard(&mutex_);
    3709             : 
    3710          25 :   aborted_evacuation_candidates_.push_back(
    3711          50 :       std::make_pair(failed_object, static_cast<Page*>(chunk)));
    3712          25 : }
    3713             : 
    3714       57895 : void MarkCompactCollector::PostProcessEvacuationCandidates() {
    3715       57920 :   for (auto object_and_page : aborted_evacuation_candidates_) {
    3716             :     HeapObject failed_object = object_and_page.first;
    3717             :     Page* page = object_and_page.second;
    3718             :     page->SetFlag(Page::COMPACTION_WAS_ABORTED);
    3719             :     // Aborted compaction page. We have to record slots here, since we
    3720             :     // might not have recorded them in the first place.
    3721             : 
    3722             :     // Remove outdated slots.
    3723          25 :     RememberedSet<OLD_TO_NEW>::RemoveRange(page, page->address(),
    3724             :                                            failed_object->address(),
    3725          25 :                                            SlotSet::PREFREE_EMPTY_BUCKETS);
    3726             :     RememberedSet<OLD_TO_NEW>::RemoveRangeTyped(page, page->address(),
    3727          25 :                                                 failed_object->address());
    3728             :     // Recompute live bytes.
    3729          25 :     LiveObjectVisitor::RecomputeLiveBytes(page, non_atomic_marking_state());
    3730             :     // Re-record slots.
    3731             :     EvacuateRecordOnlyVisitor record_visitor(heap());
    3732             :     LiveObjectVisitor::VisitBlackObjectsNoFail(page, non_atomic_marking_state(),
    3733             :                                                &record_visitor,
    3734          25 :                                                LiveObjectVisitor::kKeepMarking);
    3735             :     // Array buffers will be processed during pointer updating.
    3736             :   }
    3737             :   const int aborted_pages =
    3738       57895 :       static_cast<int>(aborted_evacuation_candidates_.size());
    3739             :   int aborted_pages_verified = 0;
    3740       67646 :   for (Page* p : old_space_evacuation_pages_) {
    3741        9751 :     if (p->IsFlagSet(Page::COMPACTION_WAS_ABORTED)) {
    3742             :       // After clearing the evacuation candidate flag the page is again in a
    3743             :       // regular state.
    3744             :       p->ClearEvacuationCandidate();
    3745             :       aborted_pages_verified++;
    3746             :     } else {
    3747             :       DCHECK(p->IsEvacuationCandidate());
    3748             :       DCHECK(p->SweepingDone());
    3749             :       p->owner()->memory_chunk_list().Remove(p);
    3750             :     }
    3751             :   }
    3752             :   DCHECK_EQ(aborted_pages_verified, aborted_pages);
    3753       57895 :   if (FLAG_trace_evacuation && (aborted_pages > 0)) {
    3754             :     PrintIsolate(isolate(), "%8.0f ms: evacuation: aborted=%d\n",
    3755           0 :                  isolate()->time_millis_since_init(), aborted_pages);
    3756             :   }
    3757       57895 : }
    3758             : 
    3759       68846 : void MarkCompactCollector::ReleaseEvacuationCandidates() {
    3760       78597 :   for (Page* p : old_space_evacuation_pages_) {
    3761        9751 :     if (!p->IsEvacuationCandidate()) continue;
    3762             :     PagedSpace* space = static_cast<PagedSpace*>(p->owner());
    3763             :     non_atomic_marking_state()->SetLiveBytes(p, 0);
    3764        9726 :     CHECK(p->SweepingDone());
    3765        9726 :     space->ReleasePage(p);
    3766             :   }
    3767             :   old_space_evacuation_pages_.clear();
    3768       68846 :   compacting_ = false;
    3769       68846 : }
    3770             : 
    3771      206538 : void MarkCompactCollector::StartSweepSpace(PagedSpace* space) {
    3772      206538 :   space->ClearStats();
    3773             : 
    3774             :   int will_be_swept = 0;
    3775             :   bool unused_page_present = false;
    3776             : 
    3777             :   // The loop needs to support deleting a page whose live bytes are 0.
    3778      698626 :   for (auto it = space->begin(); it != space->end();) {
    3779             :     Page* p = *(it++);
    3780             :     DCHECK(p->SweepingDone());
    3781             : 
    3782      492088 :     if (p->IsEvacuationCandidate()) {
    3783             :       // Will be processed in Evacuate.
    3784             :       DCHECK(!evacuation_candidates_.empty());
    3785             :       continue;
    3786             :     }
    3787             : 
    3788             :     // One unused page is kept; all further ones are released before sweeping.
    3789      482337 :     if (non_atomic_marking_state()->live_bytes(p) == 0) {
    3790       18727 :       if (unused_page_present) {
    3791             :         if (FLAG_gc_verbose) {
    3792             :           PrintIsolate(isolate(), "sweeping: released page: %p",
    3793             :                        static_cast<void*>(p));
    3794             :         }
    3795       11517 :         ArrayBufferTracker::FreeAll(p);
    3796             :         space->memory_chunk_list().Remove(p);
    3797       11517 :         space->ReleasePage(p);
    3798       11517 :         continue;
    3799             :       }
    3800             :       unused_page_present = true;
    3801             :     }
    3802             : 
    3803      470820 :     sweeper()->AddPage(space->identity(), p, Sweeper::REGULAR);
    3804             :     will_be_swept++;
    3805             :   }
    3806             : 
    3807             :   if (FLAG_gc_verbose) {
    3808             :     PrintIsolate(isolate(), "sweeping: space=%s initialized_for_sweeping=%d",
    3809             :                  space->name(), will_be_swept);
    3810             :   }
    3811      206538 : }
    3812             : 
    3813       68846 : void MarkCompactCollector::StartSweepSpaces() {
    3814      275384 :   TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_SWEEP);
    3815             : #ifdef DEBUG
    3816             :   state_ = SWEEP_SPACES;
    3817             : #endif
    3818             : 
    3819             :   {
    3820             :     {
    3821             :       GCTracer::Scope sweep_scope(heap()->tracer(),
    3822      137692 :                                   GCTracer::Scope::MC_SWEEP_OLD);
    3823       68846 :       StartSweepSpace(heap()->old_space());
    3824             :     }
    3825             :     {
    3826             :       GCTracer::Scope sweep_scope(heap()->tracer(),
    3827      137692 :                                   GCTracer::Scope::MC_SWEEP_CODE);
    3828       68846 :       StartSweepSpace(heap()->code_space());
    3829             :     }
    3830             :     {
    3831             :       GCTracer::Scope sweep_scope(heap()->tracer(),
    3832      137692 :                                   GCTracer::Scope::MC_SWEEP_MAP);
    3833       68846 :       StartSweepSpace(heap()->map_space());
    3834             :     }
    3835       68846 :     sweeper()->StartSweeping();
    3836             :   }
    3837       68846 : }
    3838             : 
    3839           0 : void MarkCompactCollector::MarkingWorklist::PrintWorklist(
    3840             :     const char* worklist_name, ConcurrentMarkingWorklist* worklist) {
    3841             :   std::map<InstanceType, int> count;
    3842           0 :   int total_count = 0;
    3843           0 :   worklist->IterateGlobalPool([&count, &total_count](HeapObject obj) {
    3844           0 :     ++total_count;
    3845           0 :     count[obj->map()->instance_type()]++;
    3846           0 :   });
    3847             :   std::vector<std::pair<int, InstanceType>> rank;
    3848           0 :   rank.reserve(count.size());
    3849           0 :   for (const auto& i : count) {
    3850           0 :     rank.emplace_back(i.second, i.first);
    3851             :   }
    3852             :   std::map<InstanceType, std::string> instance_type_name;
    3853             : #define INSTANCE_TYPE_NAME(name) instance_type_name[name] = #name;
    3854           0 :   INSTANCE_TYPE_LIST(INSTANCE_TYPE_NAME)
    3855             : #undef INSTANCE_TYPE_NAME
    3856             :   std::sort(rank.begin(), rank.end(),
    3857             :             std::greater<std::pair<int, InstanceType>>());
    3858           0 :   PrintF("Worklist %s: %d\n", worklist_name, total_count);
    3859           0 :   for (auto i : rank) {
    3860           0 :     PrintF("  [%s]: %d\n", instance_type_name[i.second].c_str(), i.first);
    3861             :   }
    3862           0 : }
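// PrintWorklist above builds a per-instance-type histogram and then ranks the
// types by descending count. A standalone sketch of that counting-and-ranking
// idiom, with string keys standing in for the V8 instance types:
#include <algorithm>
#include <cstdio>
#include <functional>
#include <map>
#include <string>
#include <utility>
#include <vector>

void PrintRankedHistogram(const std::vector<std::string>& items) {
  std::map<std::string, int> count;
  for (const std::string& item : items) count[item]++;
  std::vector<std::pair<int, std::string>> rank;
  rank.reserve(count.size());
  for (const auto& entry : count) rank.emplace_back(entry.second, entry.first);
  // Largest count first, matching the std::greater comparator used above.
  std::sort(rank.begin(), rank.end(),
            std::greater<std::pair<int, std::string>>());
  for (const auto& entry : rank) {
    std::printf("  [%s]: %d\n", entry.second.c_str(), entry.first);
  }
}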
    3863             : 
    3864             : #ifdef ENABLE_MINOR_MC
    3865             : 
    3866             : namespace {
    3867             : 
    3868             : #ifdef VERIFY_HEAP
    3869             : 
    3870             : class YoungGenerationMarkingVerifier : public MarkingVerifier {
    3871             :  public:
    3872             :   explicit YoungGenerationMarkingVerifier(Heap* heap)
    3873             :       : MarkingVerifier(heap),
    3874             :         marking_state_(
    3875             :             heap->minor_mark_compact_collector()->non_atomic_marking_state()) {}
    3876             : 
    3877             :   ConcurrentBitmap<AccessMode::NON_ATOMIC>* bitmap(
    3878             :       const MemoryChunk* chunk) override {
    3879             :     return marking_state_->bitmap(chunk);
    3880             :   }
    3881             : 
    3882             :   bool IsMarked(HeapObject object) override {
    3883             :     return marking_state_->IsGrey(object);
    3884             :   }
    3885             : 
    3886             :   bool IsBlackOrGrey(HeapObject object) override {
    3887             :     return marking_state_->IsBlackOrGrey(object);
    3888             :   }
    3889             : 
    3890             :   void Run() override {
    3891             :     VerifyRoots(VISIT_ALL_IN_SCAVENGE);
    3892             :     VerifyMarking(heap_->new_space());
    3893             :   }
    3894             : 
    3895             :  protected:
    3896             :   void VerifyPointers(ObjectSlot start, ObjectSlot end) override {
    3897             :     VerifyPointersImpl(start, end);
    3898             :   }
    3899             : 
    3900             :   void VerifyPointers(MaybeObjectSlot start, MaybeObjectSlot end) override {
    3901             :     VerifyPointersImpl(start, end);
    3902             :   }
    3903             : 
    3904             :   void VisitCodeTarget(Code host, RelocInfo* rinfo) override {
    3905             :     Code target = Code::GetCodeFromTargetAddress(rinfo->target_address());
    3906             :     VerifyHeapObjectImpl(target);
    3907             :   }
    3908             :   void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) override {
    3909             :     VerifyHeapObjectImpl(rinfo->target_object());
    3910             :   }
    3911             :   void VerifyRootPointers(FullObjectSlot start, FullObjectSlot end) override {
    3912             :     VerifyPointersImpl(start, end);
    3913             :   }
    3914             : 
    3915             :  private:
    3916             :   V8_INLINE void VerifyHeapObjectImpl(HeapObject heap_object) {
    3917             :     CHECK_IMPLIES(Heap::InYoungGeneration(heap_object), IsMarked(heap_object));
    3918             :   }
    3919             : 
    3920             :   template <typename TSlot>
    3921             :   V8_INLINE void VerifyPointersImpl(TSlot start, TSlot end) {
    3922             :     for (TSlot slot = start; slot < end; ++slot) {
    3923             :       typename TSlot::TObject object = *slot;
    3924             :       HeapObject heap_object;
    3925             :       // Minor MC treats weak references as strong.
    3926             :       if (object.GetHeapObject(&heap_object)) {
    3927             :         VerifyHeapObjectImpl(heap_object);
    3928             :       }
    3929             :     }
    3930             :   }
    3931             : 
    3932             :   MinorMarkCompactCollector::NonAtomicMarkingState* marking_state_;
    3933             : };
    3934             : 
    3935             : class YoungGenerationEvacuationVerifier : public EvacuationVerifier {
    3936             :  public:
    3937             :   explicit YoungGenerationEvacuationVerifier(Heap* heap)
    3938             :       : EvacuationVerifier(heap) {}
    3939             : 
    3940             :   void Run() override {
    3941             :     VerifyRoots(VISIT_ALL_IN_SCAVENGE);
    3942             :     VerifyEvacuation(heap_->new_space());
    3943             :     VerifyEvacuation(heap_->old_space());
    3944             :     VerifyEvacuation(heap_->code_space());
    3945             :     VerifyEvacuation(heap_->map_space());
    3946             :   }
    3947             : 
    3948             :  protected:
    3949             :   V8_INLINE void VerifyHeapObjectImpl(HeapObject heap_object) {
    3950             :     CHECK_IMPLIES(Heap::InYoungGeneration(heap_object),
    3951             :                   Heap::InToPage(heap_object));
    3952             :   }
    3953             : 
    3954             :   template <typename TSlot>
    3955             :   void VerifyPointersImpl(TSlot start, TSlot end) {
    3956             :     for (TSlot current = start; current < end; ++current) {
    3957             :       typename TSlot::TObject object = *current;
    3958             :       HeapObject heap_object;
    3959             :       if (object.GetHeapObject(&heap_object)) {
    3960             :         VerifyHeapObjectImpl(heap_object);
    3961             :       }
    3962             :     }
    3963             :   }
    3964             : 
    3965             :   void VerifyPointers(ObjectSlot start, ObjectSlot end) override {
    3966             :     VerifyPointersImpl(start, end);
    3967             :   }
    3968             :   void VerifyPointers(MaybeObjectSlot start, MaybeObjectSlot end) override {
    3969             :     VerifyPointersImpl(start, end);
    3970             :   }
    3971             :   void VisitCodeTarget(Code host, RelocInfo* rinfo) override {
    3972             :     Code target = Code::GetCodeFromTargetAddress(rinfo->target_address());
    3973             :     VerifyHeapObjectImpl(target);
    3974             :   }
    3975             :   void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) override {
    3976             :     VerifyHeapObjectImpl(rinfo->target_object());
    3977             :   }
    3978             :   void VerifyRootPointers(FullObjectSlot start, FullObjectSlot end) override {
    3979             :     VerifyPointersImpl(start, end);
    3980             :   }
    3981             : };
    3982             : 
    3983             : #endif  // VERIFY_HEAP
    3984             : 
    3985           0 : bool IsUnmarkedObjectForYoungGeneration(Heap* heap, FullObjectSlot p) {
    3986             :   DCHECK_IMPLIES(Heap::InYoungGeneration(*p), Heap::InToPage(*p));
    3987           0 :   return Heap::InYoungGeneration(*p) && !heap->minor_mark_compact_collector()
    3988             :                                              ->non_atomic_marking_state()
    3989           0 :                                              ->IsGrey(HeapObject::cast(*p));
    3990             : }
    3991             : 
    3992             : }  // namespace
    3993             : 
    3994      124854 : class YoungGenerationMarkingVisitor final
    3995             :     : public NewSpaceVisitor<YoungGenerationMarkingVisitor> {
    3996             :  public:
    3997             :   YoungGenerationMarkingVisitor(
    3998             :       MinorMarkCompactCollector::MarkingState* marking_state,
    3999             :       MinorMarkCompactCollector::MarkingWorklist* global_worklist, int task_id)
    4000      124884 :       : worklist_(global_worklist, task_id), marking_state_(marking_state) {}
    4001             : 
    4002           0 :   V8_INLINE void VisitPointers(HeapObject host, ObjectSlot start,
    4003             :                                ObjectSlot end) final {
    4004             :     VisitPointersImpl(host, start, end);
    4005           0 :   }
    4006             : 
    4007           0 :   V8_INLINE void VisitPointers(HeapObject host, MaybeObjectSlot start,
    4008             :                                MaybeObjectSlot end) final {
    4009             :     VisitPointersImpl(host, start, end);
    4010           0 :   }
    4011             : 
    4012           0 :   V8_INLINE void VisitPointer(HeapObject host, ObjectSlot slot) final {
    4013             :     VisitPointerImpl(host, slot);
    4014           0 :   }
    4015             : 
    4016           0 :   V8_INLINE void VisitPointer(HeapObject host, MaybeObjectSlot slot) final {
    4017             :     VisitPointerImpl(host, slot);
    4018           0 :   }
    4019             : 
    4020           0 :   V8_INLINE void VisitCodeTarget(Code host, RelocInfo* rinfo) final {
    4021             :     // Code objects are not expected in new space.
    4022           0 :     UNREACHABLE();
    4023             :   }
    4024             : 
    4025           0 :   V8_INLINE void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) final {
    4026             :     // Code objects are not expected in new space.
    4027           0 :     UNREACHABLE();
    4028             :   }
    4029             : 
    4030             :  private:
    4031             :   template <typename TSlot>
    4032             :   V8_INLINE void VisitPointersImpl(HeapObject host, TSlot start, TSlot end) {
    4033           0 :     for (TSlot slot = start; slot < end; ++slot) {
    4034             :       VisitPointer(host, slot);
    4035             :     }
    4036             :   }
    4037             : 
    4038             :   template <typename TSlot>
    4039             :   V8_INLINE void VisitPointerImpl(HeapObject host, TSlot slot) {
    4040           0 :     typename TSlot::TObject target = *slot;
    4041           0 :     if (Heap::InYoungGeneration(target)) {
    4042             :       // Treat weak references as strong.
    4043             :       // TODO(marja): Proper weakness handling for minor-mcs.
    4044           0 :       HeapObject target_object = target.GetHeapObject();
    4045           0 :       MarkObjectViaMarkingWorklist(target_object);
    4046             :     }
    4047             :   }
    4048             : 
    4049           0 :   inline void MarkObjectViaMarkingWorklist(HeapObject object) {
    4050           0 :     if (marking_state_->WhiteToGrey(object)) {
    4051             :       // Marking deque overflow is unsupported for the young generation.
    4052           0 :       CHECK(worklist_.Push(object));
    4053             :     }
    4054           0 :   }
    4055             : 
    4056             :   MinorMarkCompactCollector::MarkingWorklist::View worklist_;
    4057             :   MinorMarkCompactCollector::MarkingState* marking_state_;
    4058             : };
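// The visitor above performs one step of worklist-based marking: flip a white
// object to grey and push it so its fields get scanned later (the minor
// collector stops at grey). A standalone sketch of that loop, using
// hypothetical SketchObject/SketchColor stand-ins rather than V8 types:
#include <deque>
#include <vector>

enum class SketchColor { kWhite, kGrey };

struct SketchObject {
  SketchColor color = SketchColor::kWhite;
  std::vector<SketchObject*> fields;  // Outgoing pointers.
};

// WhiteToGrey analogue: only the caller that wins the transition pushes.
inline bool SketchWhiteToGrey(SketchObject* object) {
  if (object->color != SketchColor::kWhite) return false;
  object->color = SketchColor::kGrey;
  return true;
}

void SketchMark(SketchObject* root) {
  std::deque<SketchObject*> worklist;
  if (SketchWhiteToGrey(root)) worklist.push_back(root);
  while (!worklist.empty()) {
    SketchObject* current = worklist.front();
    worklist.pop_front();
    // Visit(): scan the fields and discover further objects to mark.
    for (SketchObject* target : current->fields) {
      if (SketchWhiteToGrey(target)) worklist.push_back(target);
    }
  }
}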
    4059             : 
    4060       62442 : void MinorMarkCompactCollector::SetUp() {}
    4061             : 
    4062       62425 : void MinorMarkCompactCollector::TearDown() {}
    4063             : 
    4064       62442 : MinorMarkCompactCollector::MinorMarkCompactCollector(Heap* heap)
    4065             :     : MarkCompactCollectorBase(heap),
    4066             :       worklist_(new MinorMarkCompactCollector::MarkingWorklist()),
    4067             :       main_marking_visitor_(new YoungGenerationMarkingVisitor(
    4068       62442 :           marking_state(), worklist_, kMainMarker)),
    4069      249768 :       page_parallel_job_semaphore_(0) {
    4070             :   static_assert(
    4071             :       kNumMarkers <= MinorMarkCompactCollector::MarkingWorklist::kMaxNumTasks,
    4072             :       "more marker tasks than marking deque can handle");
    4073       62442 : }
    4074             : 
    4075      249704 : MinorMarkCompactCollector::~MinorMarkCompactCollector() {
    4076       62425 :   delete worklist_;
    4077       62427 :   delete main_marking_visitor_;
    4078      124852 : }
    4079             : 
    4080           0 : int MinorMarkCompactCollector::NumberOfParallelMarkingTasks(int pages) {
    4081             :   DCHECK_GT(pages, 0);
    4082           0 :   if (!FLAG_minor_mc_parallel_marking) return 1;
    4083             :   // Pages are not private to markers but we can still use them to estimate the
    4084             :   // amount of marking that is required.
    4085             :   const int kPagesPerTask = 2;
    4086           0 :   const int wanted_tasks = Max(1, pages / kPagesPerTask);
    4087           0 :   return Min(NumberOfAvailableCores(),
    4088             :              Min(wanted_tasks,
    4089           0 :                  MinorMarkCompactCollector::MarkingWorklist::kMaxNumTasks));
    4090             : }
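// A worked version of the task-count heuristic above, with the constant
// kPagesPerTask = 2 inlined. For example, 7 pages, 8 available cores and a
// worklist limit of 8 tasks give min(8, min(max(1, 3), 8)) = 3 marking tasks.
#include <algorithm>

int SketchNumberOfMarkingTasks(int pages, int available_cores, int max_tasks) {
  const int kPagesPerTask = 2;
  const int wanted_tasks = std::max(1, pages / kPagesPerTask);
  return std::min(available_cores, std::min(wanted_tasks, max_tasks));
}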
    4091             : 
    4092       68846 : void MinorMarkCompactCollector::CleanupSweepToIteratePages() {
    4093       68846 :   for (Page* p : sweep_to_iterate_pages_) {
    4094           0 :     if (p->IsFlagSet(Page::SWEEP_TO_ITERATE)) {
    4095             :       p->ClearFlag(Page::SWEEP_TO_ITERATE);
    4096           0 :       non_atomic_marking_state()->ClearLiveness(p);
    4097             :     }
    4098             :   }
    4099             :   sweep_to_iterate_pages_.clear();
    4100       68846 : }
    4101             : 
    4102           0 : class YoungGenerationMigrationObserver final : public MigrationObserver {
    4103             :  public:
    4104             :   YoungGenerationMigrationObserver(Heap* heap,
    4105             :                                    MarkCompactCollector* mark_compact_collector)
    4106             :       : MigrationObserver(heap),
    4107           0 :         mark_compact_collector_(mark_compact_collector) {}
    4108             : 
    4109           0 :   inline void Move(AllocationSpace dest, HeapObject src, HeapObject dst,
    4110             :                    int size) final {
    4111             :     // Migrate color to old generation marking in case the object survived young
    4112             :     // generation garbage collection.
    4113           0 :     if (heap_->incremental_marking()->IsMarking()) {
    4114             :       DCHECK(
    4115             :           heap_->incremental_marking()->atomic_marking_state()->IsWhite(dst));
    4116             :       heap_->incremental_marking()->TransferColor(src, dst);
    4117             :     }
    4118           0 :   }
    4119             : 
    4120             :  protected:
    4121             :   base::Mutex mutex_;
    4122             :   MarkCompactCollector* mark_compact_collector_;
    4123             : };
    4124             : 
    4125           0 : class YoungGenerationRecordMigratedSlotVisitor final
    4126             :     : public RecordMigratedSlotVisitor {
    4127             :  public:
    4128             :   explicit YoungGenerationRecordMigratedSlotVisitor(
    4129             :       MarkCompactCollector* collector)
    4130           0 :       : RecordMigratedSlotVisitor(collector, nullptr) {}
    4131             : 
    4132           0 :   void VisitCodeTarget(Code host, RelocInfo* rinfo) final { UNREACHABLE(); }
    4133           0 :   void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) final {
    4134           0 :     UNREACHABLE();
    4135             :   }
    4136             : 
    4137             :  private:
    4138             :   // Only record slots for host objects that are considered live by the full
    4139             :   // collector.
    4140           0 :   inline bool IsLive(HeapObject object) {
    4141           0 :     return collector_->non_atomic_marking_state()->IsBlack(object);
    4142             :   }
    4143             : 
    4144           0 :   inline void RecordMigratedSlot(HeapObject host, MaybeObject value,
    4145             :                                  Address slot) final {
    4146           0 :     if (value->IsStrongOrWeak()) {
    4147             :       MemoryChunk* p = MemoryChunk::FromAddress(value.ptr());
    4148           0 :       if (p->InYoungGeneration()) {
    4149             :         DCHECK_IMPLIES(
    4150             :             p->IsToPage(),
    4151             :             p->IsFlagSet(Page::PAGE_NEW_NEW_PROMOTION) || p->IsLargePage());
    4152             :         RememberedSet<OLD_TO_NEW>::Insert<AccessMode::NON_ATOMIC>(
    4153           0 :             MemoryChunk::FromHeapObject(host), slot);
    4154           0 :       } else if (p->IsEvacuationCandidate() && IsLive(host)) {
    4155             :         RememberedSet<OLD_TO_OLD>::Insert<AccessMode::NON_ATOMIC>(
    4156           0 :             MemoryChunk::FromHeapObject(host), slot);
    4157             :       }
    4158             :     }
    4159           0 :   }
    4160             : };
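// RecordMigratedSlot above sorts each recorded slot into a remembered set:
// values still in the young generation go into OLD_TO_NEW, values on
// evacuation candidates (with a live host) go into OLD_TO_OLD, and everything
// else is not recorded. A schematic standalone version of that decision,
// assuming hypothetical stand-in fields for the predicates used above:
enum class SketchRememberedSet { kNone, kOldToNew, kOldToOld };

struct SketchSlotInfo {
  bool value_is_heap_reference;        // Strong or weak heap pointer.
  bool value_in_young_generation;
  bool value_on_evacuation_candidate;
  bool host_is_live;                   // Live according to the full collector.
};

SketchRememberedSet ClassifyMigratedSlot(const SketchSlotInfo& slot) {
  if (!slot.value_is_heap_reference) return SketchRememberedSet::kNone;
  if (slot.value_in_young_generation) return SketchRememberedSet::kOldToNew;
  if (slot.value_on_evacuation_candidate && slot.host_is_live) {
    return SketchRememberedSet::kOldToOld;
  }
  return SketchRememberedSet::kNone;
}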
    4161             : 
    4162           0 : void MinorMarkCompactCollector::UpdatePointersAfterEvacuation() {
    4163           0 :   TRACE_GC(heap()->tracer(),
    4164             :            GCTracer::Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS);
    4165             : 
    4166             :   PointersUpdatingVisitor updating_visitor;
    4167             :   ItemParallelJob updating_job(isolate()->cancelable_task_manager(),
    4168           0 :                                &page_parallel_job_semaphore_);
    4169             : 
    4170           0 :   CollectNewSpaceArrayBufferTrackerItems(&updating_job);
    4171             :   // Create batches of global handles.
    4172           0 :   const int to_space_tasks = CollectToSpaceUpdatingItems(&updating_job);
    4173             :   int remembered_set_pages = 0;
    4174             :   remembered_set_pages += CollectRememberedSetUpdatingItems(
    4175             :       &updating_job, heap()->old_space(),
    4176           0 :       RememberedSetUpdatingMode::OLD_TO_NEW_ONLY);
    4177             :   remembered_set_pages += CollectRememberedSetUpdatingItems(
    4178             :       &updating_job, heap()->code_space(),
    4179           0 :       RememberedSetUpdatingMode::OLD_TO_NEW_ONLY);
    4180             :   remembered_set_pages += CollectRememberedSetUpdatingItems(
    4181             :       &updating_job, heap()->map_space(),
    4182           0 :       RememberedSetUpdatingMode::OLD_TO_NEW_ONLY);
    4183             :   remembered_set_pages += CollectRememberedSetUpdatingItems(
    4184             :       &updating_job, heap()->lo_space(),
    4185           0 :       RememberedSetUpdatingMode::OLD_TO_NEW_ONLY);
    4186             :   remembered_set_pages += CollectRememberedSetUpdatingItems(
    4187             :       &updating_job, heap()->code_lo_space(),
    4188           0 :       RememberedSetUpdatingMode::OLD_TO_NEW_ONLY);
    4189             :   const int remembered_set_tasks =
    4190             :       remembered_set_pages == 0 ? 0
    4191           0 :                                 : NumberOfParallelPointerUpdateTasks(
    4192           0 :                                       remembered_set_pages, old_to_new_slots_);
    4193             :   const int num_tasks = Max(to_space_tasks, remembered_set_tasks);
    4194           0 :   for (int i = 0; i < num_tasks; i++) {
    4195           0 :     updating_job.AddTask(new PointersUpdatingTask(
    4196             :         isolate(), GCTracer::BackgroundScope::
    4197           0 :                        MINOR_MC_BACKGROUND_EVACUATE_UPDATE_POINTERS));
    4198             :   }
    4199             : 
    4200             :   {
    4201           0 :     TRACE_GC(heap()->tracer(),
    4202             :              GCTracer::Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS_TO_NEW_ROOTS);
    4203           0 :     heap_->IterateRoots(&updating_visitor, VISIT_ALL_IN_MINOR_MC_UPDATE);
    4204             :   }
    4205             :   {
    4206           0 :     TRACE_GC(heap()->tracer(),
    4207             :              GCTracer::Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS_SLOTS);
    4208           0 :     updating_job.Run();
    4209           0 :     heap()->array_buffer_collector()->FreeAllocations();
    4210             :   }
    4211             : 
    4212             :   {
    4213           0 :     TRACE_GC(heap()->tracer(),
    4214             :              GCTracer::Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS_WEAK);
    4215             : 
    4216           0 :     EvacuationWeakObjectRetainer evacuation_object_retainer;
    4217           0 :     heap()->ProcessWeakListRoots(&evacuation_object_retainer);
    4218             : 
    4219             :     // Update pointers from external string table.
    4220             :     heap()->UpdateYoungReferencesInExternalStringTable(
    4221           0 :         &UpdateReferenceInExternalStringTableEntry);
    4222             :   }
    4223           0 : }
    4224             : 
    4225           0 : class MinorMarkCompactCollector::RootMarkingVisitor : public RootVisitor {
    4226             :  public:
    4227             :   explicit RootMarkingVisitor(MinorMarkCompactCollector* collector)
    4228           0 :       : collector_(collector) {}
    4229             : 
    4230           0 :   void VisitRootPointer(Root root, const char* description,
    4231             :                         FullObjectSlot p) final {
    4232             :     MarkObjectByPointer(p);
    4233           0 :   }
    4234             : 
    4235           0 :   void VisitRootPointers(Root root, const char* description,
    4236             :                          FullObjectSlot start, FullObjectSlot end) final {
    4237           0 :     for (FullObjectSlot p = start; p < end; ++p) {
    4238             :       MarkObjectByPointer(p);
    4239             :     }
    4240           0 :   }
    4241             : 
    4242             :  private:
    4243             :   V8_INLINE void MarkObjectByPointer(FullObjectSlot p) {
    4244           0 :     if (!(*p)->IsHeapObject()) return;
    4245           0 :     collector_->MarkRootObject(HeapObject::cast(*p));
    4246             :   }
    4247             :   MinorMarkCompactCollector* const collector_;
    4248             : };
    4249             : 
    4250           0 : void MinorMarkCompactCollector::CollectGarbage() {
    4251             :   {
    4252           0 :     TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_SWEEPING);
    4253           0 :     heap()->mark_compact_collector()->sweeper()->EnsureIterabilityCompleted();
    4254           0 :     CleanupSweepToIteratePages();
    4255             :   }
    4256             : 
    4257           0 :   MarkLiveObjects();
    4258           0 :   ClearNonLiveReferences();
    4259             : #ifdef VERIFY_HEAP
    4260             :   if (FLAG_verify_heap) {
    4261             :     YoungGenerationMarkingVerifier verifier(heap());
    4262             :     verifier.Run();
    4263             :   }
    4264             : #endif  // VERIFY_HEAP
    4265             : 
    4266           0 :   Evacuate();
    4267             : #ifdef VERIFY_HEAP
    4268             :   if (FLAG_verify_heap) {
    4269             :     YoungGenerationEvacuationVerifier verifier(heap());
    4270             :     verifier.Run();
    4271             :   }
    4272             : #endif  // VERIFY_HEAP
    4273             : 
    4274             :   {
    4275           0 :     TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARKING_DEQUE);
    4276           0 :     heap()->incremental_marking()->UpdateMarkingWorklistAfterScavenge();
    4277             :   }
    4278             : 
    4279             :   {
    4280           0 :     TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_RESET_LIVENESS);
    4281           0 :     for (Page* p :
    4282             :          PageRange(heap()->new_space()->from_space().first_page(), nullptr)) {
    4283             :       DCHECK(!p->IsFlagSet(Page::SWEEP_TO_ITERATE));
    4284           0 :       non_atomic_marking_state()->ClearLiveness(p);
    4285           0 :       if (FLAG_concurrent_marking) {
    4286             :         // Ensure that the concurrent marker does not track pages that are
    4287             :         // going to be unmapped.
    4288           0 :         heap()->concurrent_marking()->ClearMemoryChunkData(p);
    4289             :       }
    4290             :     }
    4291             :     // Since we promote all surviving large objects immediately, all remaining
    4292             :     // large objects must be dead.
    4293             :     // TODO(ulan): Don't free all as soon as we have an intermediate generation.
    4294           0 :     heap()->new_lo_space()->FreeDeadObjects([](HeapObject) { return true; });
    4295             :   }
    4296             : 
    4297             :   RememberedSet<OLD_TO_NEW>::IterateMemoryChunks(
    4298           0 :       heap(), [](MemoryChunk* chunk) {
    4299           0 :         if (chunk->SweepingDone()) {
    4300           0 :           RememberedSet<OLD_TO_NEW>::FreeEmptyBuckets(chunk);
    4301             :         } else {
    4302           0 :           RememberedSet<OLD_TO_NEW>::PreFreeEmptyBuckets(chunk);
    4303             :         }
    4304           0 :       });
    4305             : 
    4306             :   heap()->account_external_memory_concurrently_freed();
    4307           0 : }
    4308             : 
    4309           0 : void MinorMarkCompactCollector::MakeIterable(
    4310             :     Page* p, MarkingTreatmentMode marking_mode,
    4311             :     FreeSpaceTreatmentMode free_space_mode) {
    4312           0 :   CHECK(!p->IsLargePage());
    4313             :   // We have to clear the full collector's mark bits for the areas that we
    4314             :   // remove here.
    4315             :   MarkCompactCollector* full_collector = heap()->mark_compact_collector();
    4316             :   Address free_start = p->area_start();
    4317             : 
    4318           0 :   for (auto object_and_size :
    4319             :        LiveObjectRange<kGreyObjects>(p, marking_state()->bitmap(p))) {
    4320           0 :     HeapObject const object = object_and_size.first;
    4321             :     DCHECK(non_atomic_marking_state()->IsGrey(object));
    4322             :     Address free_end = object->address();
    4323           0 :     if (free_end != free_start) {
    4324           0 :       CHECK_GT(free_end, free_start);
    4325           0 :       size_t size = static_cast<size_t>(free_end - free_start);
    4326           0 :       full_collector->non_atomic_marking_state()->bitmap(p)->ClearRange(
    4327             :           p->AddressToMarkbitIndex(free_start),
    4328           0 :           p->AddressToMarkbitIndex(free_end));
    4329           0 :       if (free_space_mode == ZAP_FREE_SPACE) {
    4330             :         ZapCode(free_start, size);
    4331             :       }
    4332             :       p->heap()->CreateFillerObjectAt(free_start, static_cast<int>(size),
    4333           0 :                                       ClearRecordedSlots::kNo);
    4334             :     }
    4335           0 :     Map map = object->synchronized_map();
    4336           0 :     int size = object->SizeFromMap(map);
    4337           0 :     free_start = free_end + size;
    4338             :   }
    4339             : 
    4340           0 :   if (free_start != p->area_end()) {
    4341           0 :     CHECK_GT(p->area_end(), free_start);
    4342           0 :     size_t size = static_cast<size_t>(p->area_end() - free_start);
    4343           0 :     full_collector->non_atomic_marking_state()->bitmap(p)->ClearRange(
    4344             :         p->AddressToMarkbitIndex(free_start),
    4345           0 :         p->AddressToMarkbitIndex(p->area_end()));
    4346           0 :     if (free_space_mode == ZAP_FREE_SPACE) {
    4347             :       ZapCode(free_start, size);
    4348             :     }
    4349             :     p->heap()->CreateFillerObjectAt(free_start, static_cast<int>(size),
    4350           0 :                                     ClearRecordedSlots::kNo);
    4351             :   }
    4352             : 
    4353           0 :   if (marking_mode == MarkingTreatmentMode::CLEAR) {
    4354           0 :     non_atomic_marking_state()->ClearLiveness(p);
    4355             :     p->ClearFlag(Page::SWEEP_TO_ITERATE);
    4356             :   }
    4357           0 : }
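// MakeIterable above walks the marked objects of a page in address order and
// turns every gap between them (and the tail of the page) into a filler so the
// page can be iterated linearly. A standalone sketch of that gap-filling walk,
// using hypothetical stand-in types instead of V8's Address/Page machinery:
#include <cstddef>
#include <cstdint>
#include <vector>

using SketchAddress = uintptr_t;

struct SketchLiveObject {
  SketchAddress start;
  size_t size;
};

// `live` must be sorted by start address, as the live-object iteration above
// guarantees. `create_filler` stands in for CreateFillerObjectAt().
void FillFreeRanges(SketchAddress area_start, SketchAddress area_end,
                    const std::vector<SketchLiveObject>& live,
                    void (*create_filler)(SketchAddress start, size_t size)) {
  SketchAddress free_start = area_start;
  for (const SketchLiveObject& object : live) {
    if (object.start != free_start) {
      create_filler(free_start, object.start - free_start);
    }
    free_start = object.start + object.size;
  }
  if (free_start != area_end) {
    create_filler(free_start, area_end - free_start);
  }
}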
    4358             : 
    4359             : namespace {
    4360             : 
    4361             : // Helper class for pruning the string table.
    4362           0 : class YoungGenerationExternalStringTableCleaner : public RootVisitor {
    4363             :  public:
    4364             :   YoungGenerationExternalStringTableCleaner(
    4365             :       MinorMarkCompactCollector* collector)
    4366             :       : heap_(collector->heap()),
    4367           0 :         marking_state_(collector->non_atomic_marking_state()) {}
    4368             : 
    4369           0 :   void VisitRootPointers(Root root, const char* description,
    4370             :                          FullObjectSlot start, FullObjectSlot end) override {
    4371             :     DCHECK_EQ(static_cast<int>(root),
    4372             :               static_cast<int>(Root::kExternalStringsTable));
    4373             :     // Visit all HeapObject pointers in [start, end).
    4374           0 :     for (FullObjectSlot p = start; p < end; ++p) {
    4375             :       Object o = *p;
    4376           0 :       if (o->IsHeapObject()) {
    4377             :         HeapObject heap_object = HeapObject::cast(o);
    4378           0 :         if (marking_state_->IsWhite(heap_object)) {
    4379           0 :           if (o->IsExternalString()) {
    4380           0 :             heap_->FinalizeExternalString(String::cast(*p));
    4381             :           } else {
    4382             :             // The original external string may have been internalized.
    4383             :             DCHECK(o->IsThinString());
    4384             :           }
    4385             :           // Set the entry to the_hole_value (as deleted).
    4386           0 :           p.store(ReadOnlyRoots(heap_).the_hole_value());
    4387             :         }
    4388             :       }
    4389             :     }
    4390           0 :   }
    4391             : 
    4392             :  private:
    4393             :   Heap* heap_;
    4394             :   MinorMarkCompactCollector::NonAtomicMarkingState* marking_state_;
    4395             : };
    4396             : 
    4397             : // Marked young generation objects and all old generation objects will be
    4398             : // retained.
    4399           0 : class MinorMarkCompactWeakObjectRetainer : public WeakObjectRetainer {
    4400             :  public:
    4401             :   explicit MinorMarkCompactWeakObjectRetainer(
    4402             :       MinorMarkCompactCollector* collector)
    4403           0 :       : marking_state_(collector->non_atomic_marking_state()) {}
    4404             : 
    4405           0 :   Object RetainAs(Object object) override {
    4406             :     HeapObject heap_object = HeapObject::cast(object);
    4407           0 :     if (!Heap::InYoungGeneration(heap_object)) return object;
    4408             : 
    4409             :     // Young generation marking only marks to grey instead of black.
    4410             :     DCHECK(!marking_state_->IsBlack(heap_object));
    4411           0 :     if (marking_state_->IsGrey(heap_object)) {
    4412           0 :       return object;
    4413             :     }
    4414           0 :     return Object();
    4415             :   }
    4416             : 
    4417             :  private:
    4418             :   MinorMarkCompactCollector::NonAtomicMarkingState* marking_state_;
    4419             : };
    4420             : 
    4421             : }  // namespace
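// The retainer above keeps a weak-list element when it is either an
// old-generation object or a young object that was marked grey, and clears it
// otherwise. A schematic standalone version, assuming hypothetical stand-in
// fields for the predicates used above:
struct SketchRetentionState {
  bool in_young_generation;
  bool marked_grey;
};

// Returns true if the weak-list entry survives a minor GC.
bool RetainWeakListEntry(const SketchRetentionState& object) {
  if (!object.in_young_generation) return true;  // Old objects are untouched.
  return object.marked_grey;                     // Young objects only if marked.
}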
    4422             : 
    4423           0 : void MinorMarkCompactCollector::ClearNonLiveReferences() {
    4424           0 :   TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_CLEAR);
    4425             : 
    4426             :   {
    4427           0 :     TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_CLEAR_STRING_TABLE);
    4428             :     // Internalized strings are always stored in old space, so there is no need
    4429             :     // to clean them here.
    4430             :     YoungGenerationExternalStringTableCleaner external_visitor(this);
    4431           0 :     heap()->external_string_table_.IterateYoung(&external_visitor);
    4432           0 :     heap()->external_string_table_.CleanUpYoung();
    4433             :   }
    4434             : 
    4435             :   {
    4436           0 :     TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_CLEAR_WEAK_LISTS);
    4437             :     // Process the weak references.
    4438             :     MinorMarkCompactWeakObjectRetainer retainer(this);
    4439           0 :     heap()->ProcessYoungWeakReferences(&retainer);
    4440             :   }
    4441           0 : }
    4442             : 
    4443           0 : void MinorMarkCompactCollector::EvacuatePrologue() {
    4444             :   NewSpace* new_space = heap()->new_space();
    4445             :   // Append the list of new space pages to be processed.
    4446           0 :   for (Page* p :
    4447           0 :        PageRange(new_space->first_allocatable_address(), new_space->top())) {
    4448           0 :     new_space_evacuation_pages_.push_back(p);
    4449             :   }
    4450           0 :   new_space->Flip();
    4451           0 :   new_space->ResetLinearAllocationArea();
    4452             : 
    4453           0 :   heap()->new_lo_space()->Flip();
    4454             :   heap()->new_lo_space()->ResetPendingObject();
    4455           0 : }
    4456             : 
    4457           0 : void MinorMarkCompactCollector::EvacuateEpilogue() {
    4458             :   heap()->new_space()->set_age_mark(heap()->new_space()->top());
    4459             :   // Give pages that are queued to be freed back to the OS.
    4460           0 :   heap()->memory_allocator()->unmapper()->FreeQueuedChunks();
    4461           0 : }
    4462             : 
    4463           0 : UpdatingItem* MinorMarkCompactCollector::CreateToSpaceUpdatingItem(
    4464             :     MemoryChunk* chunk, Address start, Address end) {
    4465             :   return new ToSpaceUpdatingItem<NonAtomicMarkingState>(
    4466           0 :       chunk, start, end, non_atomic_marking_state());
    4467             : }
    4468             : 
    4469           0 : UpdatingItem* MinorMarkCompactCollector::CreateRememberedSetUpdatingItem(
    4470             :     MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode) {
    4471             :   return new RememberedSetUpdatingItem<NonAtomicMarkingState>(
    4472           0 :       heap(), non_atomic_marking_state(), chunk, updating_mode);
    4473             : }
    4474             : 
    4475             : class MarkingItem;
    4476             : class PageMarkingItem;
    4477             : class RootMarkingItem;
    4478             : class YoungGenerationMarkingTask;
    4479             : 
    4480           0 : class MarkingItem : public ItemParallelJob::Item {
    4481             :  public:
    4482           0 :   ~MarkingItem() override = default;
    4483             :   virtual void Process(YoungGenerationMarkingTask* task) = 0;
    4484             : };
    4485             : 
    4486           0 : class YoungGenerationMarkingTask : public ItemParallelJob::Task {
    4487             :  public:
    4488           0 :   YoungGenerationMarkingTask(
    4489             :       Isolate* isolate, MinorMarkCompactCollector* collector,
    4490             :       MinorMarkCompactCollector::MarkingWorklist* global_worklist, int task_id)
    4491             :       : ItemParallelJob::Task(isolate),
    4492             :         collector_(collector),
    4493             :         marking_worklist_(global_worklist, task_id),
    4494             :         marking_state_(collector->marking_state()),
    4495           0 :         visitor_(marking_state_, global_worklist, task_id) {
    4496           0 :     local_live_bytes_.reserve(isolate->heap()->new_space()->Capacity() /
    4497             :                               Page::kPageSize);
    4498           0 :   }
    4499             : 
    4500           0 :   void RunInParallel() override {
    4501           0 :     TRACE_BACKGROUND_GC(collector_->heap()->tracer(),
    4502             :                         GCTracer::BackgroundScope::MINOR_MC_BACKGROUND_MARKING);
    4503           0 :     double marking_time = 0.0;
    4504             :     {
    4505             :       TimedScope scope(&marking_time);
    4506             :       MarkingItem* item = nullptr;
    4507           0 :       while ((item = GetItem<MarkingItem>()) != nullptr) {
    4508           0 :         item->Process(this);
    4509           0 :         item->MarkFinished();
    4510           0 :         EmptyLocalMarkingWorklist();
    4511             :       }
    4512           0 :       EmptyMarkingWorklist();
    4513             :       DCHECK(marking_worklist_.IsLocalEmpty());
    4514           0 :       FlushLiveBytes();
    4515             :     }
    4516           0 :     if (FLAG_trace_minor_mc_parallel_marking) {
    4517           0 :       PrintIsolate(collector_->isolate(), "marking[%p]: time=%f\n",
    4518           0 :                    static_cast<void*>(this), marking_time);
    4519             :     }
    4520           0 :   }
    4521             : 
    4522           0 :   void MarkObject(Object object) {
    4523           0 :     if (!Heap::InYoungGeneration(object)) return;
    4524             :     HeapObject heap_object = HeapObject::cast(object);
    4525           0 :     if (marking_state_->WhiteToGrey(heap_object)) {
    4526             :       const int size = visitor_.Visit(heap_object);
    4527           0 :       IncrementLiveBytes(heap_object, size);
    4528             :     }
    4529             :   }
    4530             : 
    4531             :  private:
    4532           0 :   void EmptyLocalMarkingWorklist() {
    4533           0 :     HeapObject object;
    4534           0 :     while (marking_worklist_.Pop(&object)) {
    4535             :       const int size = visitor_.Visit(object);
    4536           0 :       IncrementLiveBytes(object, size);
    4537             :     }
    4538           0 :   }
    4539             : 
    4540           0 :   void EmptyMarkingWorklist() {
    4541           0 :     HeapObject object;
    4542           0 :     while (marking_worklist_.Pop(&object)) {
    4543             :       const int size = visitor_.Visit(object);
    4544           0 :       IncrementLiveBytes(object, size);
    4545             :     }
    4546           0 :   }
    4547             : 
    4548             :   void IncrementLiveBytes(HeapObject object, intptr_t bytes) {
    4549           0 :     local_live_bytes_[Page::FromHeapObject(object)] += bytes;
    4550             :   }
    4551             : 
    4552           0 :   void FlushLiveBytes() {
    4553           0 :     for (auto pair : local_live_bytes_) {
    4554             :       marking_state_->IncrementLiveBytes(pair.first, pair.second);
    4555             :     }
    4556           0 :   }
    4557             : 
    4558             :   MinorMarkCompactCollector* collector_;
    4559             :   MinorMarkCompactCollector::MarkingWorklist::View marking_worklist_;
    4560             :   MinorMarkCompactCollector::MarkingState* marking_state_;
    4561             :   YoungGenerationMarkingVisitor visitor_;
    4562             :   std::unordered_map<Page*, intptr_t, Page::Hasher> local_live_bytes_;
    4563             : };
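// The task above accumulates live bytes in a task-local map and flushes the
// totals once when it finishes, so shared per-page counters are not contended
// on every marked object. A standalone sketch of that pattern, assuming a
// hypothetical mutex-protected shared map (V8 itself uses its own counters):
#include <cstdint>
#include <mutex>
#include <unordered_map>

using SketchPageId = int;

struct SketchSharedLiveBytes {
  std::mutex mutex;
  std::unordered_map<SketchPageId, int64_t> bytes;
};

class SketchLocalLiveBytes {
 public:
  explicit SketchLocalLiveBytes(SketchSharedLiveBytes* shared)
      : shared_(shared) {}

  // Cheap, unsynchronised increment on the marking fast path.
  void Increment(SketchPageId page, int64_t size) { local_[page] += size; }

  // One synchronised flush per task, mirroring FlushLiveBytes() above.
  void Flush() {
    std::lock_guard<std::mutex> guard(shared_->mutex);
    for (const auto& pair : local_) shared_->bytes[pair.first] += pair.second;
    local_.clear();
  }

 private:
  SketchSharedLiveBytes* shared_;
  std::unordered_map<SketchPageId, int64_t> local_;
};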
    4564             : 
    4565             : class PageMarkingItem : public MarkingItem {
    4566             :  public:
    4567             :   explicit PageMarkingItem(MemoryChunk* chunk, std::atomic<int>* global_slots)
    4568           0 :       : chunk_(chunk), global_slots_(global_slots), slots_(0) {}
    4569           0 :   ~PageMarkingItem() override { *global_slots_ = *global_slots_ + slots_; }
    4570             : 
    4571           0 :   void Process(YoungGenerationMarkingTask* task) override {
    4572           0 :     TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
    4573             :                  "PageMarkingItem::Process");
    4574           0 :     base::MutexGuard guard(chunk_->mutex());
    4575             :     MarkUntypedPointers(task);
    4576             :     MarkTypedPointers(task);
    4577           0 :   }
    4578             : 
    4579             :  private:
    4580             :   inline Heap* heap() { return chunk_->heap(); }
    4581             : 
    4582             :   void MarkUntypedPointers(YoungGenerationMarkingTask* task) {
    4583           0 :     RememberedSet<OLD_TO_NEW>::Iterate(chunk_,
    4584           0 :                                        [this, task](MaybeObjectSlot slot) {
    4585             :                                          return CheckAndMarkObject(task, slot);
    4586           0 :                                        },
    4587           0 :                                        SlotSet::PREFREE_EMPTY_BUCKETS);
    4588             :   }
    4589             : 
    4590             :   void MarkTypedPointers(YoungGenerationMarkingTask* task) {
    4591           0 :     RememberedSet<OLD_TO_NEW>::IterateTyped(
    4592             :         chunk_, [=](SlotType slot_type, Address slot) {
    4593           0 :           return UpdateTypedSlotHelper::UpdateTypedSlot(
    4594           0 :               heap(), slot_type, slot, [this, task](FullMaybeObjectSlot slot) {
    4595             :                 return CheckAndMarkObject(task, slot);
    4596           0 :               });
    4597           0 :         });
    4598             :   }
    4599             : 
    4600             :   template <typename TSlot>
    4601             :   V8_INLINE SlotCallbackResult
    4602             :   CheckAndMarkObject(YoungGenerationMarkingTask* task, TSlot slot) {
    4603             :     static_assert(
    4604             :         std::is_same<TSlot, FullMaybeObjectSlot>::value ||
    4605             :             std::is_same<TSlot, MaybeObjectSlot>::value,
    4606             :         "Only FullMaybeObjectSlot and MaybeObjectSlot are expected here");
    4607             :     MaybeObject object = *slot;
    4608           0 :     if (Heap::InYoungGeneration(object)) {
    4609             :       // Marking happens before flipping the young generation, so the object
    4610             :       // has to be in a to-page.
    4611             :       DCHECK(Heap::InToPage(object));
    4612           0 :       HeapObject heap_object;
    4613             :       bool success = object.GetHeapObject(&heap_object);
    4614             :       USE(success);
    4615             :       DCHECK(success);
    4616           0 :       task->MarkObject(heap_object);
    4617           0 :       slots_++;
    4618             :       return KEEP_SLOT;
    4619             :     }
    4620             :     return REMOVE_SLOT;
    4621             :   }
    4622             : 
    4623             :   MemoryChunk* chunk_;
    4624             :   std::atomic<int>* global_slots_;
    4625             :   int slots_;
    4626             : };
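// CheckAndMarkObject above both marks the slot's target and tells the
// remembered-set iterator whether to keep the slot (KEEP_SLOT when the target
// is still young, REMOVE_SLOT otherwise). A standalone sketch of that
// keep/remove filtering over a flat slot list, assuming hypothetical
// is_young/mark_young callbacks rather than V8's slot-set machinery:
#include <algorithm>
#include <functional>
#include <vector>

struct SketchSlot {
  void* target;
};

void FilterOldToNewSlots(std::vector<SketchSlot>* slots,
                         const std::function<bool(void*)>& is_young,
                         const std::function<void(void*)>& mark_young) {
  auto should_remove = [&](const SketchSlot& slot) {
    if (is_young(slot.target)) {
      mark_young(slot.target);  // Target still young: mark it, keep the slot.
      return false;
    }
    return true;  // Target no longer points into the young generation.
  };
  slots->erase(std::remove_if(slots->begin(), slots->end(), should_remove),
               slots->end());
}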
    4627             : 
    4628           0 : void MinorMarkCompactCollector::MarkRootSetInParallel(
    4629             :     RootMarkingVisitor* root_visitor) {
    4630             :   std::atomic<int> slots;
    4631             :   {
    4632             :     ItemParallelJob job(isolate()->cancelable_task_manager(),
    4633           0 :                         &page_parallel_job_semaphore_);
    4634             : 
    4635             :     // Seed the root set (roots + old->new set).
    4636             :     {
    4637           0 :       TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_SEED);
    4638             :       isolate()->global_handles()->IdentifyWeakUnmodifiedObjects(
    4639           0 :           &JSObject::IsUnmodifiedApiObject);
    4640           0 :       heap()->IterateRoots(root_visitor, VISIT_ALL_IN_MINOR_MC_MARK);
    4641             :       // Create items for each page.
    4642             :       RememberedSet<OLD_TO_NEW>::IterateMemoryChunks(
    4643           0 :           heap(), [&job, &slots](MemoryChunk* chunk) {
    4644           0 :             job.AddItem(new PageMarkingItem(chunk, &slots));
    4645           0 :           });
    4646             :     }
    4647             : 
    4648             :     // Add tasks and run in parallel.
    4649             :     {
    4650           0 :       TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_ROOTS);
    4651             :       const int new_space_pages =
    4652           0 :           static_cast<int>(heap()->new_space()->Capacity()) / Page::kPageSize;
    4653           0 :       const int num_tasks = NumberOfParallelMarkingTasks(new_space_pages);
    4654           0 :       for (int i = 0; i < num_tasks; i++) {
    4655           0 :         job.AddTask(
    4656           0 :             new YoungGenerationMarkingTask(isolate(), this, worklist(), i));
    4657             :       }
    4658           0 :       job.Run();
    4659             :       DCHECK(worklist()->IsEmpty());
    4660             :     }
    4661             :   }
    4662           0 :   old_to_new_slots_ = slots;
    4663           0 : }
    4664             : 
    4665           0 : void MinorMarkCompactCollector::MarkLiveObjects() {
    4666           0 :   TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK);
    4667             : 
    4668             :   PostponeInterruptsScope postpone(isolate());
    4669             : 
    4670             :   RootMarkingVisitor root_visitor(this);
    4671             : 
    4672           0 :   MarkRootSetInParallel(&root_visitor);
    4673             : 
    4674             :   // Mark the rest on the main thread.
    4675             :   {
    4676           0 :     TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_WEAK);
    4677           0 :     ProcessMarkingWorklist();
    4678             :   }
    4679             : 
    4680             :   {
    4681           0 :     TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_GLOBAL_HANDLES);
    4682             :     isolate()->global_handles()->MarkYoungWeakUnmodifiedObjectsPending(
    4683           0 :         &IsUnmarkedObjectForYoungGeneration);
    4684             :     isolate()->global_handles()->IterateYoungWeakUnmodifiedRootsForFinalizers(
    4685           0 :         &root_visitor);
    4686             :     isolate()
    4687             :         ->global_handles()
    4688             :         ->IterateYoungWeakUnmodifiedRootsForPhantomHandles(
    4689           0 :             &root_visitor, &IsUnmarkedObjectForYoungGeneration);
    4690           0 :     ProcessMarkingWorklist();
    4691             :   }
    4692           0 : }
    4693             : 
    4694           0 : void MinorMarkCompactCollector::ProcessMarkingWorklist() {
    4695             :   MarkingWorklist::View marking_worklist(worklist(), kMainMarker);
    4696           0 :   HeapObject object;
    4697           0 :   while (marking_worklist.Pop(&object)) {
    4698             :     DCHECK(!object->IsFiller());
    4699             :     DCHECK(object->IsHeapObject());
    4700             :     DCHECK(heap()->Contains(object));
    4701             :     DCHECK(non_atomic_marking_state()->IsGrey(object));
    4702             :     main_marking_visitor()->Visit(object);
    4703             :   }
    4704             :   DCHECK(marking_worklist.IsLocalEmpty());
    4705           0 : }
    4706             : 
    4707           0 : void MinorMarkCompactCollector::Evacuate() {
    4708           0 :   TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_EVACUATE);
    4709             :   base::MutexGuard guard(heap()->relocation_mutex());
    4710             : 
    4711             :   {
    4712           0 :     TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_EVACUATE_PROLOGUE);
    4713           0 :     EvacuatePrologue();
    4714             :   }
    4715             : 
    4716             :   {
    4717           0 :     TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_EVACUATE_COPY);
    4718           0 :     EvacuatePagesInParallel();
    4719             :   }
    4720             : 
    4721           0 :   UpdatePointersAfterEvacuation();
    4722             : 
    4723             :   {
    4724           0 :     TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_EVACUATE_REBALANCE);
    4725           0 :     if (!heap()->new_space()->Rebalance()) {
    4726           0 :       heap()->FatalProcessOutOfMemory("NewSpace::Rebalance");
    4727             :     }
    4728             :   }
    4729             : 
    4730             :   {
    4731           0 :     TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_EVACUATE_CLEAN_UP);
    4732           0 :     for (Page* p : new_space_evacuation_pages_) {
    4733           0 :       if (p->IsFlagSet(Page::PAGE_NEW_NEW_PROMOTION) ||
    4734             :           p->IsFlagSet(Page::PAGE_NEW_OLD_PROMOTION)) {
    4735             :         p->ClearFlag(Page::PAGE_NEW_NEW_PROMOTION);
    4736             :         p->ClearFlag(Page::PAGE_NEW_OLD_PROMOTION);
    4737             :         p->SetFlag(Page::SWEEP_TO_ITERATE);
    4738           0 :         sweep_to_iterate_pages_.push_back(p);
    4739             :       }
    4740             :     }
    4741             :     new_space_evacuation_pages_.clear();
    4742             :   }
    4743             : 
    4744             :   {
    4745           0 :     TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_EVACUATE_EPILOGUE);
    4746           0 :     EvacuateEpilogue();
    4747             :   }
    4748           0 : }
    4749             : 
    4750             : namespace {
    4751             : 
    4752           0 : class YoungGenerationEvacuator : public Evacuator {
    4753             :  public:
    4754             :   explicit YoungGenerationEvacuator(MinorMarkCompactCollector* collector)
    4755             :       : Evacuator(collector->heap(), &record_visitor_),
    4756             :         record_visitor_(collector->heap()->mark_compact_collector()),
    4757           0 :         collector_(collector) {}
    4758             : 
    4759           0 :   GCTracer::BackgroundScope::ScopeId GetBackgroundTracingScope() override {
    4760           0 :     return GCTracer::BackgroundScope::MINOR_MC_BACKGROUND_EVACUATE_COPY;
    4761             :   }
    4762             : 
    4763             :  protected:
    4764             :   void RawEvacuatePage(MemoryChunk* chunk, intptr_t* live_bytes) override;
    4765             : 
    4766             :   YoungGenerationRecordMigratedSlotVisitor record_visitor_;
    4767             :   MinorMarkCompactCollector* collector_;
    4768             : };
    4769             : 
    4770           0 : void YoungGenerationEvacuator::RawEvacuatePage(MemoryChunk* chunk,
    4771             :                                                intptr_t* live_bytes) {
    4772           0 :   TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
    4773             :                "YoungGenerationEvacuator::RawEvacuatePage");
    4774             :   MinorMarkCompactCollector::NonAtomicMarkingState* marking_state =
    4775           0 :       collector_->non_atomic_marking_state();
    4776           0 :   *live_bytes = marking_state->live_bytes(chunk);
    4777           0 :   switch (ComputeEvacuationMode(chunk)) {
    4778             :     case kObjectsNewToOld:
    4779           0 :       LiveObjectVisitor::VisitGreyObjectsNoFail(
    4780             :           chunk, marking_state, &new_space_visitor_,
    4781           0 :           LiveObjectVisitor::kClearMarkbits);
    4782             :       // ArrayBufferTracker will be updated during pointer updating.
    4783           0 :       break;
    4784             :     case kPageNewToOld:
    4785           0 :       LiveObjectVisitor::VisitGreyObjectsNoFail(
    4786             :           chunk, marking_state, &new_to_old_page_visitor_,
    4787           0 :           LiveObjectVisitor::kKeepMarking);
    4788             :       new_to_old_page_visitor_.account_moved_bytes(
    4789             :           marking_state->live_bytes(chunk));
    4790           0 :       if (!chunk->IsLargePage()) {
    4791             :         // TODO(mlippautz): If cleaning array buffers is too slow here we can
    4792             :         // delay it until the next GC.
    4793           0 :         ArrayBufferTracker::FreeDead(static_cast<Page*>(chunk), marking_state);
    4794             :         if (heap()->ShouldZapGarbage()) {
    4795             :           collector_->MakeIterable(static_cast<Page*>(chunk),
    4796             :                                    MarkingTreatmentMode::KEEP, ZAP_FREE_SPACE);
    4797           0 :         } else if (heap()->incremental_marking()->IsMarking()) {
    4798             :           // When incremental marking is on, we need to clear the mark bits of
    4799             :           // the full collector. We cannot yet discard the young generation mark
    4800             :           // bits as they are still relevant for pointer updating.
    4801           0 :           collector_->MakeIterable(static_cast<Page*>(chunk),
    4802             :                                    MarkingTreatmentMode::KEEP,
    4803           0 :                                    IGNORE_FREE_SPACE);
    4804             :         }
    4805             :       }
    4806             :       break;
    4807             :     case kPageNewToNew:
    4808           0 :       LiveObjectVisitor::VisitGreyObjectsNoFail(
    4809             :           chunk, marking_state, &new_to_new_page_visitor_,
    4810           0 :           LiveObjectVisitor::kKeepMarking);
    4811             :       new_to_new_page_visitor_.account_moved_bytes(
    4812             :           marking_state->live_bytes(chunk));
    4813             :       DCHECK(!chunk->IsLargePage());
    4814             :       // TODO(mlippautz): If cleaning array buffers is too slow here we can
    4815             :       // delay it until the next GC.
    4816           0 :       ArrayBufferTracker::FreeDead(static_cast<Page*>(chunk), marking_state);
    4817             :       if (heap()->ShouldZapGarbage()) {
    4818             :         collector_->MakeIterable(static_cast<Page*>(chunk),
    4819             :                                  MarkingTreatmentMode::KEEP, ZAP_FREE_SPACE);
    4820           0 :       } else if (heap()->incremental_marking()->IsMarking()) {
    4821             :         // When incremental marking is on, we need to clear the mark bits of
    4822             :         // the full collector. We cannot yet discard the young generation mark
    4823             :         // bits as they are still relevant for pointer updating.
    4824           0 :         collector_->MakeIterable(static_cast<Page*>(chunk),
    4825           0 :                                  MarkingTreatmentMode::KEEP, IGNORE_FREE_SPACE);
    4826             :       }
    4827             :       break;
    4828             :     case kObjectsOldToOld:
    4829           0 :       UNREACHABLE();
    4830             :       break;
    4831             :   }
    4832           0 : }
    4833             : 
    4834             : }  // namespace
    4835             : 
    4836           0 : void MinorMarkCompactCollector::EvacuatePagesInParallel() {
    4837             :   ItemParallelJob evacuation_job(isolate()->cancelable_task_manager(),
    4838           0 :                                  &page_parallel_job_semaphore_);
    4839             :   intptr_t live_bytes = 0;
    4840             : 
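                     :   // Collect evacuation work items. Pages with no live bytes and no array
                     :   // buffers are skipped. Pages selected by ShouldMovePage are moved as a
                     :   // whole: pages below the age mark are promoted to old space (NEW_TO_OLD),
                     :   // the rest are moved within new space (NEW_TO_NEW). Every remaining page
                     :   // is added to the parallel evacuation job.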
    4841           0 :   for (Page* page : new_space_evacuation_pages_) {
    4842             :     intptr_t live_bytes_on_page = non_atomic_marking_state()->live_bytes(page);
    4843           0 :     if (live_bytes_on_page == 0 && !page->contains_array_buffers()) continue;
    4844           0 :     live_bytes += live_bytes_on_page;
    4845           0 :     if (ShouldMovePage(page, live_bytes_on_page)) {
    4846           0 :       if (page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK)) {
    4847           0 :         EvacuateNewSpacePageVisitor<NEW_TO_OLD>::Move(page);
    4848             :       } else {
    4849           0 :         EvacuateNewSpacePageVisitor<NEW_TO_NEW>::Move(page);
    4850             :       }
    4851             :     }
    4852           0 :     evacuation_job.AddItem(new EvacuationItem(page));
    4853             :   }
    4854             : 
    4855             :   // Promote young generation large objects.
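                     :   // The iterator is advanced before a page is possibly promoted, since
                     :   // PromoteNewLargeObject removes the page from the new large object space.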
    4856           0 :   for (auto it = heap()->new_lo_space()->begin();
    4857             :        it != heap()->new_lo_space()->end();) {
    4858             :     LargePage* current = *it;
    4859             :     it++;
    4860             :     HeapObject object = current->GetObject();
    4861             :     DCHECK(!non_atomic_marking_state_.IsBlack(object));
    4862           0 :     if (non_atomic_marking_state_.IsGrey(object)) {
    4863           0 :       heap_->lo_space()->PromoteNewLargeObject(current);
    4864             :       current->SetFlag(Page::PAGE_NEW_OLD_PROMOTION);
    4865           0 :       evacuation_job.AddItem(new EvacuationItem(current));
    4866             :     }
    4867             :   }
    4868           0 :   if (evacuation_job.NumberOfItems() == 0) return;
    4869             : 
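                     :   // Run the collected evacuation items in parallel. The observer couples
                     :   // migrations performed by the young generation evacuator to the full
                     :   // mark-compact collector.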
    4870             :   YoungGenerationMigrationObserver observer(heap(),
    4871             :                                             heap()->mark_compact_collector());
    4872           0 :   CreateAndExecuteEvacuationTasks<YoungGenerationEvacuator>(
    4873           0 :       this, &evacuation_job, &observer, live_bytes);
    4874             : }
    4875             : 
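                     : // Adds an ArrayBufferTracker updating item for every new space page whose
                     : // objects are evacuated individually (kObjectsNewToOld) and that has a
                     : // local tracker. Returns the number of items added to the job.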
    4876           0 : int MinorMarkCompactCollector::CollectNewSpaceArrayBufferTrackerItems(
    4877             :     ItemParallelJob* job) {
    4878             :   int pages = 0;
    4879           0 :   for (Page* p : new_space_evacuation_pages_) {
    4880           0 :     if (Evacuator::ComputeEvacuationMode(p) == Evacuator::kObjectsNewToOld) {
    4881           0 :       if (p->local_tracker() == nullptr) continue;
    4882             : 
    4883           0 :       pages++;
    4884           0 :       job->AddItem(new ArrayBufferTrackerUpdatingItem(
    4885             :           p, ArrayBufferTrackerUpdatingItem::kRegular));
    4886             :     }
    4887             :   }
    4888           0 :   return pages;
    4889             : }
    4890             : 
    4891             : #endif  // ENABLE_MINOR_MC
    4892             : 
    4893             : }  // namespace internal
    4894      122036 : }  // namespace v8

Generated by: LCOV version 1.10