LCOV - code coverage report

Current view: src/heap/scavenger.cc
Test:         app.info
Date:         2019-04-17

                  Hit    Total    Coverage
Lines:            216      233      92.7 %
Functions:         30       41      73.2 %

          Line data    Source code
       1             : // Copyright 2015 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #include "src/heap/scavenger.h"
       6             : 
       7             : #include "src/heap/array-buffer-collector.h"
       8             : #include "src/heap/barrier.h"
       9             : #include "src/heap/gc-tracer.h"
      10             : #include "src/heap/heap-inl.h"
      11             : #include "src/heap/item-parallel-job.h"
      12             : #include "src/heap/mark-compact-inl.h"
      13             : #include "src/heap/objects-visiting-inl.h"
      14             : #include "src/heap/scavenger-inl.h"
      15             : #include "src/heap/sweeper.h"
      16             : #include "src/objects-body-descriptors-inl.h"
      17             : #include "src/objects/data-handler-inl.h"
      18             : #include "src/objects/embedder-data-array-inl.h"
      19             : #include "src/transitions-inl.h"
      20             : #include "src/utils-inl.h"
      21             : 
      22             : namespace v8 {
      23             : namespace internal {
      24             : 
      25             : class PageScavengingItem final : public ItemParallelJob::Item {
      26             :  public:
      27      178484 :   explicit PageScavengingItem(MemoryChunk* chunk) : chunk_(chunk) {}
      28      356968 :   ~PageScavengingItem() override = default;
      29             : 
      30      178332 :   void Process(Scavenger* scavenger) { scavenger->ScavengePage(chunk_); }
      31             : 
      32             :  private:
      33             :   MemoryChunk* const chunk_;
      34             : };
      35             : 
      36      105496 : class ScavengingTask final : public ItemParallelJob::Task {
      37             :  public:
      38             :   ScavengingTask(Heap* heap, Scavenger* scavenger, OneshotBarrier* barrier)
      39             :       : ItemParallelJob::Task(heap->isolate()),
      40             :         heap_(heap),
      41             :         scavenger_(scavenger),
      42       52801 :         barrier_(barrier) {}
      43             : 
      44       50876 :   void RunInParallel() final {
      45      254370 :     TRACE_BACKGROUND_GC(
      46             :         heap_->tracer(),
      47             :         GCTracer::BackgroundScope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL);
      48       50852 :     double scavenging_time = 0.0;
      49             :     {
      50       50852 :       barrier_->Start();
      51             :       TimedScope scope(&scavenging_time);
      52             :       PageScavengingItem* item = nullptr;
      53      407584 :       while ((item = GetItem<PageScavengingItem>()) != nullptr) {
      54      178332 :         item->Process(scavenger_);
      55      178120 :         item->MarkFinished();
      56             :       }
      57       52283 :       do {
      58       52237 :         scavenger_->Process(barrier_);
      59       52135 :       } while (!barrier_->Wait());
      60       50935 :       scavenger_->Process();
      61             :     }
      62       50914 :     if (FLAG_trace_parallel_scavenge) {
      63           0 :       PrintIsolate(heap_->isolate(),
      64             :                    "scavenge[%p]: time=%.2f copied=%zu promoted=%zu\n",
      65             :                    static_cast<void*>(this), scavenging_time,
      66           0 :                    scavenger_->bytes_copied(), scavenger_->bytes_promoted());
      67             :     }
      68       50947 :   }
      69             : 
      70             :  private:
      71             :   Heap* const heap_;
      72             :   Scavenger* const scavenger_;
      73             :   OneshotBarrier* const barrier_;
      74             : };
      75             : 
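                      : // In short, the visitor below is applied to every object that has just been
                      : // promoted to the old space. HandleSlot() distinguishes three cases for each
                      : // pointer it finds: a target that is still in the from-page is scavenged, and
                      : // the slot is added to the host chunk's OLD_TO_NEW remembered set when
                      : // ScavengeObject() answers KEEP_SLOT; a target sitting on an evacuation
                      : // candidate has its slot recorded in OLD_TO_OLD, but only when record_slots_
                      : // is set (the caller sets it only for black hosts while incremental compaction
                      : // is active); all remaining slots need no record.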
      76    45275443 : class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
      77             :  public:
      78             :   IterateAndScavengePromotedObjectsVisitor(Scavenger* scavenger,
      79             :                                            bool record_slots)
      80    45229612 :       : scavenger_(scavenger), record_slots_(record_slots) {}
      81             : 
      82      375934 :   V8_INLINE void VisitPointers(HeapObject host, ObjectSlot start,
      83             :                                ObjectSlot end) final {
      84             :     VisitPointersImpl(host, start, end);
      85      375907 :   }
      86             : 
      87           8 :   V8_INLINE void VisitPointers(HeapObject host, MaybeObjectSlot start,
      88             :                                MaybeObjectSlot end) final {
      89             :     VisitPointersImpl(host, start, end);
      90           8 :   }
      91             : 
      92           0 :   V8_INLINE void VisitCodeTarget(Code host, RelocInfo* rinfo) final {
      93           0 :     Code target = Code::GetCodeFromTargetAddress(rinfo->target_address());
      94           0 :     HandleSlot(host, FullHeapObjectSlot(&target), target);
      95           0 :   }
      96           0 :   V8_INLINE void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) final {
      97           0 :     HeapObject heap_object = rinfo->target_object();
      98           0 :     HandleSlot(host, FullHeapObjectSlot(&heap_object), heap_object);
      99           0 :   }
     100             : 
     101      156355 :   inline void VisitEphemeron(HeapObject obj, int entry, ObjectSlot key,
     102             :                              ObjectSlot value) override {
     103             :     DCHECK(Heap::IsLargeObject(obj) || obj->IsEphemeronHashTable());
     104      156355 :     VisitPointer(obj, value);
     105             : 
     106      156355 :     if (ObjectInYoungGeneration(*key)) {
     107             :       // We cannot check the map here, as it might be a large object.
     108         143 :       scavenger_->RememberPromotedEphemeron(
     109         143 :           EphemeronHashTable::unchecked_cast(obj), entry);
     110             :     } else {
     111      156212 :       VisitPointer(obj, key);
     112             :     }
     113      156356 :   }
     114             : 
     115             :  private:
     116             :   template <typename TSlot>
     117             :   V8_INLINE void VisitPointersImpl(HeapObject host, TSlot start, TSlot end) {
     118             :     using THeapObjectSlot = typename TSlot::THeapObjectSlot;
     119             :     // Treat weak references as strong.
     120             :     // TODO(marja): Proper weakness handling in the young generation.
     121  2160335807 :     for (TSlot slot = start; slot < end; ++slot) {
     122      375982 :       typename TSlot::TObject object = *slot;
     123      375988 :       HeapObject heap_object;
     124  2114591793 :       if (object.GetHeapObject(&heap_object)) {
     125      626652 :         HandleSlot(host, THeapObjectSlot(slot), heap_object);
     126             :       }
     127             :     }
     128             :   }
     129             : 
     130             :   template <typename THeapObjectSlot>
     131             :   V8_INLINE void HandleSlot(HeapObject host, THeapObjectSlot slot,
     132             :                             HeapObject target) {
     133             :     static_assert(
     134             :         std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
     135             :             std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
     136             :         "Only FullHeapObjectSlot and HeapObjectSlot are expected here");
     137      313326 :     scavenger_->PageMemoryFence(MaybeObject::FromObject(target));
     138             : 
     139  1349796127 :     if (Heap::InFromPage(target)) {
     140    53575781 :       SlotCallbackResult result = scavenger_->ScavengeObject(slot, target);
     141         890 :       bool success = (*slot)->GetHeapObject(&target);
     142             :       USE(success);
     143             :       DCHECK(success);
     144             : 
     145    53508424 :       if (result == KEEP_SLOT) {
     146             :         SLOW_DCHECK(target->IsHeapObject());
     147     7435038 :         RememberedSet<OLD_TO_NEW>::Insert(MemoryChunk::FromHeapObject(host),
     148             :                                           slot.address());
     149             :       }
     150             :       SLOW_DCHECK(!MarkCompactCollector::IsOnEvacuationCandidate(
     151             :           HeapObject::cast(target)));
     152  1306524326 :     } else if (record_slots_ && MarkCompactCollector::IsOnEvacuationCandidate(
     153             :                                     HeapObject::cast(target))) {
     154             :       // We should never try to record off-heap slots.
     155             :       DCHECK((std::is_same<THeapObjectSlot, HeapObjectSlot>::value));
     156             :       // We cannot call MarkCompactCollector::RecordSlot because that checks
     157             :       // that the host page is not in young generation, which does not hold
     158             :       // for pending large pages.
     159        1443 :       RememberedSet<OLD_TO_OLD>::Insert(MemoryChunk::FromHeapObject(host),
     160             :                                         slot.address());
     161             :     }
     162             :   }
     163             : 
     164             :   Scavenger* const scavenger_;
     165             :   const bool record_slots_;
     166             : };
     167             : 
     168             : namespace {
     169             : 
     170             : V8_INLINE bool IsUnscavengedHeapObject(Heap* heap, Object object) {
     171      137539 :   return Heap::InFromPage(object) &&
     172       68762 :          !HeapObject::cast(object)->map_word().IsForwardingAddress();
     173             : }
     174             : 
     175             : // Same as IsUnscavengedHeapObject() above but specialized for HeapObjects.
     176             : V8_INLINE bool IsUnscavengedHeapObject(Heap* heap, HeapObject heap_object) {
     177      140100 :   return Heap::InFromPage(heap_object) &&
     178             :          !heap_object->map_word().IsForwardingAddress();
     179             : }
     180             : 
     181       68777 : bool IsUnscavengedHeapObjectSlot(Heap* heap, FullObjectSlot p) {
     182       68777 :   return IsUnscavengedHeapObject(heap, *p);
     183             : }
     184             : 
     185             : }  // namespace
     186             : 
     187       27814 : class ScavengeWeakObjectRetainer : public WeakObjectRetainer {
     188             :  public:
     189       63864 :   Object RetainAs(Object object) override {
     190       63864 :     if (!Heap::InFromPage(object)) {
     191       63864 :       return object;
     192             :     }
     193             : 
     194             :     MapWord map_word = HeapObject::cast(object)->map_word();
     195           0 :     if (map_word.IsForwardingAddress()) {
     196           0 :       return map_word.ToForwardingAddress();
     197             :     }
     198           0 :     return Object();
     199             :   }
     200             : };
     201             : 
     202       62422 : ScavengerCollector::ScavengerCollector(Heap* heap)
     203      124844 :     : isolate_(heap->isolate()), heap_(heap), parallel_scavenge_semaphore_(0) {}
     204             : 
     205       27814 : void ScavengerCollector::CollectGarbage() {
     206             :   DCHECK(surviving_new_large_objects_.empty());
     207       27814 :   ItemParallelJob job(isolate_->cancelable_task_manager(),
     208       83442 :                       &parallel_scavenge_semaphore_);
     209             :   const int kMainThreadId = 0;
     210             :   Scavenger* scavengers[kMaxScavengerTasks];
     211       27814 :   const bool is_logging = isolate_->LogObjectRelocation();
     212       27814 :   const int num_scavenge_tasks = NumberOfScavengeTasks();
     213             :   OneshotBarrier barrier(base::TimeDelta::FromMilliseconds(kMaxWaitTimeMs));
     214       55628 :   Scavenger::CopiedList copied_list(num_scavenge_tasks);
     215             :   Scavenger::PromotionList promotion_list(num_scavenge_tasks);
     216       55628 :   EphemeronTableList ephemeron_table_list(num_scavenge_tasks);
     217      133416 :   for (int i = 0; i < num_scavenge_tasks; i++) {
     218       52801 :     scavengers[i] = new Scavenger(this, heap_, is_logging, &copied_list,
     219      105602 :                                   &promotion_list, &ephemeron_table_list, i);
     220      105602 :     job.AddTask(new ScavengingTask(heap_, scavengers[i], &barrier));
     221             :   }
     222             : 
     223             :   {
     224       27814 :     Sweeper* sweeper = heap_->mark_compact_collector()->sweeper();
     225             :     // Pause the concurrent sweeper.
     226       55628 :     Sweeper::PauseOrCompleteScope pause_scope(sweeper);
     227             :     // Filter out pages from the sweeper that need to be processed for old to
     228             :     // new slots by the Scavenger. After processing, the Scavenger adds back
      229             :     // pages that are still unswept. This way the Scavenger has exclusive
     230             :     // access to the slots of a page and can completely avoid any locks on
     231             :     // the page itself.
     232       55628 :     Sweeper::FilterSweepingPagesScope filter_scope(sweeper, pause_scope);
     233             :     filter_scope.FilterOldSpaceSweepingPages(
     234       28775 :         [](Page* page) { return !page->ContainsSlots<OLD_TO_NEW>(); });
     235             :     RememberedSet<OLD_TO_NEW>::IterateMemoryChunks(
     236      384782 :         heap_, [&job](MemoryChunk* chunk) {
     237      178484 :           job.AddItem(new PageScavengingItem(chunk));
     238      234112 :         });
     239             : 
     240       27814 :     RootScavengeVisitor root_scavenge_visitor(scavengers[kMainThreadId]);
     241             : 
     242             :     {
     243             :       // Identify weak unmodified handles. Requires an unmodified graph.
     244      139070 :       TRACE_GC(
     245             :           heap_->tracer(),
     246             :           GCTracer::Scope::SCAVENGER_SCAVENGE_WEAK_GLOBAL_HANDLES_IDENTIFY);
     247       27814 :       isolate_->global_handles()->IdentifyWeakUnmodifiedObjects(
     248       27814 :           &JSObject::IsUnmodifiedApiObject);
     249             :     }
     250             :     {
     251             :       // Copy roots.
     252      139070 :       TRACE_GC(heap_->tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE_ROOTS);
     253       27814 :       heap_->IterateRoots(&root_scavenge_visitor, VISIT_ALL_IN_SCAVENGE);
     254             :     }
     255             :     {
     256             :       // Parallel phase scavenging all copied and promoted objects.
     257      139070 :       TRACE_GC(heap_->tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE_PARALLEL);
     258       27814 :       job.Run();
     259             :       DCHECK(copied_list.IsEmpty());
     260             :       DCHECK(promotion_list.IsEmpty());
     261             :     }
     262             :     {
     263             :       // Scavenge weak global handles.
     264      139070 :       TRACE_GC(heap_->tracer(),
     265             :                GCTracer::Scope::SCAVENGER_SCAVENGE_WEAK_GLOBAL_HANDLES_PROCESS);
     266       27814 :       isolate_->global_handles()->MarkYoungWeakUnmodifiedObjectsPending(
     267       27814 :           &IsUnscavengedHeapObjectSlot);
     268       27814 :       isolate_->global_handles()->IterateYoungWeakUnmodifiedRootsForFinalizers(
     269       27814 :           &root_scavenge_visitor);
     270       27814 :       scavengers[kMainThreadId]->Process();
     271             : 
     272             :       DCHECK(copied_list.IsEmpty());
     273             :       DCHECK(promotion_list.IsEmpty());
     274       27814 :       isolate_->global_handles()
     275             :           ->IterateYoungWeakUnmodifiedRootsForPhantomHandles(
     276       27814 :               &root_scavenge_visitor, &IsUnscavengedHeapObjectSlot);
     277             :     }
     278             : 
     279             :     {
     280             :       // Finalize parallel scavenging.
     281      139070 :       TRACE_GC(heap_->tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE_FINALIZE);
     282             : 
     283             :       DCHECK(surviving_new_large_objects_.empty());
     284             : 
     285      133416 :       for (int i = 0; i < num_scavenge_tasks; i++) {
     286       52801 :         scavengers[i]->Finalize();
     287       52801 :         delete scavengers[i];
     288             :       }
     289             : 
     290       27814 :       HandleSurvivingNewLargeObjects();
     291             :     }
     292             :   }
     293             : 
     294             :   {
     295             :     // Update references into new space
     296      139070 :     TRACE_GC(heap_->tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE_UPDATE_REFS);
     297       27814 :     heap_->UpdateYoungReferencesInExternalStringTable(
     298       27814 :         &Heap::UpdateYoungReferenceInExternalStringTableEntry);
     299             : 
     300       55628 :     heap_->incremental_marking()->UpdateMarkingWorklistAfterScavenge();
     301             :   }
     302             : 
     303       27814 :   if (FLAG_concurrent_marking) {
     304             :     // Ensure that concurrent marker does not track pages that are
     305             :     // going to be unmapped.
     306      272533 :     for (Page* p :
     307       26947 :          PageRange(heap_->new_space()->from_space().first_page(), nullptr)) {
     308      491172 :       heap_->concurrent_marking()->ClearMemoryChunkData(p);
     309             :     }
     310             :   }
     311             : 
     312       27814 :   ProcessWeakReferences(&ephemeron_table_list);
     313             : 
     314             :   // Set age mark.
     315       27814 :   heap_->new_space_->set_age_mark(heap_->new_space()->top());
     316             : 
     317             :   {
     318      139070 :     TRACE_GC(heap_->tracer(), GCTracer::Scope::SCAVENGER_PROCESS_ARRAY_BUFFERS);
     319       27814 :     ArrayBufferTracker::PrepareToFreeDeadInNewSpace(heap_);
     320             :   }
     321       55628 :   heap_->array_buffer_collector()->FreeAllocations();
     322             : 
      323             :   // Since we promote all surviving large objects immediately, all remaining
     324             :   // large objects must be dead.
     325             :   // TODO(hpayer): Don't free all as soon as we have an intermediate generation.
     326       55628 :   heap_->new_lo_space()->FreeDeadObjects([](HeapObject) { return true; });
     327             : 
     328      207545 :   RememberedSet<OLD_TO_NEW>::IterateMemoryChunks(heap_, [](MemoryChunk* chunk) {
     329      179731 :     if (chunk->SweepingDone()) {
     330      178954 :       RememberedSet<OLD_TO_NEW>::FreeEmptyBuckets(chunk);
     331             :     } else {
     332         777 :       RememberedSet<OLD_TO_NEW>::PreFreeEmptyBuckets(chunk);
     333             :     }
     334      207545 :   });
     335             : 
     336             :   // Update how much has survived scavenge.
     337       27814 :   heap_->IncrementYoungSurvivorsCounter(heap_->SurvivedYoungObjectSize());
     338       27814 : }
     339             : 
     340       27814 : void ScavengerCollector::HandleSurvivingNewLargeObjects() {
     341       31806 :   for (SurvivingNewLargeObjectMapEntry update_info :
     342             :        surviving_new_large_objects_) {
     343             :     HeapObject object = update_info.first;
     344             :     Map map = update_info.second;
     345             :     // Order is important here. We have to re-install the map to have access
     346             :     // to meta-data like size during page promotion.
     347             :     object->set_map_word(MapWord::FromMap(map));
     348             :     LargePage* page = LargePage::FromHeapObject(object);
     349        3992 :     heap_->lo_space()->PromoteNewLargeObject(page);
     350             :   }
     351             :   surviving_new_large_objects_.clear();
     352       27814 : }
     353             : 
     354       52801 : void ScavengerCollector::MergeSurvivingNewLargeObjects(
     355             :     const SurvivingNewLargeObjectsMap& objects) {
     356       56793 :   for (SurvivingNewLargeObjectMapEntry object : objects) {
     357             :     bool success = surviving_new_large_objects_.insert(object).second;
     358             :     USE(success);
     359             :     DCHECK(success);
     360             :   }
     361       52801 : }
     362             : 
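                      : // A worked example of the heuristic below, assuming kMaxScavengerTasks == 8
                      : // (the constant's value is an assumption here, not visible in this file): with
                      : // FLAG_parallel_scavenge enabled, a 16 MB new space and 7 worker threads,
                      : //   num_scavenge_tasks = 16 MB / MB                  = 16
                      : //   num_cores          = 7 + 1                       = 8
                      : //   tasks              = Max(1, Min(Min(16, 8), 8))  = 8
                      : // If the old generation cannot expand by tasks * Page::kPageSize, the count is
                      : // clamped to 1 to keep memory usage low near the heap limit.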
     363       27814 : int ScavengerCollector::NumberOfScavengeTasks() {
     364       27814 :   if (!FLAG_parallel_scavenge) return 1;
     365             :   const int num_scavenge_tasks =
     366       27394 :       static_cast<int>(heap_->new_space()->TotalCapacity()) / MB;
     367       27394 :   static int num_cores = V8::GetCurrentPlatform()->NumberOfWorkerThreads() + 1;
     368             :   int tasks =
     369       27394 :       Max(1, Min(Min(num_scavenge_tasks, kMaxScavengerTasks), num_cores));
     370       27394 :   if (!heap_->CanExpandOldGeneration(
     371       27394 :           static_cast<size_t>(tasks * Page::kPageSize))) {
     372             :     // Optimize for memory usage near the heap limit.
     373             :     tasks = 1;
     374             :   }
     375             :   return tasks;
     376             : }
     377             : 
     378       52801 : Scavenger::Scavenger(ScavengerCollector* collector, Heap* heap, bool is_logging,
     379             :                      CopiedList* copied_list, PromotionList* promotion_list,
     380             :                      EphemeronTableList* ephemeron_table_list, int task_id)
     381             :     : collector_(collector),
     382             :       heap_(heap),
     383             :       promotion_list_(promotion_list, task_id),
     384             :       copied_list_(copied_list, task_id),
     385             :       ephemeron_table_list_(ephemeron_table_list, task_id),
     386             :       local_pretenuring_feedback_(kInitialLocalPretenuringFeedbackCapacity),
     387             :       copied_size_(0),
     388             :       promoted_size_(0),
     389             :       allocator_(heap),
     390             :       is_logging_(is_logging),
     391             :       is_incremental_marking_(heap->incremental_marking()->IsMarking()),
     392      264005 :       is_compacting_(heap->incremental_marking()->IsCompacting()) {}
     393             : 
     394    45229612 : void Scavenger::IterateAndScavengePromotedObject(HeapObject target, Map map,
     395             :                                                  int size) {
      396             :   // We do not collect slots on new-space objects during mutation, so we have
      397             :   // to scan for pointers to evacuation candidates when we promote objects.
      398             :   // However, slots should only be recorded in black objects: a grey object's
      399             :   // slots would be rescanned anyway, and a white object might not survive
      400             :   // until the end of the collection, so recording its slots would violate
      401             :   // the invariant.
     402             :   const bool record_slots =
     403    45529314 :       is_compacting_ &&
     404             :       heap()->incremental_marking()->atomic_marking_state()->IsBlack(target);
     405             :   IterateAndScavengePromotedObjectsVisitor visitor(this, record_slots);
     406             :   target->IterateBodyFast(map, size, &visitor);
     407    45275443 : }
     408             : 
     409         143 : void Scavenger::RememberPromotedEphemeron(EphemeronHashTable table, int entry) {
     410             :   auto indices =
     411         429 :       ephemeron_remembered_set_.insert({table, std::unordered_set<int>()});
     412             :   indices.first->second.insert(entry);
     413         143 : }
     414             : 
     415      178148 : void Scavenger::AddPageToSweeperIfNecessary(MemoryChunk* page) {
     416             :   AllocationSpace space = page->owner()->identity();
     417      305478 :   if ((space == OLD_SPACE) && !page->SweepingDone()) {
     418             :     heap()->mark_compact_collector()->sweeper()->AddPage(
     419             :         space, reinterpret_cast<Page*>(page),
     420         411 :         Sweeper::READD_TEMPORARY_REMOVED_PAGE);
     421             :   }
     422      178148 : }
     423             : 
     424      178250 : void Scavenger::ScavengePage(MemoryChunk* page) {
     425      178250 :   CodePageMemoryModificationScope memory_modification_scope(page);
     426             :   RememberedSet<OLD_TO_NEW>::Iterate(page,
     427    32743783 :                                      [this](MaybeObjectSlot addr) {
     428    32743783 :                                        return CheckAndScavengeObject(heap_,
     429             :                                                                      addr);
     430    32743783 :                                      },
     431      178226 :                                      SlotSet::KEEP_EMPTY_BUCKETS);
     432             :   RememberedSet<OLD_TO_NEW>::IterateTyped(
     433             :       page, [=](SlotType type, Address addr) {
     434      107736 :         return UpdateTypedSlotHelper::UpdateTypedSlot(
     435      215421 :             heap_, type, addr, [this](FullMaybeObjectSlot slot) {
     436             :               return CheckAndScavengeObject(heap(), slot);
     437      107685 :             });
     438      285929 :       });
     439             : 
     440      178174 :   AddPageToSweeperIfNecessary(page);
     441      178139 : }
     442             : 
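                      : // Drains the scavenger's worklists to a fixed point: objects popped from
                      : // copied_list_ (copied within the new space) are revisited with the
                      : // ScavengeVisitor, while entries popped from promotion_list_ (objects promoted
                      : // to the old space) are scanned via IterateAndScavengePromotedObject(). When a
                      : // barrier is supplied, other tasks are notified after every kInterruptThreshold
                      : // processed objects, but only while the respective global pool still has work.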
     443      208855 : void Scavenger::Process(OneshotBarrier* barrier) {
     444             :   ScavengeVisitor scavenge_visitor(this);
     445             : 
     446             :   const bool have_barrier = barrier != nullptr;
     447             :   bool done;
     448             :   size_t objects = 0;
     449             :   do {
     450             :     done = true;
     451      241301 :     ObjectAndSize object_and_size;
     452   109821427 :     while (promotion_list_.ShouldEagerlyProcessPromotionList() &&
     453             :            copied_list_.Pop(&object_and_size)) {
     454             :       scavenge_visitor.Visit(object_and_size.first);
     455             :       done = false;
     456    54688284 :       if (have_barrier && ((++objects % kInterruptThreshold) == 0)) {
     457      421211 :         if (!copied_list_.IsGlobalPoolEmpty()) {
     458      136467 :           barrier->NotifyAll();
     459             :         }
     460             :       }
     461             :     }
     462             : 
     463             :     struct PromotionListEntry entry;
     464    90696944 :     while (promotion_list_.Pop(&entry)) {
     465    45251711 :       HeapObject target = entry.heap_object;
     466             :       DCHECK(!target->IsMap());
     467    45251711 :       IterateAndScavengePromotedObject(target, entry.map, entry.size);
     468             :       done = false;
     469    45261535 :       if (have_barrier && ((++objects % kInterruptThreshold) == 0)) {
     470      350168 :         if (!promotion_list_.IsGlobalPoolEmpty()) {
     471      117259 :           barrier->NotifyAll();
     472             :         }
     473             :       }
     474             :     }
     475      163241 :   } while (!done);
     476      130795 : }
     477             : 
     478       27814 : void ScavengerCollector::ProcessWeakReferences(
     479             :     EphemeronTableList* ephemeron_table_list) {
     480       27814 :   ScavengeWeakObjectRetainer weak_object_retainer;
     481       27814 :   heap_->ProcessYoungWeakReferences(&weak_object_retainer);
     482       27814 :   ClearYoungEphemerons(ephemeron_table_list);
     483       27814 :   ClearOldEphemerons();
     484       27814 : }
     485             : 
     486             : // Clear ephemeron entries from EphemeronHashTables in new-space whenever the
     487             : // entry has a dead new-space key.
     488       27814 : void ScavengerCollector::ClearYoungEphemerons(
     489             :     EphemeronTableList* ephemeron_table_list) {
     490       34646 :   ephemeron_table_list->Iterate([this](EphemeronHashTable table) {
     491      313614 :     for (int i = 0; i < table->Capacity(); i++) {
     492             :       // Keys in EphemeronHashTables must be heap objects.
     493             :       HeapObjectSlot key_slot(
     494             :           table->RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(i)));
     495             :       HeapObject key = key_slot.ToHeapObject();
     496      139484 :       if (IsUnscavengedHeapObject(heap_, key)) {
     497          16 :         table->RemoveEntry(i);
     498             :       } else {
     499      139468 :         HeapObject forwarded = ForwardingAddress(key);
     500             :         key_slot.StoreHeapObject(forwarded);
     501             :       }
     502             :     }
     503       62460 :   });
     504             :   ephemeron_table_list->Clear();
     505       27814 : }
     506             : 
     507             : // Clear ephemeron entries from EphemeronHashTables in old-space whenever the
     508             : // entry has a dead new-space key.
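                      : // Keys that survive are updated to their forwarding address; entries whose key
                      : // was promoted out of the young generation are additionally dropped from
                      : // heap_->ephemeron_remembered_set_, which only needs to track tables that
                      : // still hold young-generation keys.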
     509       27814 : void ScavengerCollector::ClearOldEphemerons() {
     510       27843 :   for (auto it = heap_->ephemeron_remembered_set_.begin();
     511             :        it != heap_->ephemeron_remembered_set_.end();) {
     512          29 :     EphemeronHashTable table = it->first;
     513             :     auto& indices = it->second;
     514         177 :     for (auto iti = indices.begin(); iti != indices.end();) {
     515             :       // Keys in EphemeronHashTables must be heap objects.
     516             :       HeapObjectSlot key_slot(
     517         148 :           table->RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(*iti)));
     518             :       HeapObject key = key_slot.ToHeapObject();
     519         148 :       if (IsUnscavengedHeapObject(heap_, key)) {
     520           6 :         table->RemoveEntry(*iti);
     521             :         iti = indices.erase(iti);
     522             :       } else {
     523         142 :         HeapObject forwarded = ForwardingAddress(key);
     524             :         key_slot.StoreHeapObject(forwarded);
     525         142 :         if (!Heap::InYoungGeneration(forwarded)) {
     526             :           iti = indices.erase(iti);
     527             :         } else {
     528             :           ++iti;
     529             :         }
     530             :       }
     531             :     }
     532             : 
     533          29 :     if (indices.size() == 0) {
     534          24 :       it = heap_->ephemeron_remembered_set_.erase(it);
     535             :     } else {
     536             :       ++it;
     537             :     }
     538             :   }
     539       27814 : }
     540             : 
     541       52801 : void Scavenger::Finalize() {
     542       52801 :   heap()->MergeAllocationSitePretenuringFeedback(local_pretenuring_feedback_);
     543       52801 :   heap()->IncrementSemiSpaceCopiedObjectSize(copied_size_);
     544       52801 :   heap()->IncrementPromotedObjectsSize(promoted_size_);
     545       52801 :   collector_->MergeSurvivingNewLargeObjects(surviving_new_large_objects_);
     546       52801 :   allocator_.Finalize();
     547             :   ephemeron_table_list_.FlushToGlobal();
     548       52825 :   for (auto it = ephemeron_remembered_set_.begin();
     549             :        it != ephemeron_remembered_set_.end(); ++it) {
     550             :     auto insert_result = heap()->ephemeron_remembered_set_.insert(
     551          72 :         {it->first, std::unordered_set<int>()});
     552         167 :     for (int entry : it->second) {
     553             :       insert_result.first->second.insert(entry);
     554             :     }
     555             :   }
     556       52801 : }
     557             : 
     558           0 : void Scavenger::AddEphemeronHashTable(EphemeronHashTable table) {
     559             :   ephemeron_table_list_.Push(table);
     560           0 : }
     561             : 
     562    44958042 : void RootScavengeVisitor::VisitRootPointer(Root root, const char* description,
     563             :                                            FullObjectSlot p) {
     564             :   DCHECK(!HasWeakHeapObjectTag(*p));
     565    44958042 :   ScavengePointer(p);
     566    44958042 : }
     567             : 
     568     2393214 : void RootScavengeVisitor::VisitRootPointers(Root root, const char* description,
     569             :                                             FullObjectSlot start,
     570             :                                             FullObjectSlot end) {
     571             :   // Copy all HeapObject pointers in [start, end)
     572    59562205 :   for (FullObjectSlot p = start; p < end; ++p) ScavengePointer(p);
     573     2393214 : }
     574             : 
     575    99733819 : void RootScavengeVisitor::ScavengePointer(FullObjectSlot p) {
     576             :   Object object = *p;
     577             :   DCHECK(!HasWeakHeapObjectTag(object));
     578    99733819 :   if (Heap::InYoungGeneration(object)) {
     579     9462346 :     scavenger_->ScavengeObject(FullHeapObjectSlot(p), HeapObject::cast(object));
     580             :   }
     581    99733819 : }
     582             : 
     583           0 : RootScavengeVisitor::RootScavengeVisitor(Scavenger* scavenger)
     584       27814 :     : scavenger_(scavenger) {}
     585             : 
     586           0 : ScavengeVisitor::ScavengeVisitor(Scavenger* scavenger)
     587      208855 :     : scavenger_(scavenger) {}
     588             : 
     589             : }  // namespace internal
     590      121996 : }  // namespace v8

Generated by: LCOV version 1.10