LCOV - code coverage report
Current view: top level - src/heap - scavenger.cc (source / functions)
Test: app.info
Date: 2019-04-18

              Hit    Total    Coverage
Lines:        216      233      92.7 %
Functions:     30       41      73.2 %

          Line data    Source code
       1             : // Copyright 2015 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #include "src/heap/scavenger.h"
       6             : 
       7             : #include "src/heap/array-buffer-collector.h"
       8             : #include "src/heap/barrier.h"
       9             : #include "src/heap/gc-tracer.h"
      10             : #include "src/heap/heap-inl.h"
      11             : #include "src/heap/item-parallel-job.h"
      12             : #include "src/heap/mark-compact-inl.h"
      13             : #include "src/heap/objects-visiting-inl.h"
      14             : #include "src/heap/scavenger-inl.h"
      15             : #include "src/heap/sweeper.h"
      16             : #include "src/objects-body-descriptors-inl.h"
      17             : #include "src/objects/data-handler-inl.h"
      18             : #include "src/objects/embedder-data-array-inl.h"
      19             : #include "src/transitions-inl.h"
      20             : #include "src/utils-inl.h"
      21             : 
      22             : namespace v8 {
      23             : namespace internal {
      24             : 
      25             : class PageScavengingItem final : public ItemParallelJob::Item {
      26             :  public:
      27      184692 :   explicit PageScavengingItem(MemoryChunk* chunk) : chunk_(chunk) {}
      28      369384 :   ~PageScavengingItem() override = default;
      29             : 
      30      184558 :   void Process(Scavenger* scavenger) { scavenger->ScavengePage(chunk_); }
      31             : 
      32             :  private:
      33             :   MemoryChunk* const chunk_;
      34             : };
      35             : 
      36      113212 : class ScavengingTask final : public ItemParallelJob::Task {
      37             :  public:
      38             :   ScavengingTask(Heap* heap, Scavenger* scavenger, OneshotBarrier* barrier)
      39             :       : ItemParallelJob::Task(heap->isolate()),
      40             :         heap_(heap),
      41             :         scavenger_(scavenger),
      42       56662 :         barrier_(barrier) {}
      43             : 
      44       54952 :   void RunInParallel() final {
      45      274805 :     TRACE_BACKGROUND_GC(
      46             :         heap_->tracer(),
      47             :         GCTracer::BackgroundScope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL);
      48       54951 :     double scavenging_time = 0.0;
      49             :     {
      50       54951 :       barrier_->Start();
      51             :       TimedScope scope(&scavenging_time);
      52             :       PageScavengingItem* item = nullptr;
      53      424101 :       while ((item = GetItem<PageScavengingItem>()) != nullptr) {
      54      184558 :         item->Process(scavenger_);
      55      184357 :         item->MarkFinished();
      56             :       }
      57       56496 :       do {
      58       56447 :         scavenger_->Process(barrier_);
      59       56314 :       } while (!barrier_->Wait());
      60       55023 :       scavenger_->Process();
      61             :     }
      62       54999 :     if (FLAG_trace_parallel_scavenge) {
      63           0 :       PrintIsolate(heap_->isolate(),
      64             :                    "scavenge[%p]: time=%.2f copied=%zu promoted=%zu\n",
      65             :                    static_cast<void*>(this), scavenging_time,
      66           0 :                    scavenger_->bytes_copied(), scavenger_->bytes_promoted());
      67             :     }
      68       55029 :   }
      69             : 
      70             :  private:
      71             :   Heap* const heap_;
      72             :   Scavenger* const scavenger_;
      73             :   OneshotBarrier* const barrier_;
      74             : };
      75             : 
      76    44825990 : class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
      77             :  public:
      78             :   IterateAndScavengePromotedObjectsVisitor(Scavenger* scavenger,
      79             :                                            bool record_slots)
      80    44765463 :       : scavenger_(scavenger), record_slots_(record_slots) {}
      81             : 
      82      376219 :   V8_INLINE void VisitPointers(HeapObject host, ObjectSlot start,
      83             :                                ObjectSlot end) final {
      84             :     VisitPointersImpl(host, start, end);
      85      376276 :   }
      86             : 
      87           8 :   V8_INLINE void VisitPointers(HeapObject host, MaybeObjectSlot start,
      88             :                                MaybeObjectSlot end) final {
      89             :     VisitPointersImpl(host, start, end);
      90           8 :   }
      91             : 
      92           0 :   V8_INLINE void VisitCodeTarget(Code host, RelocInfo* rinfo) final {
      93           0 :     Code target = Code::GetCodeFromTargetAddress(rinfo->target_address());
      94           0 :     HandleSlot(host, FullHeapObjectSlot(&target), target);
      95           0 :   }
      96           0 :   V8_INLINE void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) final {
      97           0 :     HeapObject heap_object = rinfo->target_object();
      98           0 :     HandleSlot(host, FullHeapObjectSlot(&heap_object), heap_object);
      99           0 :   }
     100             : 
     101      156714 :   inline void VisitEphemeron(HeapObject obj, int entry, ObjectSlot key,
     102             :                              ObjectSlot value) override {
     103             :     DCHECK(Heap::IsLargeObject(obj) || obj->IsEphemeronHashTable());
     104      156714 :     VisitPointer(obj, value);
     105             : 
     106      156713 :     if (ObjectInYoungGeneration(*key)) {
     107             :       // We cannot check the map here, as it might be a large object.
     108         143 :       scavenger_->RememberPromotedEphemeron(
     109         143 :           EphemeronHashTable::unchecked_cast(obj), entry);
     110             :     } else {
     111      156570 :       VisitPointer(obj, key);
     112             :     }
     113      156712 :   }
     114             : 
     115             :  private:
     116             :   template <typename TSlot>
     117             :   V8_INLINE void VisitPointersImpl(HeapObject host, TSlot start, TSlot end) {
     118             :     using THeapObjectSlot = typename TSlot::THeapObjectSlot;
     119             :     // Treat weak references as strong.
     120             :     // TODO(marja): Proper weakness handling in the young generation.
     121  2202913407 :     for (TSlot slot = start; slot < end; ++slot) {
     122      376349 :       typename TSlot::TObject object = *slot;
     123      376331 :       HeapObject heap_object;
     124  2157616330 :       if (object.GetHeapObject(&heap_object)) {
     125      628174 :         HandleSlot(host, THeapObjectSlot(slot), heap_object);
     126             :       }
     127             :     }
     128             :   }
     129             : 
     130             :   template <typename THeapObjectSlot>
     131             :   V8_INLINE void HandleSlot(HeapObject host, THeapObjectSlot slot,
     132             :                             HeapObject target) {
     133             :     static_assert(
     134             :         std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
     135             :             std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
     136             :         "Only FullHeapObjectSlot and HeapObjectSlot are expected here");
     137      314087 :     scavenger_->PageMemoryFence(MaybeObject::FromObject(target));
     138             : 
     139  1358733077 :     if (Heap::InFromPage(target)) {
     140    53378962 :       SlotCallbackResult result = scavenger_->ScavengeObject(slot, target);
     141         935 :       bool success = (*slot)->GetHeapObject(&target);
     142             :       USE(success);
     143             :       DCHECK(success);
     144             : 
     145    53367350 :       if (result == KEEP_SLOT) {
     146             :         SLOW_DCHECK(target->IsHeapObject());
     147     7478543 :         RememberedSet<OLD_TO_NEW>::Insert(MemoryChunk::FromHeapObject(host),
     148             :                                           slot.address());
     149             :       }
     150             :       SLOW_DCHECK(!MarkCompactCollector::IsOnEvacuationCandidate(
     151             :           HeapObject::cast(target)));
     152  1313911792 :     } else if (record_slots_ && MarkCompactCollector::IsOnEvacuationCandidate(
     153             :                                     HeapObject::cast(target))) {
     154             :       // We should never try to record off-heap slots.
     155             :       DCHECK((std::is_same<THeapObjectSlot, HeapObjectSlot>::value));
     156             :       // We cannot call MarkCompactCollector::RecordSlot because that checks
     157             :       // that the host page is not in young generation, which does not hold
     158             :       // for pending large pages.
     159          16 :       RememberedSet<OLD_TO_OLD>::Insert(MemoryChunk::FromHeapObject(host),
     160             :                                         slot.address());
     161             :     }
     162             :   }
     163             : 
     164             :   Scavenger* const scavenger_;
     165             :   const bool record_slots_;
     166             : };
     167             : 
     168             : namespace {
     169             : 
     170             : V8_INLINE bool IsUnscavengedHeapObject(Heap* heap, Object object) {
     171      137143 :   return Heap::InFromPage(object) &&
     172       68564 :          !HeapObject::cast(object)->map_word().IsForwardingAddress();
     173             : }
     174             : 
     175             : // Same as IsUnscavengedHeapObject() above but specialized for HeapObjects.
     176             : V8_INLINE bool IsUnscavengedHeapObject(Heap* heap, HeapObject heap_object) {
     177      140276 :   return Heap::InFromPage(heap_object) &&
     178             :          !heap_object->map_word().IsForwardingAddress();
     179             : }
     180             : 
     181       68579 : bool IsUnscavengedHeapObjectSlot(Heap* heap, FullObjectSlot p) {
     182       68579 :   return IsUnscavengedHeapObject(heap, *p);
     183             : }
     184             : 
     185             : }  // namespace
     186             : 
     187       29178 : class ScavengeWeakObjectRetainer : public WeakObjectRetainer {
     188             :  public:
     189       65089 :   Object RetainAs(Object object) override {
     190       65089 :     if (!Heap::InFromPage(object)) {
     191       65089 :       return object;
     192             :     }
     193             : 
     194             :     MapWord map_word = HeapObject::cast(object)->map_word();
     195           0 :     if (map_word.IsForwardingAddress()) {
     196           0 :       return map_word.ToForwardingAddress();
     197             :     }
     198           0 :     return Object();
     199             :   }
     200             : };
     201             : 
     202       62442 : ScavengerCollector::ScavengerCollector(Heap* heap)
     203      124884 :     : isolate_(heap->isolate()), heap_(heap), parallel_scavenge_semaphore_(0) {}
     204             : 
     205       29178 : void ScavengerCollector::CollectGarbage() {
     206             :   DCHECK(surviving_new_large_objects_.empty());
     207       29178 :   ItemParallelJob job(isolate_->cancelable_task_manager(),
     208       87534 :                       &parallel_scavenge_semaphore_);
     209             :   const int kMainThreadId = 0;
     210             :   Scavenger* scavengers[kMaxScavengerTasks];
     211       29178 :   const bool is_logging = isolate_->LogObjectRelocation();
     212       29178 :   const int num_scavenge_tasks = NumberOfScavengeTasks();
     213             :   OneshotBarrier barrier(base::TimeDelta::FromMilliseconds(kMaxWaitTimeMs));
     214       58356 :   Scavenger::CopiedList copied_list(num_scavenge_tasks);
     215             :   Scavenger::PromotionList promotion_list(num_scavenge_tasks);
     216       58356 :   EphemeronTableList ephemeron_table_list(num_scavenge_tasks);
     217      142502 :   for (int i = 0; i < num_scavenge_tasks; i++) {
     218       56662 :     scavengers[i] = new Scavenger(this, heap_, is_logging, &copied_list,
     219      113324 :                                   &promotion_list, &ephemeron_table_list, i);
     220      113324 :     job.AddTask(new ScavengingTask(heap_, scavengers[i], &barrier));
     221             :   }
     222             : 
     223             :   {
     224       29178 :     Sweeper* sweeper = heap_->mark_compact_collector()->sweeper();
     225             :     // Pause the concurrent sweeper.
     226       58356 :     Sweeper::PauseOrCompleteScope pause_scope(sweeper);
     227             :     // Filter out pages from the sweeper that need to be processed for old to
     228             :     // new slots by the Scavenger. After processing, the Scavenger adds back
     229             :     // pages that are still unswept. This way the Scavenger has exclusive
     230             :     // access to the slots of a page and can completely avoid any locks on
     231             :     // the page itself.
     232       58356 :     Sweeper::FilterSweepingPagesScope filter_scope(sweeper, pause_scope);
     233             :     filter_scope.FilterOldSpaceSweepingPages(
     234       30121 :         [](Page* page) { return !page->ContainsSlots<OLD_TO_NEW>(); });
     235             :     RememberedSet<OLD_TO_NEW>::IterateMemoryChunks(
     236      398562 :         heap_, [&job](MemoryChunk* chunk) {
     237      184692 :           job.AddItem(new PageScavengingItem(chunk));
     238      243048 :         });
     239             : 
     240       29178 :     RootScavengeVisitor root_scavenge_visitor(scavengers[kMainThreadId]);
     241             : 
     242             :     {
     243             :       // Identify weak unmodified handles. Requires an unmodified graph.
     244      145890 :       TRACE_GC(
     245             :           heap_->tracer(),
     246             :           GCTracer::Scope::SCAVENGER_SCAVENGE_WEAK_GLOBAL_HANDLES_IDENTIFY);
     247       29178 :       isolate_->global_handles()->IdentifyWeakUnmodifiedObjects(
     248       29178 :           &JSObject::IsUnmodifiedApiObject);
     249             :     }
     250             :     {
     251             :       // Copy roots.
     252      145890 :       TRACE_GC(heap_->tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE_ROOTS);
     253       29178 :       heap_->IterateRoots(&root_scavenge_visitor, VISIT_ALL_IN_SCAVENGE);
     254             :     }
     255             :     {
     256             :       // Parallel phase scavenging all copied and promoted objects.
     257      145890 :       TRACE_GC(heap_->tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE_PARALLEL);
     258       29178 :       job.Run();
     259             :       DCHECK(copied_list.IsEmpty());
     260             :       DCHECK(promotion_list.IsEmpty());
     261             :     }
     262             :     {
     263             :       // Scavenge weak global handles.
     264      145890 :       TRACE_GC(heap_->tracer(),
     265             :                GCTracer::Scope::SCAVENGER_SCAVENGE_WEAK_GLOBAL_HANDLES_PROCESS);
     266       29178 :       isolate_->global_handles()->MarkYoungWeakUnmodifiedObjectsPending(
     267       29178 :           &IsUnscavengedHeapObjectSlot);
     268       29178 :       isolate_->global_handles()->IterateYoungWeakUnmodifiedRootsForFinalizers(
     269       29178 :           &root_scavenge_visitor);
     270       29178 :       scavengers[kMainThreadId]->Process();
     271             : 
     272             :       DCHECK(copied_list.IsEmpty());
     273             :       DCHECK(promotion_list.IsEmpty());
     274       29178 :       isolate_->global_handles()
     275             :           ->IterateYoungWeakUnmodifiedRootsForPhantomHandles(
     276       29178 :               &root_scavenge_visitor, &IsUnscavengedHeapObjectSlot);
     277             :     }
     278             : 
     279             :     {
     280             :       // Finalize parallel scavenging.
     281      145890 :       TRACE_GC(heap_->tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE_FINALIZE);
     282             : 
     283             :       DCHECK(surviving_new_large_objects_.empty());
     284             : 
     285      142502 :       for (int i = 0; i < num_scavenge_tasks; i++) {
     286       56662 :         scavengers[i]->Finalize();
     287       56662 :         delete scavengers[i];
     288             :       }
     289             : 
     290       29178 :       HandleSurvivingNewLargeObjects();
     291             :     }
     292             :   }
     293             : 
     294             :   {
     295             :     // Update references into new space.
     296      145890 :     TRACE_GC(heap_->tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE_UPDATE_REFS);
     297       29178 :     heap_->UpdateYoungReferencesInExternalStringTable(
     298       29178 :         &Heap::UpdateYoungReferenceInExternalStringTableEntry);
     299             : 
     300       58356 :     heap_->incremental_marking()->UpdateMarkingWorklistAfterScavenge();
     301             :   }
     302             : 
     303       29178 :   if (FLAG_concurrent_marking) {
     304             :     // Ensure that concurrent marker does not track pages that are
     305             :     // going to be unmapped.
     306      289079 :     for (Page* p :
     307       28311 :          PageRange(heap_->new_space()->from_space().first_page(), nullptr)) {
     308      521536 :       heap_->concurrent_marking()->ClearMemoryChunkData(p);
     309             :     }
     310             :   }
     311             : 
     312       29178 :   ProcessWeakReferences(&ephemeron_table_list);
     313             : 
     314             :   // Set age mark.
     315       29178 :   heap_->new_space_->set_age_mark(heap_->new_space()->top());
     316             : 
     317             :   {
     318      145890 :     TRACE_GC(heap_->tracer(), GCTracer::Scope::SCAVENGER_PROCESS_ARRAY_BUFFERS);
     319       29178 :     ArrayBufferTracker::PrepareToFreeDeadInNewSpace(heap_);
     320             :   }
     321       58356 :   heap_->array_buffer_collector()->FreeAllocations();
     322             : 
     323             :   // Since we promote all surviving large objects immediately, all remaining
     324             :   // large objects must be dead.
     325             :   // TODO(hpayer): Don't free all as soon as we have an intermediate generation.
     326       58356 :   heap_->new_lo_space()->FreeDeadObjects([](HeapObject) { return true; });
     327             : 
     328      215108 :   RememberedSet<OLD_TO_NEW>::IterateMemoryChunks(heap_, [](MemoryChunk* chunk) {
     329      185930 :     if (chunk->SweepingDone()) {
     330      184984 :       RememberedSet<OLD_TO_NEW>::FreeEmptyBuckets(chunk);
     331             :     } else {
     332         946 :       RememberedSet<OLD_TO_NEW>::PreFreeEmptyBuckets(chunk);
     333             :     }
     334      215108 :   });
     335             : 
     336             :   // Update how much has survived scavenge.
     337       29178 :   heap_->IncrementYoungSurvivorsCounter(heap_->SurvivedYoungObjectSize());
     338       29178 : }
     339             : 
     340       29178 : void ScavengerCollector::HandleSurvivingNewLargeObjects() {
     341       33177 :   for (SurvivingNewLargeObjectMapEntry update_info :
     342             :        surviving_new_large_objects_) {
     343             :     HeapObject object = update_info.first;
     344             :     Map map = update_info.second;
     345             :     // Order is important here. We have to re-install the map to have access
     346             :     // to meta-data like size during page promotion.
     347             :     object->set_map_word(MapWord::FromMap(map));
     348             :     LargePage* page = LargePage::FromHeapObject(object);
     349        3999 :     heap_->lo_space()->PromoteNewLargeObject(page);
     350             :   }
     351             :   surviving_new_large_objects_.clear();
     352       29178 : }
     353             : 
     354       56662 : void ScavengerCollector::MergeSurvivingNewLargeObjects(
     355             :     const SurvivingNewLargeObjectsMap& objects) {
     356       60661 :   for (SurvivingNewLargeObjectMapEntry object : objects) {
     357             :     bool success = surviving_new_large_objects_.insert(object).second;
     358             :     USE(success);
     359             :     DCHECK(success);
     360             :   }
     361       56662 : }
     362             : 
     363       29178 : int ScavengerCollector::NumberOfScavengeTasks() {
     364       29178 :   if (!FLAG_parallel_scavenge) return 1;
     365             :   const int num_scavenge_tasks =
     366       28758 :       static_cast<int>(heap_->new_space()->TotalCapacity()) / MB;
     367       28758 :   static int num_cores = V8::GetCurrentPlatform()->NumberOfWorkerThreads() + 1;
     368             :   int tasks =
     369       28758 :       Max(1, Min(Min(num_scavenge_tasks, kMaxScavengerTasks), num_cores));
     370       28758 :   if (!heap_->CanExpandOldGeneration(
     371       28758 :           static_cast<size_t>(tasks * Page::kPageSize))) {
     372             :     // Optimize for memory usage near the heap limit.
     373             :     tasks = 1;
     374             :   }
     375             :   return tasks;
     376             : }
     377             : 
     378       56662 : Scavenger::Scavenger(ScavengerCollector* collector, Heap* heap, bool is_logging,
     379             :                      CopiedList* copied_list, PromotionList* promotion_list,
     380             :                      EphemeronTableList* ephemeron_table_list, int task_id)
     381             :     : collector_(collector),
     382             :       heap_(heap),
     383             :       promotion_list_(promotion_list, task_id),
     384             :       copied_list_(copied_list, task_id),
     385             :       ephemeron_table_list_(ephemeron_table_list, task_id),
     386             :       local_pretenuring_feedback_(kInitialLocalPretenuringFeedbackCapacity),
     387             :       copied_size_(0),
     388             :       promoted_size_(0),
     389             :       allocator_(heap),
     390             :       is_logging_(is_logging),
     391             :       is_incremental_marking_(heap->incremental_marking()->IsMarking()),
     392      283310 :       is_compacting_(heap->incremental_marking()->IsCompacting()) {}
     393             : 
     394    44765463 : void Scavenger::IterateAndScavengePromotedObject(HeapObject target, Map map,
     395             :                                                  int size) {
     396             :   // We are not collecting slots on new-space objects during mutation, thus we
     397             :   // have to scan for pointers to evacuation candidates when we promote
     398             :   // objects. But we should not record any slots in non-black objects. A grey
     399             :   // object's slots would be rescanned anyway. A white object might not survive
     400             :   // until the end of the collection, so recording its slots would be a
     401             :   // violation of the invariant.
     402             :   const bool record_slots =
     403    45059188 :       is_compacting_ &&
     404             :       heap()->incremental_marking()->atomic_marking_state()->IsBlack(target);
     405             :   IterateAndScavengePromotedObjectsVisitor visitor(this, record_slots);
     406             :   target->IterateBodyFast(map, size, &visitor);
     407    44825990 : }
     408             : 
     409         143 : void Scavenger::RememberPromotedEphemeron(EphemeronHashTable table, int entry) {
     410             :   auto indices =
     411         429 :       ephemeron_remembered_set_.insert({table, std::unordered_set<int>()});
     412             :   indices.first->second.insert(entry);
     413         143 : }
     414             : 
     415      184394 : void Scavenger::AddPageToSweeperIfNecessary(MemoryChunk* page) {
     416             :   AllocationSpace space = page->owner()->identity();
     417      316394 :   if ((space == OLD_SPACE) && !page->SweepingDone()) {
     418             :     heap()->mark_compact_collector()->sweeper()->AddPage(
     419             :         space, reinterpret_cast<Page*>(page),
     420         374 :         Sweeper::READD_TEMPORARY_REMOVED_PAGE);
     421             :   }
     422      184394 : }
     423             : 
     424      184440 : void Scavenger::ScavengePage(MemoryChunk* page) {
     425      184440 :   CodePageMemoryModificationScope memory_modification_scope(page);
     426             :   RememberedSet<OLD_TO_NEW>::Iterate(page,
     427    32647500 :                                      [this](MaybeObjectSlot addr) {
     428    32647500 :                                        return CheckAndScavengeObject(heap_,
     429             :                                                                      addr);
     430    32647500 :                                      },
     431      184412 :                                      SlotSet::KEEP_EMPTY_BUCKETS);
     432             :   RememberedSet<OLD_TO_NEW>::IterateTyped(
     433             :       page, [=](SlotType type, Address addr) {
     434      108089 :         return UpdateTypedSlotHelper::UpdateTypedSlot(
     435      216188 :             heap_, type, addr, [this](FullMaybeObjectSlot slot) {
     436             :               return CheckAndScavengeObject(heap(), slot);
     437      108099 :             });
     438      292512 :       });
     439             : 
     440      184410 :   AddPageToSweeperIfNecessary(page);
     441      184380 : }
     442             : 
     443      184872 : void Scavenger::Process(OneshotBarrier* barrier) {
     444             :   ScavengeVisitor scavenge_visitor(this);
     445             : 
     446             :   const bool have_barrier = barrier != nullptr;
     447             :   bool done;
     448             :   size_t objects = 0;
     449             :   do {
     450             :     done = true;
     451      219927 :     ObjectAndSize object_and_size;
     452   110136679 :     while (promotion_list_.ShouldEagerlyProcessPromotionList() &&
     453             :            copied_list_.Pop(&object_and_size)) {
     454             :       scavenge_visitor.Visit(object_and_size.first);
     455             :       done = false;
     456    54854291 :       if (have_barrier && ((++objects % kInterruptThreshold) == 0)) {
     457      415957 :         if (!copied_list_.IsGlobalPoolEmpty()) {
     458      132448 :           barrier->NotifyAll();
     459             :         }
     460             :       }
     461             :     }
     462             : 
     463             :     struct PromotionListEntry entry;
     464    89792758 :     while (promotion_list_.Pop(&entry)) {
     465    44792961 :       HeapObject target = entry.heap_object;
     466             :       DCHECK(!target->IsMap());
     467    44792961 :       IterateAndScavengePromotedObject(target, entry.map, entry.size);
     468             :       done = false;
     469    44808806 :       if (have_barrier && ((++objects % kInterruptThreshold) == 0)) {
     470      347293 :         if (!promotion_list_.IsGlobalPoolEmpty()) {
     471      114709 :           barrier->NotifyAll();
     472             :         }
     473             :       }
     474             :     }
     475      175471 :   } while (!done);
     476      140416 : }
     477             : 
     478       29178 : void ScavengerCollector::ProcessWeakReferences(
     479             :     EphemeronTableList* ephemeron_table_list) {
     480       29178 :   ScavengeWeakObjectRetainer weak_object_retainer;
     481       29178 :   heap_->ProcessYoungWeakReferences(&weak_object_retainer);
     482       29178 :   ClearYoungEphemerons(ephemeron_table_list);
     483       29178 :   ClearOldEphemerons();
     484       29178 : }
     485             : 
     486             : // Clear ephemeron entries from EphemeronHashTables in new-space whenever the
     487             : // entry has a dead new-space key.
     488       29178 : void ScavengerCollector::ClearYoungEphemerons(
     489             :     EphemeronTableList* ephemeron_table_list) {
     490       34690 :   ephemeron_table_list->Iterate([this](EphemeronHashTable table) {
     491      314010 :     for (int i = 0; i < table->Capacity(); i++) {
     492             :       // Keys in EphemeronHashTables must be heap objects.
     493             :       HeapObjectSlot key_slot(
     494             :           table->RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(i)));
     495             :       HeapObject key = key_slot.ToHeapObject();
     496      139660 :       if (IsUnscavengedHeapObject(heap_, key)) {
     497          16 :         table->RemoveEntry(i);
     498             :       } else {
     499      139644 :         HeapObject forwarded = ForwardingAddress(key);
     500             :         key_slot.StoreHeapObject(forwarded);
     501             :       }
     502             :     }
     503       63868 :   });
     504             :   ephemeron_table_list->Clear();
     505       29178 : }
     506             : 
     507             : // Clear ephemeron entries from EphemeronHashTables in old-space whenever the
     508             : // entry has a dead new-space key.
     509       29178 : void ScavengerCollector::ClearOldEphemerons() {
     510       29207 :   for (auto it = heap_->ephemeron_remembered_set_.begin();
     511             :        it != heap_->ephemeron_remembered_set_.end();) {
     512          29 :     EphemeronHashTable table = it->first;
     513             :     auto& indices = it->second;
     514         177 :     for (auto iti = indices.begin(); iti != indices.end();) {
     515             :       // Keys in EphemeronHashTables must be heap objects.
     516             :       HeapObjectSlot key_slot(
     517         148 :           table->RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(*iti)));
     518             :       HeapObject key = key_slot.ToHeapObject();
     519         148 :       if (IsUnscavengedHeapObject(heap_, key)) {
     520           6 :         table->RemoveEntry(*iti);
     521             :         iti = indices.erase(iti);
     522             :       } else {
     523         142 :         HeapObject forwarded = ForwardingAddress(key);
     524             :         key_slot.StoreHeapObject(forwarded);
     525         142 :         if (!Heap::InYoungGeneration(forwarded)) {
     526             :           iti = indices.erase(iti);
     527             :         } else {
     528             :           ++iti;
     529             :         }
     530             :       }
     531             :     }
     532             : 
     533          29 :     if (indices.size() == 0) {
     534          24 :       it = heap_->ephemeron_remembered_set_.erase(it);
     535             :     } else {
     536             :       ++it;
     537             :     }
     538             :   }
     539       29178 : }
     540             : 
     541       56662 : void Scavenger::Finalize() {
     542       56662 :   heap()->MergeAllocationSitePretenuringFeedback(local_pretenuring_feedback_);
     543       56662 :   heap()->IncrementSemiSpaceCopiedObjectSize(copied_size_);
     544       56662 :   heap()->IncrementPromotedObjectsSize(promoted_size_);
     545       56662 :   collector_->MergeSurvivingNewLargeObjects(surviving_new_large_objects_);
     546       56662 :   allocator_.Finalize();
     547             :   ephemeron_table_list_.FlushToGlobal();
     548       56686 :   for (auto it = ephemeron_remembered_set_.begin();
     549             :        it != ephemeron_remembered_set_.end(); ++it) {
     550             :     auto insert_result = heap()->ephemeron_remembered_set_.insert(
     551          72 :         {it->first, std::unordered_set<int>()});
     552         167 :     for (int entry : it->second) {
     553             :       insert_result.first->second.insert(entry);
     554             :     }
     555             :   }
     556       56662 : }
     557             : 
     558           0 : void Scavenger::AddEphemeronHashTable(EphemeronHashTable table) {
     559             :   ephemeron_table_list_.Push(table);
     560           0 : }
     561             : 
     562    47051781 : void RootScavengeVisitor::VisitRootPointer(Root root, const char* description,
     563             :                                            FullObjectSlot p) {
     564             :   DCHECK(!HasWeakHeapObjectTag(*p));
     565    47051781 :   ScavengePointer(p);
     566    47051781 : }
     567             : 
     568     2424797 : void RootScavengeVisitor::VisitRootPointers(Root root, const char* description,
     569             :                                             FullObjectSlot start,
     570             :                                             FullObjectSlot end) {
     571             :   // Copy all HeapObject pointers in [start, end)
     572    59631174 :   for (FullObjectSlot p = start; p < end; ++p) ScavengePointer(p);
     573     2424797 : }
     574             : 
     575   101833361 : void RootScavengeVisitor::ScavengePointer(FullObjectSlot p) {
     576             :   Object object = *p;
     577             :   DCHECK(!HasWeakHeapObjectTag(object));
     578   101833361 :   if (Heap::InYoungGeneration(object)) {
     579     9360776 :     scavenger_->ScavengeObject(FullHeapObjectSlot(p), HeapObject::cast(object));
     580             :   }
     581   101833361 : }
     582             : 
     583           0 : RootScavengeVisitor::RootScavengeVisitor(Scavenger* scavenger)
     584       29178 :     : scavenger_(scavenger) {}
     585             : 
     586           0 : ScavengeVisitor::ScavengeVisitor(Scavenger* scavenger)
     587      184872 :     : scavenger_(scavenger) {}
     588             : 
     589             : }  // namespace internal
     590      122036 : }  // namespace v8

Generated by: LCOV version 1.10