LCOV - code coverage report
Current view: top level - src/heap - scavenger.cc (source / functions)
Test:         app.info
Date:         2019-03-21
                          Hit    Total    Coverage
              Lines:      191    208      91.8 %
              Functions:   27     38      71.1 %

          Line data    Source code
       1             : // Copyright 2015 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #include "src/heap/scavenger.h"
       6             : 
       7             : #include "src/heap/array-buffer-collector.h"
       8             : #include "src/heap/barrier.h"
       9             : #include "src/heap/gc-tracer.h"
      10             : #include "src/heap/heap-inl.h"
      11             : #include "src/heap/item-parallel-job.h"
      12             : #include "src/heap/mark-compact-inl.h"
      13             : #include "src/heap/objects-visiting-inl.h"
      14             : #include "src/heap/scavenger-inl.h"
      15             : #include "src/heap/sweeper.h"
      16             : #include "src/objects-body-descriptors-inl.h"
      17             : #include "src/objects/data-handler-inl.h"
      18             : #include "src/objects/embedder-data-array-inl.h"
      19             : #include "src/transitions-inl.h"
      20             : #include "src/utils-inl.h"
      21             : 
      22             : namespace v8 {
      23             : namespace internal {
      24             : 
      25             : class PageScavengingItem final : public ItemParallelJob::Item {
      26             :  public:
      27      108006 :   explicit PageScavengingItem(MemoryChunk* chunk) : chunk_(chunk) {}
      28      216012 :   ~PageScavengingItem() override = default;
      29             : 
      30      107992 :   void Process(Scavenger* scavenger) { scavenger->ScavengePage(chunk_); }
      31             : 
      32             :  private:
      33             :   MemoryChunk* const chunk_;
      34             : };
      35             : 
      36       67814 : class ScavengingTask final : public ItemParallelJob::Task {
      37             :  public:
      38             :   ScavengingTask(Heap* heap, Scavenger* scavenger, OneshotBarrier* barrier)
      39             :       : ItemParallelJob::Task(heap->isolate()),
      40             :         heap_(heap),
      41             :         scavenger_(scavenger),
      42       33934 :         barrier_(barrier) {}
      43             : 
      44       33462 :   void RunInParallel() final {
      45      167287 :     TRACE_BACKGROUND_GC(
      46             :         heap_->tracer(),
      47             :         GCTracer::BackgroundScope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL);
      48       33442 :     double scavenging_time = 0.0;
      49             :     {
      50       33442 :       barrier_->Start();
      51             :       TimedScope scope(&scavenging_time);
      52             :       PageScavengingItem* item = nullptr;
      53      249458 :       while ((item = GetItem<PageScavengingItem>()) != nullptr) {
      54      107992 :         item->Process(scavenger_);
      55      107972 :         item->MarkFinished();
      56             :       }
      57       34267 :       do {
      58       34247 :         scavenger_->Process(barrier_);
      59       34190 :       } while (!barrier_->Wait());
      60       33490 :       scavenger_->Process();
      61             :     }
      62       33478 :     if (FLAG_trace_parallel_scavenge) {
      63           0 :       PrintIsolate(heap_->isolate(),
      64             :                    "scavenge[%p]: time=%.2f copied=%zu promoted=%zu\n",
      65             :                    static_cast<void*>(this), scavenging_time,
      66           0 :                    scavenger_->bytes_copied(), scavenger_->bytes_promoted());
      67             :     }
      68       33489 :   }
      69             : 
      70             :  private:
      71             :   Heap* const heap_;
      72             :   Scavenger* const scavenger_;
      73             :   OneshotBarrier* const barrier_;
      74             : };
      75             : 
      76    35031930 : class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
      77             :  public:
      78             :   IterateAndScavengePromotedObjectsVisitor(Scavenger* scavenger,
      79             :                                            bool record_slots)
      80    35009740 :       : scavenger_(scavenger), record_slots_(record_slots) {}
      81             : 
      82      143484 :   V8_INLINE void VisitPointers(HeapObject host, ObjectSlot start,
      83             :                                ObjectSlot end) final {
      84             :     VisitPointersImpl(host, start, end);
      85      143321 :   }
      86             : 
      87           8 :   V8_INLINE void VisitPointers(HeapObject host, MaybeObjectSlot start,
      88             :                                MaybeObjectSlot end) final {
      89             :     VisitPointersImpl(host, start, end);
      90           8 :   }
      91             : 
      92           0 :   V8_INLINE void VisitCodeTarget(Code host, RelocInfo* rinfo) final {
      93           0 :     Code target = Code::GetCodeFromTargetAddress(rinfo->target_address());
      94           0 :     HandleSlot(host, FullHeapObjectSlot(&target), target);
      95           0 :   }
      96           0 :   V8_INLINE void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) final {
      97           0 :     HeapObject heap_object = rinfo->target_object();
      98           0 :     HandleSlot(host, FullHeapObjectSlot(&heap_object), heap_object);
      99           0 :   }
     100             : 
     101             :  private:
     102             :   template <typename TSlot>
     103             :   V8_INLINE void VisitPointersImpl(HeapObject host, TSlot start, TSlot end) {
     104             :     using THeapObjectSlot = typename TSlot::THeapObjectSlot;
     105             :     // Treat weak references as strong.
     106             :     // TODO(marja): Proper weakness handling in the young generation.
     107  2446136118 :     for (TSlot slot = start; slot < end; ++slot) {
     108      143550 :       typename TSlot::TObject object = *slot;
     109      143604 :       HeapObject heap_object;
     110  2410859653 :       if (object.GetHeapObject(&heap_object)) {
     111       29854 :         HandleSlot(host, THeapObjectSlot(slot), heap_object);
     112             :       }
     113             :     }
     114             :   }
     115             : 
     116             :   template <typename THeapObjectSlot>
     117             :   V8_INLINE void HandleSlot(HeapObject host, THeapObjectSlot slot,
     118             :                             HeapObject target) {
     119             :     static_assert(
     120             :         std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
     121             :             std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
     122             :         "Only FullHeapObjectSlot and HeapObjectSlot are expected here");
     123       14927 :     scavenger_->PageMemoryFence(MaybeObject::FromObject(target));
     124             : 
     125  1648792220 :     if (Heap::InFromPage(target)) {
     126    43099360 :       SlotCallbackResult result = scavenger_->ScavengeObject(slot, target);
     127        1105 :       bool success = (*slot)->GetHeapObject(&target);
     128             :       USE(success);
     129             :       DCHECK(success);
     130             : 
     131    42993343 :       if (result == KEEP_SLOT) {
     132             :         SLOW_DCHECK(target->IsHeapObject());
     133     6455142 :         RememberedSet<OLD_TO_NEW>::Insert(MemoryChunk::FromHeapObject(host),
     134             :                                           slot.address());
     135             :       }
     136             :       SLOW_DCHECK(!MarkCompactCollector::IsOnEvacuationCandidate(
     137             :           HeapObject::cast(target)));
     138  1613977016 :     } else if (record_slots_ && MarkCompactCollector::IsOnEvacuationCandidate(
     139             :                                     HeapObject::cast(target))) {
     140             :       // We should never try to record off-heap slots.
     141             :       DCHECK((std::is_same<THeapObjectSlot, HeapObjectSlot>::value));
     142             :       // We cannot call MarkCompactCollector::RecordSlot because that checks
     143             :       // that the host page is not in young generation, which does not hold
     144             :       // for pending large pages.
     145           5 :       RememberedSet<OLD_TO_OLD>::Insert(MemoryChunk::FromHeapObject(host),
     146             :                                         slot.address());
     147             :     }
     148             :   }
     149             : 
     150             :   Scavenger* const scavenger_;
     151             :   const bool record_slots_;
     152             : };
     153             : 
     154             : namespace {
     155             : 
     156             : V8_INLINE bool IsUnscavengedHeapObject(Heap* heap, Object object) {
     157       70127 :   return Heap::InFromPage(object) &&
     158       35056 :          !HeapObject::cast(object)->map_word().IsForwardingAddress();
     159             : }
     160             : 
     161             : // Same as IsUnscavengedHeapObject() above but specialized for HeapObjects.
     162             : V8_INLINE bool IsUnscavengedHeapObject(Heap* heap, HeapObject heap_object) {
     163        7451 :   return Heap::InFromPage(heap_object) &&
     164             :          !heap_object->map_word().IsForwardingAddress();
     165             : }
     166             : 
     167       35071 : bool IsUnscavengedHeapObjectSlot(Heap* heap, FullObjectSlot p) {
     168       35071 :   return IsUnscavengedHeapObject(heap, *p);
     169             : }
     170             : 
     171             : }  // namespace
     172             : 
     173       20973 : class ScavengeWeakObjectRetainer : public WeakObjectRetainer {
     174             :  public:
     175       47273 :   Object RetainAs(Object object) override {
     176       47273 :     if (!Heap::InFromPage(object)) {
     177       47273 :       return object;
     178             :     }
     179             : 
     180             :     MapWord map_word = HeapObject::cast(object)->map_word();
     181           0 :     if (map_word.IsForwardingAddress()) {
     182           0 :       return map_word.ToForwardingAddress();
     183             :     }
     184           0 :     return Object();
     185             :   }
     186             : };
     187             : 
     188       61533 : ScavengerCollector::ScavengerCollector(Heap* heap)
     189      123066 :     : isolate_(heap->isolate()), heap_(heap), parallel_scavenge_semaphore_(0) {}
     190             : 
     191       20973 : void ScavengerCollector::CollectGarbage() {
     192             :   DCHECK(surviving_new_large_objects_.empty());
     193       20973 :   ItemParallelJob job(isolate_->cancelable_task_manager(),
     194       62919 :                       &parallel_scavenge_semaphore_);
     195             :   const int kMainThreadId = 0;
     196             :   Scavenger* scavengers[kMaxScavengerTasks];
     197       20973 :   const bool is_logging = isolate_->LogObjectRelocation();
     198       20973 :   const int num_scavenge_tasks = NumberOfScavengeTasks();
     199             :   OneshotBarrier barrier(base::TimeDelta::FromMilliseconds(kMaxWaitTimeMs));
     200       41946 :   Scavenger::CopiedList copied_list(num_scavenge_tasks);
     201             :   Scavenger::PromotionList promotion_list(num_scavenge_tasks);
     202       41946 :   EphemeronTableList ephemeron_table_list(num_scavenge_tasks);
     203       88841 :   for (int i = 0; i < num_scavenge_tasks; i++) {
     204       33934 :     scavengers[i] = new Scavenger(this, heap_, is_logging, &copied_list,
     205       67868 :                                   &promotion_list, &ephemeron_table_list, i);
     206       67868 :     job.AddTask(new ScavengingTask(heap_, scavengers[i], &barrier));
     207             :   }
     208             : 
     209             :   {
     210       20973 :     Sweeper* sweeper = heap_->mark_compact_collector()->sweeper();
     211             :     // Pause the concurrent sweeper.
     212       41946 :     Sweeper::PauseOrCompleteScope pause_scope(sweeper);
     213             :     // Filter out pages from the sweeper that need to be processed for old to
     214             :     // new slots by the Scavenger. After processing, the Scavenger adds back
     215             :     // pages that are still unswept. This way the Scavenger has exclusive
     216             :     // access to the slots of a page and can completely avoid any locks on
     217             :     // the page itself.
     218       41946 :     Sweeper::FilterSweepingPagesScope filter_scope(sweeper, pause_scope);
     219             :     filter_scope.FilterOldSpaceSweepingPages(
     220       21463 :         [](Page* page) { return !page->ContainsSlots<OLD_TO_NEW>(); });
     221             :     RememberedSet<OLD_TO_NEW>::IterateMemoryChunks(
     222      236985 :         heap_, [&job](MemoryChunk* chunk) {
     223      108006 :           job.AddItem(new PageScavengingItem(chunk));
     224      149952 :         });
     225             : 
     226       20973 :     RootScavengeVisitor root_scavenge_visitor(scavengers[kMainThreadId]);
     227             : 
     228             :     {
     229             :       // Identify weak unmodified handles. Requires an unmodified graph.
     230      104865 :       TRACE_GC(
     231             :           heap_->tracer(),
     232             :           GCTracer::Scope::SCAVENGER_SCAVENGE_WEAK_GLOBAL_HANDLES_IDENTIFY);
     233       20973 :       isolate_->global_handles()->IdentifyWeakUnmodifiedObjects(
     234       20973 :           &JSObject::IsUnmodifiedApiObject);
     235             :     }
     236             :     {
     237             :       // Copy roots.
     238      104865 :       TRACE_GC(heap_->tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE_ROOTS);
     239       20973 :       heap_->IterateRoots(&root_scavenge_visitor, VISIT_ALL_IN_SCAVENGE);
     240             :     }
     241             :     {
     242             :       // Parallel phase scavenging all copied and promoted objects.
     243      104865 :       TRACE_GC(heap_->tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE_PARALLEL);
     244       20973 :       job.Run();
     245             :       DCHECK(copied_list.IsEmpty());
     246             :       DCHECK(promotion_list.IsEmpty());
     247             :     }
     248             :     {
     249             :       // Scavenge weak global handles.
     250      104865 :       TRACE_GC(heap_->tracer(),
     251             :                GCTracer::Scope::SCAVENGER_SCAVENGE_WEAK_GLOBAL_HANDLES_PROCESS);
     252       20973 :       isolate_->global_handles()->MarkYoungWeakUnmodifiedObjectsPending(
     253       20973 :           &IsUnscavengedHeapObjectSlot);
     254       20973 :       isolate_->global_handles()->IterateYoungWeakUnmodifiedRootsForFinalizers(
     255       20973 :           &root_scavenge_visitor);
     256       20973 :       scavengers[kMainThreadId]->Process();
     257             : 
     258             :       DCHECK(copied_list.IsEmpty());
     259             :       DCHECK(promotion_list.IsEmpty());
     260       20973 :       isolate_->global_handles()
     261             :           ->IterateYoungWeakUnmodifiedRootsForPhantomHandles(
     262       20973 :               &root_scavenge_visitor, &IsUnscavengedHeapObjectSlot);
     263             :     }
     264             : 
     265             :     {
     266             :       // Finalize parallel scavenging.
     267      104865 :       TRACE_GC(heap_->tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE_FINALIZE);
     268             : 
     269             :       DCHECK(surviving_new_large_objects_.empty());
     270             : 
     271       88841 :       for (int i = 0; i < num_scavenge_tasks; i++) {
     272       33934 :         scavengers[i]->Finalize();
     273       33934 :         delete scavengers[i];
     274             :       }
     275             : 
     276       20973 :       HandleSurvivingNewLargeObjects();
     277             :     }
     278             :   }
     279             : 
     280             :   {
     281             :     // Update references into new space
     282      104865 :     TRACE_GC(heap_->tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE_UPDATE_REFS);
     283       20973 :     heap_->UpdateYoungReferencesInExternalStringTable(
     284       20973 :         &Heap::UpdateYoungReferenceInExternalStringTableEntry);
     285             : 
     286       41946 :     heap_->incremental_marking()->UpdateMarkingWorklistAfterScavenge();
     287             :   }
     288             : 
     289       20973 :   if (FLAG_concurrent_marking) {
     290             :     // Ensure that the concurrent marker does not track pages that are
     291             :     // going to be unmapped.
     292      160974 :     for (Page* p :
     293       20258 :          PageRange(heap_->new_space()->from_space().first_page(), nullptr)) {
     294      281432 :       heap_->concurrent_marking()->ClearMemoryChunkData(p);
     295             :     }
     296             :   }
     297             : 
     298       20973 :   ProcessWeakReferences(&ephemeron_table_list);
     299             : 
     300             :   // Set age mark.
     301       20973 :   heap_->new_space_->set_age_mark(heap_->new_space()->top());
     302             : 
     303             :   {
     304      104865 :     TRACE_GC(heap_->tracer(), GCTracer::Scope::SCAVENGER_PROCESS_ARRAY_BUFFERS);
     305       20973 :     ArrayBufferTracker::PrepareToFreeDeadInNewSpace(heap_);
     306             :   }
     307       41946 :   heap_->array_buffer_collector()->FreeAllocations();
     308             : 
     309             :   // Since we promote all surviving large objects immediately, all remaining
     310             :   // large objects must be dead.
     311             :   // TODO(hpayer): Don't free all as soon as we have an intermediate generation.
     312       41946 :   heap_->new_lo_space()->FreeDeadObjects([](HeapObject) { return true; });
     313             : 
     314      129521 :   RememberedSet<OLD_TO_NEW>::IterateMemoryChunks(heap_, [](MemoryChunk* chunk) {
     315      108548 :     if (chunk->SweepingDone()) {
     316      107939 :       RememberedSet<OLD_TO_NEW>::FreeEmptyBuckets(chunk);
     317             :     } else {
     318         609 :       RememberedSet<OLD_TO_NEW>::PreFreeEmptyBuckets(chunk);
     319             :     }
     320      129521 :   });
     321             : 
     322             :   // Update how much has survived scavenge.
     323       20973 :   heap_->IncrementYoungSurvivorsCounter(heap_->SurvivedYoungObjectSize());
     324       20973 : }
     325             : 
     326       20973 : void ScavengerCollector::HandleSurvivingNewLargeObjects() {
     327       24430 :   for (SurvivingNewLargeObjectMapEntry update_info :
     328             :        surviving_new_large_objects_) {
     329             :     HeapObject object = update_info.first;
     330             :     Map map = update_info.second;
     331             :     // Order is important here. We have to re-install the map to have access
     332             :     // to meta-data like size during page promotion.
     333             :     object->set_map_word(MapWord::FromMap(map));
     334             :     LargePage* page = LargePage::FromHeapObject(object);
     335        3457 :     heap_->lo_space()->PromoteNewLargeObject(page);
     336             :   }
     337             :   surviving_new_large_objects_.clear();
     338       20973 : }
     339             : 
     340       33934 : void ScavengerCollector::MergeSurvivingNewLargeObjects(
     341             :     const SurvivingNewLargeObjectsMap& objects) {
     342       37391 :   for (SurvivingNewLargeObjectMapEntry object : objects) {
     343             :     bool success = surviving_new_large_objects_.insert(object).second;
     344             :     USE(success);
     345             :     DCHECK(success);
     346             :   }
     347       33934 : }
     348             : 
     349       20973 : int ScavengerCollector::NumberOfScavengeTasks() {
     350       20973 :   if (!FLAG_parallel_scavenge) return 1;
     351             :   const int num_scavenge_tasks =
     352       20719 :       static_cast<int>(heap_->new_space()->TotalCapacity()) / MB;
     353       20719 :   static int num_cores = V8::GetCurrentPlatform()->NumberOfWorkerThreads() + 1;
     354             :   int tasks =
     355       20719 :       Max(1, Min(Min(num_scavenge_tasks, kMaxScavengerTasks), num_cores));
     356       20719 :   if (!heap_->CanExpandOldGeneration(
     357       20719 :           static_cast<size_t>(tasks * Page::kPageSize))) {
     358             :     // Optimize for memory usage near the heap limit.
     359             :     tasks = 1;
     360             :   }
     361             :   return tasks;
     362             : }
     363             : 
     364       33934 : Scavenger::Scavenger(ScavengerCollector* collector, Heap* heap, bool is_logging,
     365             :                      CopiedList* copied_list, PromotionList* promotion_list,
     366             :                      EphemeronTableList* ephemeron_table_list, int task_id)
     367             :     : collector_(collector),
     368             :       heap_(heap),
     369             :       promotion_list_(promotion_list, task_id),
     370             :       copied_list_(copied_list, task_id),
     371             :       ephemeron_table_list_(ephemeron_table_list, task_id),
     372             :       local_pretenuring_feedback_(kInitialLocalPretenuringFeedbackCapacity),
     373             :       copied_size_(0),
     374             :       promoted_size_(0),
     375             :       allocator_(heap),
     376             :       is_logging_(is_logging),
     377             :       is_incremental_marking_(heap->incremental_marking()->IsMarking()),
     378      169670 :       is_compacting_(heap->incremental_marking()->IsCompacting()) {}
     379             : 
     380    35009740 : void Scavenger::IterateAndScavengePromotedObject(HeapObject target, Map map,
     381             :                                                  int size) {
     382             :   // We are not collecting slots on new space objects during mutation, thus
     383             :   // we have to scan for pointers to evacuation candidates when we promote
     384             :   // objects. But we should not record any slots in non-black objects. A grey
     385             :   // object's slots would be rescanned. A white object might not survive until
     386             :   // the end of the collection, so it would be a violation of the invariant to
     387             :   // record its slots.
     388             :   const bool record_slots =
     389    35176533 :       is_compacting_ &&
     390             :       heap()->incremental_marking()->atomic_marking_state()->IsBlack(target);
     391             :   IterateAndScavengePromotedObjectsVisitor visitor(this, record_slots);
     392             :   target->IterateBodyFast(map, size, &visitor);
     393    35031930 : }
     394             : 
     395      107971 : void Scavenger::AddPageToSweeperIfNecessary(MemoryChunk* page) {
     396             :   AllocationSpace space = page->owner()->identity();
     397      180847 :   if ((space == OLD_SPACE) && !page->SweepingDone()) {
     398             :     heap()->mark_compact_collector()->sweeper()->AddPage(
     399             :         space, reinterpret_cast<Page*>(page),
     400         239 :         Sweeper::READD_TEMPORARY_REMOVED_PAGE);
     401             :   }
     402      107971 : }
     403             : 
     404      107979 : void Scavenger::ScavengePage(MemoryChunk* page) {
     405      107979 :   CodePageMemoryModificationScope memory_modification_scope(page);
     406             :   RememberedSet<OLD_TO_NEW>::Iterate(page,
     407    28564234 :                                      [this](MaybeObjectSlot addr) {
     408    28564234 :                                        return CheckAndScavengeObject(heap_,
     409             :                                                                      addr);
     410    28564234 :                                      },
     411      107965 :                                      SlotSet::KEEP_EMPTY_BUCKETS);
     412             :   RememberedSet<OLD_TO_NEW>::IterateTyped(
     413             :       page, [=](SlotType type, Address addr) {
     414      107901 :         return UpdateTypedSlotHelper::UpdateTypedSlot(
     415      215835 :             heap_, type, addr, [this](FullMaybeObjectSlot slot) {
     416             :               return CheckAndScavengeObject(heap(), slot);
     417      107934 :             });
     418      215879 :       });
     419             : 
     420      107974 :   AddPageToSweeperIfNecessary(page);
     421      107973 : }
     422             : 
     423      184779 : void Scavenger::Process(OneshotBarrier* barrier) {
     424             :   ScavengeVisitor scavenge_visitor(this);
     425             : 
     426             :   const bool have_barrier = barrier != nullptr;
     427             :   bool done;
     428             :   size_t objects = 0;
     429             :   do {
     430             :     done = true;
     431      207981 :     ObjectAndSize object_and_size;
     432    93378269 :     while (promotion_list_.ShouldEagerlyProcessPromotionList() &&
     433             :            copied_list_.Pop(&object_and_size)) {
     434             :       scavenge_visitor.Visit(object_and_size.first);
     435             :       done = false;
     436    46511186 :       if (have_barrier && ((++objects % kInterruptThreshold) == 0)) {
     437      355071 :         if (!copied_list_.IsGlobalPoolEmpty()) {
     438      130159 :           barrier->NotifyAll();
     439             :         }
     440             :       }
     441             :     }
     442             : 
     443             :     struct PromotionListEntry entry;
     444    70180343 :     while (promotion_list_.Pop(&entry)) {
     445    35035640 :       HeapObject target = entry.heap_object;
     446             :       DCHECK(!target->IsMap());
     447    35035640 :       IterateAndScavengePromotedObject(target, entry.map, entry.size);
     448             :       done = false;
     449    35022821 :       if (have_barrier && ((++objects % kInterruptThreshold) == 0)) {
     450      272441 :         if (!promotion_list_.IsGlobalPoolEmpty()) {
     451      116518 :           barrier->NotifyAll();
     452             :         }
     453             :       }
     454             :     }
     455      111815 :   } while (!done);
     456       88613 : }
     457             : 
     458       20973 : void ScavengerCollector::ProcessWeakReferences(
     459             :     EphemeronTableList* ephemeron_table_list) {
     460       20973 :   ScavengeWeakObjectRetainer weak_object_retainer;
     461       20973 :   heap_->ProcessYoungWeakReferences(&weak_object_retainer);
     462       20973 :   ClearYoungEphemerons(ephemeron_table_list);
     463       20973 : }
     464             : 
     465             : // Clears ephemerons contained in {EphemeronHashTable}s in young generation.
     466       20973 : void ScavengerCollector::ClearYoungEphemerons(
     467             :     EphemeronTableList* ephemeron_table_list) {
     468        1709 :   ephemeron_table_list->Iterate([this](EphemeronHashTable table) {
     469       16117 :     for (int i = 0; i < table->Capacity(); i++) {
     470             :       ObjectSlot key_slot =
     471             :           table->RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(i));
     472             :       Object key = *key_slot;
     473        7204 :       if (key->IsHeapObject()) {
     474        7204 :         if (IsUnscavengedHeapObject(heap_, HeapObject::cast(key))) {
     475          16 :           table->RemoveEntry(i);
     476             :         } else {
     477        7188 :           HeapObject forwarded = ForwardingAddress(HeapObject::cast(key));
     478             :           HeapObjectReference::Update(HeapObjectSlot(key_slot), forwarded);
     479             :         }
     480             :       }
     481             :     }
     482       22682 :   });
     483             :   ephemeron_table_list->Clear();
     484       20973 : }
     485             : 
     486       33934 : void Scavenger::Finalize() {
     487       33934 :   heap()->MergeAllocationSitePretenuringFeedback(local_pretenuring_feedback_);
     488       33934 :   heap()->IncrementSemiSpaceCopiedObjectSize(copied_size_);
     489       33934 :   heap()->IncrementPromotedObjectsSize(promoted_size_);
     490       33934 :   collector_->MergeSurvivingNewLargeObjects(surviving_new_large_objects_);
     491       33934 :   allocator_.Finalize();
     492             :   ephemeron_table_list_.FlushToGlobal();
     493       33934 : }
     494             : 
     495           0 : void Scavenger::AddEphemeronHashTable(EphemeronHashTable table) {
     496             :   ephemeron_table_list_.Push(table);
     497           0 : }
     498             : 
     499    33316076 : void RootScavengeVisitor::VisitRootPointer(Root root, const char* description,
     500             :                                            FullObjectSlot p) {
     501             :   DCHECK(!HasWeakHeapObjectTag(*p));
     502    33316076 :   ScavengePointer(p);
     503    33316076 : }
     504             : 
     505     1301335 : void RootScavengeVisitor::VisitRootPointers(Root root, const char* description,
     506             :                                             FullObjectSlot start,
     507             :                                             FullObjectSlot end) {
     508             :   // Copy all HeapObject pointers in [start, end)
     509    37458420 :   for (FullObjectSlot p = start; p < end; ++p) ScavengePointer(p);
     510     1301335 : }
     511             : 
     512    68171826 : void RootScavengeVisitor::ScavengePointer(FullObjectSlot p) {
     513             :   Object object = *p;
     514             :   DCHECK(!HasWeakHeapObjectTag(object));
     515    68171826 :   if (Heap::InYoungGeneration(object)) {
     516     7576533 :     scavenger_->ScavengeObject(FullHeapObjectSlot(p), HeapObject::cast(object));
     517             :   }
     518    68171826 : }
     519             : 
     520           0 : RootScavengeVisitor::RootScavengeVisitor(Scavenger* scavenger)
     521       20973 :     : scavenger_(scavenger) {}
     522             : 
     523           0 : ScavengeVisitor::ScavengeVisitor(Scavenger* scavenger)
     524      184779 :     : scavenger_(scavenger) {}
     525             : 
     526             : }  // namespace internal
     527      120216 : }  // namespace v8

Generated by: LCOV version 1.10