LCOV - code coverage report

Current view: top level - src/heap - scavenger.cc (source / functions)
Test:         app.info
Date:         2019-02-19

                 Hit     Total    Coverage
Lines:           175     192      91.1 %
Functions:        25      35      71.4 %

          Line data    Source code
       1             : // Copyright 2015 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #include "src/heap/scavenger.h"
       6             : 
       7             : #include "src/heap/array-buffer-collector.h"
       8             : #include "src/heap/barrier.h"
       9             : #include "src/heap/gc-tracer.h"
      10             : #include "src/heap/heap-inl.h"
      11             : #include "src/heap/item-parallel-job.h"
      12             : #include "src/heap/mark-compact-inl.h"
      13             : #include "src/heap/objects-visiting-inl.h"
      14             : #include "src/heap/scavenger-inl.h"
      15             : #include "src/heap/sweeper.h"
      16             : #include "src/objects-body-descriptors-inl.h"
      17             : #include "src/objects/data-handler-inl.h"
      18             : #include "src/objects/embedder-data-array-inl.h"
      19             : #include "src/transitions-inl.h"
      20             : #include "src/utils-inl.h"
      21             : 
      22             : namespace v8 {
      23             : namespace internal {
      24             : 
      25             : class PageScavengingItem final : public ItemParallelJob::Item {
      26             :  public:
      27      110665 :   explicit PageScavengingItem(MemoryChunk* chunk) : chunk_(chunk) {}
      28      221330 :   ~PageScavengingItem() override = default;
      29             : 
      30      110610 :   void Process(Scavenger* scavenger) { scavenger->ScavengePage(chunk_); }
      31             : 
      32             :  private:
      33             :   MemoryChunk* const chunk_;
      34             : };
      35             : 
      36       75680 : class ScavengingTask final : public ItemParallelJob::Task {
      37             :  public:
      38             :   ScavengingTask(Heap* heap, Scavenger* scavenger, OneshotBarrier* barrier)
      39             :       : ItemParallelJob::Task(heap->isolate()),
      40             :         heap_(heap),
      41             :         scavenger_(scavenger),
      42       37850 :         barrier_(barrier) {}
      43             : 
      44       37545 :   void RunInParallel() final {
      45      187744 :     TRACE_BACKGROUND_GC(
      46             :         heap_->tracer(),
      47             :         GCTracer::BackgroundScope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL);
      48       37542 :     double scavenging_time = 0.0;
      49             :     {
      50       37542 :       barrier_->Start();
      51             :       TimedScope scope(&scavenging_time);
      52      110610 :       PageScavengingItem* item = nullptr;
      53      185742 :       while ((item = GetItem<PageScavengingItem>()) != nullptr) {
      54      110610 :         item->Process(scavenger_);
      55      110577 :         item->MarkFinished();
      56             :       }
      57       38238 :       do {
      58       38222 :         scavenger_->Process(barrier_);
      59       38201 :       } while (!barrier_->Wait());
      60       37582 :       scavenger_->Process();
      61             :     }
      62       37570 :     if (FLAG_trace_parallel_scavenge) {
      63           0 :       PrintIsolate(heap_->isolate(),
      64             :                    "scavenge[%p]: time=%.2f copied=%zu promoted=%zu\n",
      65             :                    static_cast<void*>(this), scavenging_time,
      66           0 :                    scavenger_->bytes_copied(), scavenger_->bytes_promoted());
      67       37571 :     }
      68       37582 :   }
      69             : 
      70             :  private:
      71             :   Heap* const heap_;
      72             :   Scavenger* const scavenger_;
      73             :   OneshotBarrier* const barrier_;
      74             : };
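
// Note on the drain loop in RunInParallel above: the task first consumes its
// page items, then alternates Process(barrier_) with barrier_->Wait() until
// every participating task reports an empty work list. A minimal sketch of
// that termination pattern, with a simplified stand-in for the barrier in
// src/heap/barrier.h (WorkList and ProcessUntilEmpty are hypothetical):
//
//   void Worker(WorkList* list, OneshotBarrier* barrier) {
//     barrier->Start();                // register as an active participant
//     do {
//       ProcessUntilEmpty(list);       // may publish work other tasks steal
//     } while (!barrier->Wait());      // false: new work appeared, loop again
//   }
//
// Wait() returns true only once all participants are idle simultaneously;
// NotifyAll() (see Scavenger::Process below) wakes waiters when a task
// pushes work into the shared pool.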
      75             : 
      76           0 : class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
      77             :  public:
      78             :   IterateAndScavengePromotedObjectsVisitor(Scavenger* scavenger,
      79             :                                            bool record_slots)
      80    45636144 :       : scavenger_(scavenger), record_slots_(record_slots) {}
      81             : 
      82       64546 :   V8_INLINE void VisitPointers(HeapObject host, ObjectSlot start,
      83             :                                ObjectSlot end) final {
      84             :     VisitPointersImpl(host, start, end);
      85       64546 :   }
      86             : 
      87           8 :   V8_INLINE void VisitPointers(HeapObject host, MaybeObjectSlot start,
      88             :                                MaybeObjectSlot end) final {
      89             :     VisitPointersImpl(host, start, end);
      90           8 :   }
      91             : 
      92           0 :   V8_INLINE void VisitCodeTarget(Code host, RelocInfo* rinfo) final {
      93           0 :     Code target = Code::GetCodeFromTargetAddress(rinfo->target_address());
      94           0 :     HandleSlot(host, FullHeapObjectSlot(&target), target);
      95           0 :   }
      96           0 :   V8_INLINE void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) final {
      97           0 :     HeapObject heap_object = rinfo->target_object();
      98           0 :     HandleSlot(host, FullHeapObjectSlot(&heap_object), heap_object);
      99           0 :   }
     100             : 
     101             :  private:
     102             :   template <typename TSlot>
     103             :   V8_INLINE void VisitPointersImpl(HeapObject host, TSlot start, TSlot end) {
     104             :     using THeapObjectSlot = typename TSlot::THeapObjectSlot;
     105             :     // Treat weak references as strong.
     106             :     // TODO(marja): Proper weakness handling in the young generation.
     107   445165559 :     for (TSlot slot = start; slot < end; ++slot) {
     108   399947765 :       typename TSlot::TObject object = *slot;
     109   399542164 :       HeapObject heap_object;
     110   399542164 :       if (object.GetHeapObject(&heap_object)) {
     111   574430555 :         HandleSlot(host, THeapObjectSlot(slot), heap_object);
     112             :       }
     113             :     }
     114             :   }
     115             : 
     116             :   template <typename THeapObjectSlot>
     117             :   V8_INLINE void HandleSlot(HeapObject host, THeapObjectSlot slot,
     118             :                             HeapObject target) {
     119             :     static_assert(
     120             :         std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
     121             :             std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
     122             :         "Only FullHeapObjectSlot and HeapObjectSlot are expected here");
     123   287176609 :     scavenger_->PageMemoryFence(MaybeObject::FromObject(target));
     124             : 
     125   287261329 :     if (Heap::InFromPage(target)) {
     126    46656019 :       SlotCallbackResult result = scavenger_->ScavengeObject(slot, target);
     127    46642762 :       bool success = (*slot)->GetHeapObject(&target);
     128             :       USE(success);
     129             :       DCHECK(success);
     130             : 
     131    46635996 :       if (result == KEEP_SLOT) {
     132             :         SLOW_DCHECK(target->IsHeapObject());
     133      711252 :         RememberedSet<OLD_TO_NEW>::Insert(MemoryChunk::FromHeapObject(host),
     134      711274 :                                           slot.address());
     135             :       }
     136             :       SLOW_DCHECK(!MarkCompactCollector::IsOnEvacuationCandidate(
     137             :           HeapObject::cast(target)));
     138   485435210 :     } else if (record_slots_ && MarkCompactCollector::IsOnEvacuationCandidate(
     139   243963967 :                                     HeapObject::cast(target))) {
     140             :       // We cannot call MarkCompactCollector::RecordSlot because that checks
      141             :       // that the host page is not in the young generation, which does not hold
     142             :       // for pending large pages.
     143        2411 :       RememberedSet<OLD_TO_OLD>::Insert(MemoryChunk::FromHeapObject(host),
     144        2411 :                                         slot.address());
     145             :     }
     146             :   }
     147             : 
     148             :   Scavenger* const scavenger_;
     149             :   const bool record_slots_;
     150             : };
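
// HandleSlot above separates the two remembered-set cases. A condensed view
// of the decision, for orientation (illustrative shorthand, not real code):
//
//   if (Heap::InFromPage(target)) {
//     // Young target: scavenge it. If the forwarded copy is still in new
//     // space (KEEP_SLOT), the promoted host keeps an OLD_TO_NEW entry so
//     // the next scavenge can find and update this slot.
//   } else if (record_slots_ && is_on_evacuation_candidate(target)) {
//     // Old target on a page the mark-compactor will evacuate: record an
//     // OLD_TO_OLD entry so the pointer is fixed up after evacuation.
//   }
//
// record_slots_ is only true for black (fully marked) hosts; see
// Scavenger::IterateAndScavengePromotedObject below.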
     151             : 
     152       69375 : static bool IsUnscavengedHeapObject(Heap* heap, FullObjectSlot p) {
     153      138735 :   return Heap::InFromPage(*p) &&
     154       69375 :          !HeapObject::cast(*p)->map_word().IsForwardingAddress();
     155             : }
     156             : 
     157       23490 : class ScavengeWeakObjectRetainer : public WeakObjectRetainer {
     158             :  public:
     159       55323 :   Object RetainAs(Object object) override {
     160       55323 :     if (!Heap::InFromPage(object)) {
     161       55323 :       return object;
     162             :     }
     163             : 
     164             :     MapWord map_word = HeapObject::cast(object)->map_word();
     165           0 :     if (map_word.IsForwardingAddress()) {
     166           0 :       return map_word.ToForwardingAddress();
     167             :     }
     168           0 :     return Object();
     169             :   }
     170             : };
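
// RetainAs above relies on the scavenger's forwarding scheme: when an object
// in a from-page is evacuated, its map word is overwritten with a forwarding
// pointer to the new copy. So a from-page object whose map word is still a
// plain map was never copied, i.e. it is dead, and returning the empty
// Object() drops it from the weak list being processed.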
     171             : 
     172       61049 : ScavengerCollector::ScavengerCollector(Heap* heap)
     173      122098 :     : isolate_(heap->isolate()), heap_(heap), parallel_scavenge_semaphore_(0) {}
     174             : 
     175       23490 : void ScavengerCollector::CollectGarbage() {
     176             :   DCHECK(surviving_new_large_objects_.empty());
     177             :   ItemParallelJob job(isolate_->cancelable_task_manager(),
     178      117450 :                       &parallel_scavenge_semaphore_);
     179             :   const int kMainThreadId = 0;
     180             :   Scavenger* scavengers[kMaxScavengerTasks];
     181       23490 :   const bool is_logging = isolate_->LogObjectRelocation();
     182       23490 :   const int num_scavenge_tasks = NumberOfScavengeTasks();
     183       23490 :   OneshotBarrier barrier(base::TimeDelta::FromMilliseconds(kMaxWaitTimeMs));
     184       46980 :   Scavenger::CopiedList copied_list(num_scavenge_tasks);
     185             :   Scavenger::PromotionList promotion_list(num_scavenge_tasks);
     186       61340 :   for (int i = 0; i < num_scavenge_tasks; i++) {
     187             :     scavengers[i] = new Scavenger(this, heap_, is_logging, &copied_list,
     188      131091 :                                   &promotion_list, i);
     189       75700 :     job.AddTask(new ScavengingTask(heap_, scavengers[i], &barrier));
     190             :   }
     191             : 
     192             :   {
     193       46980 :     Sweeper* sweeper = heap_->mark_compact_collector()->sweeper();
     194             :     // Pause the concurrent sweeper.
     195       23490 :     Sweeper::PauseOrCompleteScope pause_scope(sweeper);
      196             :     // Filter out pages from the sweeper that need to be processed for
      197             :     // old-to-new slots by the Scavenger. After processing, the Scavenger
      198             :     // adds back pages that are still unswept. This way the Scavenger has
      199             :     // exclusive access to the slots of a page and can completely avoid
      200             :     // any locks on the page itself.
     201       46980 :     Sweeper::FilterSweepingPagesScope filter_scope(sweeper, pause_scope);
     202             :     filter_scope.FilterOldSpaceSweepingPages(
     203       24226 :         [](Page* page) { return !page->ContainsSlots<OLD_TO_NEW>(); });
     204             :     RememberedSet<OLD_TO_NEW>::IterateMemoryChunks(
     205      110665 :         heap_, [&job](MemoryChunk* chunk) {
     206      221330 :           job.AddItem(new PageScavengingItem(chunk));
     207      134155 :         });
     208             : 
     209       23490 :     RootScavengeVisitor root_scavenge_visitor(scavengers[kMainThreadId]);
     210             : 
     211             :     {
     212             :       // Identify weak unmodified handles. Requires an unmodified graph.
     213      117450 :       TRACE_GC(
     214             :           heap_->tracer(),
     215             :           GCTracer::Scope::SCAVENGER_SCAVENGE_WEAK_GLOBAL_HANDLES_IDENTIFY);
     216             :       isolate_->global_handles()->IdentifyWeakUnmodifiedObjects(
     217       70470 :           &JSObject::IsUnmodifiedApiObject);
     218             :     }
     219             :     {
     220             :       // Copy roots.
     221      117450 :       TRACE_GC(heap_->tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE_ROOTS);
     222       46980 :       heap_->IterateRoots(&root_scavenge_visitor, VISIT_ALL_IN_SCAVENGE);
     223             :     }
     224             :     {
     225             :       // Parallel phase scavenging all copied and promoted objects.
     226      117450 :       TRACE_GC(heap_->tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE_PARALLEL);
     227       23490 :       job.Run();
     228             :       DCHECK(copied_list.IsEmpty());
     229       23490 :       DCHECK(promotion_list.IsEmpty());
     230             :     }
     231             :     {
     232             :       // Scavenge weak global handles.
     233      117450 :       TRACE_GC(heap_->tracer(),
     234             :                GCTracer::Scope::SCAVENGER_SCAVENGE_WEAK_GLOBAL_HANDLES_PROCESS);
     235             :       isolate_->global_handles()->MarkYoungWeakUnmodifiedObjectsPending(
     236       46980 :           &IsUnscavengedHeapObject);
     237             :       isolate_->global_handles()->IterateYoungWeakUnmodifiedRootsForFinalizers(
     238       46980 :           &root_scavenge_visitor);
     239       23490 :       scavengers[kMainThreadId]->Process();
     240             : 
     241             :       DCHECK(copied_list.IsEmpty());
     242             :       DCHECK(promotion_list.IsEmpty());
     243             :       isolate_->global_handles()
     244             :           ->IterateYoungWeakUnmodifiedRootsForPhantomHandles(
     245       70470 :               &root_scavenge_visitor, &IsUnscavengedHeapObject);
     246             :     }
     247             : 
     248             :     {
     249             :       // Finalize parallel scavenging.
     250      117450 :       TRACE_GC(heap_->tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE_FINALIZE);
     251             : 
     252             :       DCHECK(surviving_new_large_objects_.empty());
     253             : 
     254       37850 :       for (int i = 0; i < num_scavenge_tasks; i++) {
     255       37850 :         scavengers[i]->Finalize();
     256       37850 :         delete scavengers[i];
     257             :       }
     258             : 
     259       46980 :       HandleSurvivingNewLargeObjects();
     260       23490 :     }
     261             :   }
     262             : 
     263             :   {
      264             :     // Update references into new space.
     265      117450 :     TRACE_GC(heap_->tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE_UPDATE_REFS);
     266             :     heap_->UpdateYoungReferencesInExternalStringTable(
     267       23490 :         &Heap::UpdateYoungReferenceInExternalStringTableEntry);
     268             : 
     269       70470 :     heap_->incremental_marking()->UpdateMarkingWorklistAfterScavenge();
     270             :   }
     271             : 
     272       23490 :   if (FLAG_concurrent_marking) {
      273             :     // Ensure that the concurrent marker does not track pages that are
     274             :     // going to be unmapped.
     275      109058 :     for (Page* p :
     276       22771 :          PageRange(heap_->new_space()->from_space().first_page(), nullptr)) {
     277      172574 :       heap_->concurrent_marking()->ClearMemoryChunkData(p);
     278             :     }
     279             :   }
     280             : 
     281       23490 :   ScavengeWeakObjectRetainer weak_object_retainer;
     282       23490 :   heap_->ProcessYoungWeakReferences(&weak_object_retainer);
     283             : 
     284             :   // Set age mark.
     285       23490 :   heap_->new_space_->set_age_mark(heap_->new_space()->top());
     286             : 
     287             :   {
     288      117450 :     TRACE_GC(heap_->tracer(), GCTracer::Scope::SCAVENGER_PROCESS_ARRAY_BUFFERS);
     289       46980 :     ArrayBufferTracker::PrepareToFreeDeadInNewSpace(heap_);
     290             :   }
     291       46980 :   heap_->array_buffer_collector()->FreeAllocations();
     292             : 
      293             :   // Since we promote all surviving large objects immediately, all remaining
     294             :   // large objects must be dead.
     295             :   // TODO(hpayer): Don't free all as soon as we have an intermediate generation.
     296       70470 :   heap_->new_lo_space()->FreeDeadObjects([](HeapObject) { return true; });
     297             : 
     298      111135 :   RememberedSet<OLD_TO_NEW>::IterateMemoryChunks(heap_, [](MemoryChunk* chunk) {
     299      111135 :     if (chunk->SweepingDone()) {
     300      110742 :       RememberedSet<OLD_TO_NEW>::FreeEmptyBuckets(chunk);
     301             :     } else {
     302         393 :       RememberedSet<OLD_TO_NEW>::PreFreeEmptyBuckets(chunk);
     303             :     }
     304      134625 :   });
     305             : 
     306             :   // Update how much has survived scavenge.
     307       46980 :   heap_->IncrementYoungSurvivorsCounter(heap_->SurvivedYoungObjectSize());
     308       23490 : }
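
// Phase summary of CollectGarbage above: (1) pause the concurrent sweeper
// and filter out pages with OLD_TO_NEW slots, (2) identify weak unmodified
// global handles, (3) scavenge the roots, (4) run the parallel drain
// (ItemParallelJob), (5) process weak global handles, (6) finalize each
// Scavenger and promote surviving new large objects, (7) update the external
// string table and incremental-marking worklists, (8) run the weak object
// retainer, (9) free dead array buffers and new-space large objects, and
// (10) shrink OLD_TO_NEW remembered-set buckets and update survival
// statistics.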
     309             : 
     310       23490 : void ScavengerCollector::HandleSurvivingNewLargeObjects() {
     311       46980 :   for (SurvivingNewLargeObjectMapEntry update_info :
     312             :        surviving_new_large_objects_) {
     313             :     HeapObject object = update_info.first;
     314             :     Map map = update_info.second;
     315             :     // Order is important here. We have to re-install the map to have access
      316             :     // to metadata like size during page promotion.
     317             :     object->set_map_word(MapWord::FromMap(map));
     318             :     LargePage* page = LargePage::FromHeapObject(object);
     319           0 :     heap_->lo_space()->PromoteNewLargeObject(page);
     320             :   }
     321             :   surviving_new_large_objects_.clear();
     322       23490 : }
     323             : 
     324       37850 : void ScavengerCollector::MergeSurvivingNewLargeObjects(
     325             :     const SurvivingNewLargeObjectsMap& objects) {
     326       75700 :   for (SurvivingNewLargeObjectMapEntry object : objects) {
     327             :     bool success = surviving_new_large_objects_.insert(object).second;
     328             :     USE(success);
     329             :     DCHECK(success);
     330             :   }
     331       37850 : }
     332             : 
     333       23490 : int ScavengerCollector::NumberOfScavengeTasks() {
     334       23490 :   if (!FLAG_parallel_scavenge) return 1;
     335             :   const int num_scavenge_tasks =
     336       46122 :       static_cast<int>(heap_->new_space()->TotalCapacity()) / MB;
     337       23061 :   static int num_cores = V8::GetCurrentPlatform()->NumberOfWorkerThreads() + 1;
     338             :   int tasks =
     339       23061 :       Max(1, Min(Min(num_scavenge_tasks, kMaxScavengerTasks), num_cores));
     340       23061 :   if (!heap_->CanExpandOldGeneration(
     341       23061 :           static_cast<size_t>(tasks * Page::kPageSize))) {
     342             :     // Optimize for memory usage near the heap limit.
     343             :     tasks = 1;
     344             :   }
     345       23061 :   return tasks;
     346             : }
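
// Worked example for the heuristic above (numbers illustrative): with a
// 16 MB new-space capacity and 7 worker threads, num_scavenge_tasks is 16,
// num_cores is 7 + 1 = 8, so tasks = Max(1, Min(Min(16, kMaxScavengerTasks),
// 8)), i.e. 8 whenever kMaxScavengerTasks >= 8. If old space cannot grow by
// tasks * Page::kPageSize, the heuristic drops back to a single task to
// conserve memory near the heap limit.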
     347             : 
     348       37850 : Scavenger::Scavenger(ScavengerCollector* collector, Heap* heap, bool is_logging,
     349             :                      CopiedList* copied_list, PromotionList* promotion_list,
     350             :                      int task_id)
     351             :     : collector_(collector),
     352             :       heap_(heap),
     353             :       promotion_list_(promotion_list, task_id),
     354             :       copied_list_(copied_list, task_id),
     355             :       local_pretenuring_feedback_(kInitialLocalPretenuringFeedbackCapacity),
     356             :       copied_size_(0),
     357             :       promoted_size_(0),
     358             :       allocator_(heap),
     359             :       is_logging_(is_logging),
     360             :       is_incremental_marking_(heap->incremental_marking()->IsMarking()),
     361      189250 :       is_compacting_(heap->incremental_marking()->IsCompacting()) {}
     362             : 
     363    45638515 : void Scavenger::IterateAndScavengePromotedObject(HeapObject target, Map map,
     364             :                                                  int size) {
      365             :   // We do not collect slots on new-space objects during mutation, so we
      366             :   // have to scan for pointers to evacuation candidates when we promote
      367             :   // objects. But we must not record slots in non-black objects: a grey
      368             :   // object's slots will be rescanned anyway, and a white object might not
      369             :   // survive until the end of the collection, so recording its slots would
      370             :   // violate the invariant.
     371             :   const bool record_slots =
     372    46191588 :       is_compacting_ &&
     373             :       heap()->incremental_marking()->atomic_marking_state()->IsBlack(target);
     374             :   IterateAndScavengePromotedObjectsVisitor visitor(this, record_slots);
     375             :   target->IterateBodyFast(map, size, &visitor);
     376    45522971 : }
     377             : 
     378      110808 : void Scavenger::AddPageToSweeperIfNecessary(MemoryChunk* page) {
     379      110569 :   AllocationSpace space = page->owner()->identity();
     380      187638 :   if ((space == OLD_SPACE) && !page->SweepingDone()) {
     381             :     heap()->mark_compact_collector()->sweeper()->AddPage(
     382             :         space, reinterpret_cast<Page*>(page),
     383         239 :         Sweeper::READD_TEMPORARY_REMOVED_PAGE);
     384             :   }
     385      110569 : }
     386             : 
     387      110578 : void Scavenger::ScavengePage(MemoryChunk* page) {
     388      110578 :   CodePageMemoryModificationScope memory_modification_scope(page);
     389             :   RememberedSet<OLD_TO_NEW>::Iterate(page,
     390             :                                      [this](MaybeObjectSlot addr) {
     391             :                                        return CheckAndScavengeObject(heap_,
     392    43032220 :                                                                      addr);
     393    43032220 :                                      },
     394      110582 :                                      SlotSet::KEEP_EMPTY_BUCKETS);
     395             :   RememberedSet<OLD_TO_NEW>::IterateTyped(
     396             :       page, [=](SlotType type, Address addr) {
     397             :         return UpdateTypedSlotHelper::UpdateTypedSlot(
     398             :             heap_, type, addr, [this](FullMaybeObjectSlot slot) {
     399      103512 :               return CheckAndScavengeObject(heap(), slot);
     400      207203 :             });
     401      214294 :       });
     402             : 
     403      110597 :   AddPageToSweeperIfNecessary(page);
     404      110582 : }
     405             : 
     406      128047 : void Scavenger::Process(OneshotBarrier* barrier) {
     407             :   ScavengeVisitor scavenge_visitor(this);
     408             : 
     409      128047 :   const bool have_barrier = barrier != nullptr;
     410             :   bool done;
     411             :   size_t objects = 0;
     412      123873 :   do {
     413             :     done = true;
     414      152662 :     ObjectAndSize object_and_size;
     415   208805169 :     while (promotion_list_.ShouldEagerlyProcessPromotionList() &&
     416             :            copied_list_.Pop(&object_and_size)) {
     417             :       scavenge_visitor.Visit(object_and_size.first);
     418             :       done = false;
     419    54031298 :       if (have_barrier && ((++objects % kInterruptThreshold) == 0)) {
     420    55010273 :         if (!copied_list_.IsGlobalPoolEmpty()) {
     421      141615 :           barrier->NotifyAll();
     422             :         }
     423             :       }
     424             :     }
     425             : 
     426             :     struct PromotionListEntry entry;
     427    45760267 :     while (promotion_list_.Pop(&entry)) {
     428    45636394 :       HeapObject target = entry.heap_object;
     429             :       DCHECK(!target->IsMap());
     430    45636394 :       IterateAndScavengePromotedObject(target, entry.map, entry.size);
     431             :       done = false;
     432    45527865 :       if (have_barrier && ((++objects % kInterruptThreshold) == 0)) {
     433      349903 :         if (!promotion_list_.IsGlobalPoolEmpty()) {
     434      118584 :           barrier->NotifyAll();
     435             :         }
     436             :       }
     437             :     }
     438             :   } while (!done);
     439       99258 : }
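
// The (++objects % kInterruptThreshold) checks above batch the barrier
// notifications: signalling peers after every single object would contend
// on the barrier, so a task only calls barrier->NotifyAll() once per
// kInterruptThreshold processed objects, and only when the shared global
// pool is non-empty, i.e. when there is actually work for idle tasks to
// steal.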
     440             : 
     441      151400 : void Scavenger::Finalize() {
     442       75700 :   heap()->MergeAllocationSitePretenuringFeedback(local_pretenuring_feedback_);
     443       37850 :   heap()->IncrementSemiSpaceCopiedObjectSize(copied_size_);
     444       37850 :   heap()->IncrementPromotedObjectsSize(promoted_size_);
     445       37850 :   collector_->MergeSurvivingNewLargeObjects(surviving_new_large_objects_);
     446       37850 :   allocator_.Finalize();
     447       37850 : }
     448             : 
     449    38387144 : void RootScavengeVisitor::VisitRootPointer(Root root, const char* description,
     450             :                                            FullObjectSlot p) {
     451             :   DCHECK(!HasWeakHeapObjectTag(*p));
     452    38387144 :   ScavengePointer(p);
     453    38387144 : }
     454             : 
     455     2269641 : void RootScavengeVisitor::VisitRootPointers(Root root, const char* description,
     456             :                                             FullObjectSlot start,
     457             :                                             FullObjectSlot end) {
     458             :   // Copy all HeapObject pointers in [start, end)
     459    52393482 :   for (FullObjectSlot p = start; p < end; ++p) ScavengePointer(p);
     460     2269641 : }
     461             : 
     462    86241344 : void RootScavengeVisitor::ScavengePointer(FullObjectSlot p) {
     463             :   Object object = *p;
     464             :   DCHECK(!HasWeakHeapObjectTag(object));
     465    86241344 :   if (Heap::InYoungGeneration(object)) {
     466     9138991 :     scavenger_->ScavengeObject(FullHeapObjectSlot(p), HeapObject::cast(object));
     467             :   }
     468    86241344 : }
     469             : 
     470           0 : RootScavengeVisitor::RootScavengeVisitor(Scavenger* scavenger)
     471       23490 :     : scavenger_(scavenger) {}
     472             : 
     473           0 : ScavengeVisitor::ScavengeVisitor(Scavenger* scavenger)
     474      128047 :     : scavenger_(scavenger) {}
     475             : 
     476             : }  // namespace internal
     477      178779 : }  // namespace v8

Generated by: LCOV version 1.10