LCOV - code coverage report
Current view: top level - src/heap - incremental-marking.cc (source / functions)
Test: app.info
Date: 2019-02-19
                 Hit    Total    Coverage
Lines:           438    498     88.0 %
Functions:        58     68     85.3 %

          Line data    Source code
       1             : // Copyright 2012 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #include "src/heap/incremental-marking.h"
       6             : 
       7             : #include "src/compilation-cache.h"
       8             : #include "src/conversions.h"
       9             : #include "src/heap/concurrent-marking.h"
      10             : #include "src/heap/embedder-tracing.h"
      11             : #include "src/heap/gc-idle-time-handler.h"
      12             : #include "src/heap/gc-tracer.h"
      13             : #include "src/heap/heap-inl.h"
      14             : #include "src/heap/incremental-marking-inl.h"
      15             : #include "src/heap/mark-compact-inl.h"
      16             : #include "src/heap/object-stats.h"
      17             : #include "src/heap/objects-visiting-inl.h"
      18             : #include "src/heap/objects-visiting.h"
      19             : #include "src/heap/sweeper.h"
      20             : #include "src/objects/data-handler-inl.h"
      21             : #include "src/objects/embedder-data-array-inl.h"
      22             : #include "src/objects/hash-table-inl.h"
      23             : #include "src/objects/slots-inl.h"
      24             : #include "src/tracing/trace-event.h"
      25             : #include "src/transitions-inl.h"
      26             : #include "src/v8.h"
      27             : #include "src/visitors.h"
      28             : #include "src/vm-state-inl.h"
      29             : 
      30             : namespace v8 {
      31             : namespace internal {
      32             : 
      33             : using IncrementalMarkingMarkingVisitor =
      34             :     MarkingVisitor<FixedArrayVisitationMode::kIncremental,
      35             :                    TraceRetainingPathMode::kDisabled,
      36             :                    IncrementalMarking::MarkingState>;
      37             : 
      38       71799 : void IncrementalMarking::Observer::Step(int bytes_allocated, Address addr,
      39             :                                         size_t size) {
      40       71799 :   Heap* heap = incremental_marking_.heap();
      41             :   VMState<GC> state(heap->isolate());
      42             :   RuntimeCallTimerScope runtime_timer(
      43             :       heap->isolate(),
      44       71799 :       RuntimeCallCounterId::kGC_Custom_IncrementalMarkingObserver);
      45       71799 :   incremental_marking_.AdvanceOnAllocation();
      46             :   // AdvanceOnAllocation can start incremental marking.
      47       71799 :   incremental_marking_.EnsureBlackAllocated(addr, size);
      48       71799 : }
      49             : 
      50       61049 : IncrementalMarking::IncrementalMarking(
      51             :     Heap* heap, MarkCompactCollector::MarkingWorklist* marking_worklist,
      52             :     WeakObjects* weak_objects)
      53             :     : heap_(heap),
      54             :       marking_worklist_(marking_worklist),
      55             :       weak_objects_(weak_objects),
      56             :       initial_old_generation_size_(0),
      57             :       bytes_marked_(0),
      58             :       scheduled_bytes_to_mark_(0),
      59             :       schedule_update_time_ms_(0),
      60             :       bytes_marked_concurrently_(0),
      61             :       unscanned_bytes_of_large_object_(0),
      62             :       is_compacting_(false),
      63             :       should_hurry_(false),
      64             :       was_activated_(false),
      65             :       black_allocation_(false),
      66             :       finalize_marking_completed_(false),
      67             :       request_type_(NONE),
      68             :       new_generation_observer_(*this, kYoungGenerationAllocatedThreshold),
      69       61049 :       old_generation_observer_(*this, kOldGenerationAllocatedThreshold) {
      70             :   DCHECK_NOT_NULL(marking_worklist_);
      71             :   SetState(STOPPED);
      72       61049 : }
      73             : 
      74             : bool IncrementalMarking::BaseRecordWrite(HeapObject obj, Object value) {
      75   292664928 :   HeapObject value_heap_obj = HeapObject::cast(value);
      76             :   DCHECK(!marking_state()->IsImpossible(value_heap_obj));
      77             :   DCHECK(!marking_state()->IsImpossible(obj));
      78             : #ifdef V8_CONCURRENT_MARKING
      79             :   // The write barrier stub generated with V8_CONCURRENT_MARKING does not
      80             :   // check the color of the source object.
      81             :   const bool need_recording = true;
      82             : #else
      83             :   const bool need_recording = marking_state()->IsBlack(obj);
      84             : #endif
      85             : 
      86   292664928 :   if (need_recording && WhiteToGreyAndPush(value_heap_obj)) {
      87    23007551 :     RestartIfNotMarking();
      88             :   }
      89   292664943 :   return is_compacting_ && need_recording;
      90             : }
      91             : 
      92   292373094 : void IncrementalMarking::RecordWriteSlow(HeapObject obj, HeapObjectSlot slot,
      93             :                                          Object value) {
      94   314920916 :   if (BaseRecordWrite(obj, value) && slot.address() != kNullAddress) {
      95             :     // Object is not going to be rescanned, so we need to record the slot.
      96             :     heap_->mark_compact_collector()->RecordSlot(obj, slot,
      97             :                                                 HeapObject::cast(value));
      98             :   }
      99   292373055 : }
     100             : 
     101     6871297 : int IncrementalMarking::RecordWriteFromCode(Address raw_obj,
     102             :                                             Address slot_address,
     103             :                                             Isolate* isolate) {
     104             :   HeapObject obj = HeapObject::cast(Object(raw_obj));
     105             :   MaybeObjectSlot slot(slot_address);
     106             :   isolate->heap()->incremental_marking()->RecordMaybeWeakWrite(obj, slot,
     107             :                                                                *slot);
      108             :   // Called by RecordWriteCodeStubAssembler, which doesn't accept void type.
     109     6871298 :   return 0;
     110             : }
     111             : 
     112      291888 : void IncrementalMarking::RecordWriteIntoCode(Code host, RelocInfo* rinfo,
     113             :                                              HeapObject value) {
     114             :   DCHECK(IsMarking());
     115      291888 :   if (BaseRecordWrite(host, value)) {
     116             :     // Object is not going to be rescanned.  We need to record the slot.
     117        4123 :     heap_->mark_compact_collector()->RecordRelocSlot(host, rinfo, value);
     118             :   }
     119      291888 : }
     120             : 
     121   621159417 : bool IncrementalMarking::WhiteToGreyAndPush(HeapObject obj) {
     122   526845023 :   if (marking_state()->WhiteToGrey(obj)) {
     123             :     marking_worklist()->Push(obj);
     124    94315332 :     return true;
     125             :   }
     126             :   return false;
     127             : }
     128             : 
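A minimal sketch of the tri-color discipline that BaseRecordWrite and
WhiteToGreyAndPush above maintain; all names below are simplified stand-ins,
not V8 API. Objects move white -> grey -> black, and the write barrier
(mirroring the non-concurrent need_recording path) keeps a black object from
hiding a white one from the marker:

    #include <deque>
    #include <vector>

    enum class Color { kWhite, kGrey, kBlack };

    struct ToyObject {
      Color color = Color::kWhite;
      std::vector<ToyObject*> fields;
    };

    // Write barrier: storing `value` into an already-scanned (black) host
    // must re-expose a white value to the marker.
    void ToyRecordWrite(ToyObject* host, ToyObject* value,
                        std::deque<ToyObject*>* worklist) {
      if (host->color == Color::kBlack && value->color == Color::kWhite) {
        value->color = Color::kGrey;  // WhiteToGrey
        worklist->push_back(value);   // ...AndPush
      }
    }
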
     129     3271878 : void IncrementalMarking::MarkBlackAndVisitObjectDueToLayoutChange(
     130     3271878 :     HeapObject obj) {
     131     6543756 :   TRACE_EVENT0("v8", "V8.GCIncrementalMarkingLayoutChange");
     132    13087517 :   TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_LAYOUT_CHANGE);
     133             :   marking_state()->WhiteToGrey(obj);
     134     3271881 :   if (marking_state()->GreyToBlack(obj)) {
     135     1958543 :     RevisitObject(obj);
     136     3271880 :   }
     137     3271880 : }
     138             : 
     139       40594 : void IncrementalMarking::NotifyLeftTrimming(HeapObject from, HeapObject to) {
     140             :   DCHECK(IsMarking());
     141             :   DCHECK(MemoryChunk::FromHeapObject(from)->SweepingDone());
     142             :   DCHECK_EQ(MemoryChunk::FromHeapObject(from), MemoryChunk::FromHeapObject(to));
     143             :   DCHECK_NE(from, to);
     144             : 
     145             :   MarkBit new_mark_bit = marking_state()->MarkBitFrom(to);
     146             : 
     147       40594 :   if (black_allocation() && Marking::IsBlack<kAtomicity>(new_mark_bit)) {
     148             :     // Nothing to do if the object is in black area.
     149       20297 :     return;
     150             :   }
     151       20142 :   MarkBlackAndVisitObjectDueToLayoutChange(from);
     152             :   DCHECK(marking_state()->IsBlack(from));
     153             :   // Mark the new address as black.
     154       40284 :   if (from->address() + kTaggedSize == to->address()) {
     155             :     // The old and the new markbits overlap. The |to| object has the
     156             :     // grey color. To make it black, we need to set the second bit.
     157             :     DCHECK(new_mark_bit.Get<kAtomicity>());
     158             :     new_mark_bit.Next().Set<kAtomicity>();
     159             :   } else {
     160             :     bool success = Marking::WhiteToBlack<kAtomicity>(new_mark_bit);
     161             :     DCHECK(success);
     162             :     USE(success);
     163             :   }
     164             :   DCHECK(marking_state()->IsBlack(to));
     165             : }
     166             : 
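A worked picture of the overlapping-mark-bit case handled above (editorial
sketch; bit indices are illustrative). An object's color uses two consecutive
bitmap bits starting at its address: 00 = white, 10 = grey, 11 = black. When
|to| starts exactly one tagged word after |from|, the two bit pairs share a
bit:

    bitmap index:   n    n+1    n+2
    from (black):  [ 1    1 ]
    to:                 [ 1     ? ]   <- already reads as grey

After |from| is blackened, making |to| black only requires setting bit n+2,
which is what new_mark_bit.Next().Set<kAtomicity>() does above.
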
     167           0 : class IncrementalMarkingRootMarkingVisitor : public RootVisitor {
     168             :  public:
     169             :   explicit IncrementalMarkingRootMarkingVisitor(
     170       45390 :       IncrementalMarking* incremental_marking)
     171       90780 :       : heap_(incremental_marking->heap()) {}
     172             : 
     173   140608113 :   void VisitRootPointer(Root root, const char* description,
     174             :                         FullObjectSlot p) override {
     175   140608113 :     MarkObjectByPointer(p);
     176   140608126 :   }
     177             : 
     178     1405277 :   void VisitRootPointers(Root root, const char* description,
     179             :                          FullObjectSlot start, FullObjectSlot end) override {
     180    28977703 :     for (FullObjectSlot p = start; p < end; ++p) MarkObjectByPointer(p);
     181     1405277 :   }
     182             : 
     183             :  private:
     184   166774968 :   void MarkObjectByPointer(FullObjectSlot p) {
     185   166774968 :     Object obj = *p;
     186   171730333 :     if (!obj->IsHeapObject()) return;
     187             : 
     188   323638578 :     heap_->incremental_marking()->WhiteToGreyAndPush(HeapObject::cast(obj));
     189             :   }
     190             : 
     191             :   Heap* heap_;
     192             : };
     193             : 
     194           0 : void IncrementalMarking::DeactivateIncrementalWriteBarrierForSpace(
     195             :     PagedSpace* space) {
     196      209243 :   for (Page* p : *space) {
     197      145307 :     p->SetOldGenerationPageFlags(false);
     198             :   }
     199           0 : }
     200             : 
     201             : 
     202           0 : void IncrementalMarking::DeactivateIncrementalWriteBarrierForSpace(
     203             :     NewSpace* space) {
     204       90187 :   for (Page* p : *space) {
     205       68875 :     p->SetYoungGenerationPageFlags(false);
     206             :   }
     207           0 : }
     208             : 
     209             : 
     210       21312 : void IncrementalMarking::DeactivateIncrementalWriteBarrier() {
     211      149184 :   DeactivateIncrementalWriteBarrierForSpace(heap_->old_space());
     212       21312 :   DeactivateIncrementalWriteBarrierForSpace(heap_->map_space());
     213       21312 :   DeactivateIncrementalWriteBarrierForSpace(heap_->code_space());
     214       21312 :   DeactivateIncrementalWriteBarrierForSpace(heap_->new_space());
     215             : 
     216       42624 :   for (LargePage* p : *heap_->new_lo_space()) {
     217           0 :     p->SetYoungGenerationPageFlags(false);
     218             :     DCHECK(p->IsLargePage());
     219             :   }
     220             : 
     221       52305 :   for (LargePage* p : *heap_->lo_space()) {
     222        9681 :     p->SetOldGenerationPageFlags(false);
     223             :   }
     224             : 
     225       60981 :   for (LargePage* p : *heap_->code_lo_space()) {
     226       18357 :     p->SetOldGenerationPageFlags(false);
     227             :   }
     228       21312 : }
     229             : 
     230             : 
     231           0 : void IncrementalMarking::ActivateIncrementalWriteBarrier(PagedSpace* space) {
     232      224548 :   for (Page* p : *space) {
     233      144712 :     p->SetOldGenerationPageFlags(true);
     234             :   }
     235           0 : }
     236             : 
     237             : 
     238           0 : void IncrementalMarking::ActivateIncrementalWriteBarrier(NewSpace* space) {
     239      105438 :   for (Page* p : *space) {
     240       78826 :     p->SetYoungGenerationPageFlags(true);
     241             :   }
     242           0 : }
     243             : 
     244             : 
     245       26612 : void IncrementalMarking::ActivateIncrementalWriteBarrier() {
     246      186284 :   ActivateIncrementalWriteBarrier(heap_->old_space());
     247       26612 :   ActivateIncrementalWriteBarrier(heap_->map_space());
     248       26612 :   ActivateIncrementalWriteBarrier(heap_->code_space());
     249       26612 :   ActivateIncrementalWriteBarrier(heap_->new_space());
     250             : 
     251       53224 :   for (LargePage* p : *heap_->new_lo_space()) {
     252           0 :     p->SetYoungGenerationPageFlags(true);
     253             :     DCHECK(p->IsLargePage());
     254             :   }
     255             : 
     256       60925 :   for (LargePage* p : *heap_->lo_space()) {
     257        7701 :     p->SetOldGenerationPageFlags(true);
     258             :   }
     259             : 
     260       72803 :   for (LargePage* p : *heap_->code_lo_space()) {
     261       19579 :     p->SetOldGenerationPageFlags(true);
     262             :   }
     263       26612 : }
     264             : 
     265             : 
     266       74505 : bool IncrementalMarking::WasActivated() { return was_activated_; }
     267             : 
     268             : 
     269     1324399 : bool IncrementalMarking::CanBeActivated() {
     270             :   // Only start incremental marking in a safe state: 1) when incremental
     271             :   // marking is turned on, 2) when we are currently not in a GC, and
     272             :   // 3) when we are currently not serializing or deserializing the heap.
     273     1310335 :   return FLAG_incremental_marking && heap_->gc_state() == Heap::NOT_IN_GC &&
     274     2147112 :          heap_->deserialization_complete() &&
     275     2147112 :          !heap_->isolate()->serializer_enabled();
     276             : }
     277             : 
     278             : 
     279       21312 : void IncrementalMarking::Deactivate() {
     280       21312 :   DeactivateIncrementalWriteBarrier();
     281       21312 : }
     282             : 
     283      108446 : void IncrementalMarking::Start(GarbageCollectionReason gc_reason) {
     284       30410 :   if (FLAG_trace_incremental_marking) {
     285             :     int old_generation_size_mb =
     286           5 :         static_cast<int>(heap()->OldGenerationSizeOfObjects() / MB);
     287             :     int old_generation_limit_mb =
     288           5 :         static_cast<int>(heap()->old_generation_allocation_limit() / MB);
     289             :     heap()->isolate()->PrintWithTimestamp(
     290             :         "[IncrementalMarking] Start (%s): old generation %dMB, limit %dMB, "
     291             :         "slack %dMB\n",
     292             :         Heap::GarbageCollectionReasonToString(gc_reason),
     293             :         old_generation_size_mb, old_generation_limit_mb,
     294          15 :         Max(0, old_generation_limit_mb - old_generation_size_mb));
     295             :   }
     296             :   DCHECK(FLAG_incremental_marking);
     297             :   DCHECK(state_ == STOPPED);
     298             :   DCHECK(heap_->gc_state() == Heap::NOT_IN_GC);
     299             :   DCHECK(!heap_->isolate()->serializer_enabled());
     300             : 
     301       30410 :   Counters* counters = heap_->isolate()->counters();
     302             : 
     303             :   counters->incremental_marking_reason()->AddSample(
     304       30410 :       static_cast<int>(gc_reason));
     305             :   HistogramTimerScope incremental_marking_scope(
     306       30410 :       counters->gc_incremental_marking_start());
     307       91230 :   TRACE_EVENT0("v8", "V8.GCIncrementalMarkingStart");
     308      121640 :   TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_START);
     309       60820 :   heap_->tracer()->NotifyIncrementalMarkingStart();
     310             : 
     311       30410 :   start_time_ms_ = heap()->MonotonicallyIncreasingTimeInMs();
     312       30410 :   initial_old_generation_size_ = heap_->OldGenerationSizeOfObjects();
     313       60820 :   old_generation_allocation_counter_ = heap_->OldGenerationAllocationCounter();
     314       30410 :   bytes_marked_ = 0;
     315       30410 :   scheduled_bytes_to_mark_ = 0;
     316       30410 :   schedule_update_time_ms_ = start_time_ms_;
     317       30410 :   bytes_marked_concurrently_ = 0;
     318       30410 :   should_hurry_ = false;
     319       30410 :   was_activated_ = true;
     320             : 
     321       91230 :   if (!heap_->mark_compact_collector()->sweeping_in_progress()) {
     322       13209 :     StartMarking();
     323             :   } else {
     324       17201 :     if (FLAG_trace_incremental_marking) {
     325             :       heap()->isolate()->PrintWithTimestamp(
     326           0 :           "[IncrementalMarking] Start sweeping.\n");
     327             :     }
     328             :     SetState(SWEEPING);
     329             :   }
     330             : 
     331             :   heap_->AddAllocationObserversToAllSpaces(&old_generation_observer_,
     332       30410 :                                            &new_generation_observer_);
     333       30410 :   incremental_marking_job()->Start(heap_);
     334       30410 : }
     335             : 
     336             : 
     337       79841 : void IncrementalMarking::StartMarking() {
     338       79558 :   if (heap_->isolate()->serializer_enabled()) {
     339             :     // Black allocation currently starts when we start incremental marking,
     340             :     // but we cannot enable black allocation while deserializing. Hence, we
     341             :     // have to delay the start of incremental marking in that case.
     342           0 :     if (FLAG_trace_incremental_marking) {
     343             :       heap()->isolate()->PrintWithTimestamp(
     344           0 :           "[IncrementalMarking] Start delayed - serializer\n");
     345             :     }
     346           0 :     return;
     347             :   }
     348       26612 :   if (FLAG_trace_incremental_marking) {
     349             :     heap()->isolate()->PrintWithTimestamp(
     350           5 :         "[IncrementalMarking] Start marking\n");
     351             :   }
     352             : 
     353             :   is_compacting_ =
     354       53224 :       !FLAG_never_compact && heap_->mark_compact_collector()->StartCompaction();
     355             : 
     356             :   SetState(MARKING);
     357             : 
     358             :   {
     359      106448 :     TRACE_GC(heap()->tracer(),
     360             :              GCTracer::Scope::MC_INCREMENTAL_EMBEDDER_PROLOGUE);
     361       79836 :     heap_->local_embedder_heap_tracer()->TracePrologue();
     362             :   }
     363             : 
     364       26612 :   ActivateIncrementalWriteBarrier();
     365             : 
     366             : // Marking bits are cleared by the sweeper.
     367             : #ifdef VERIFY_HEAP
     368             :   if (FLAG_verify_heap) {
     369             :     heap_->mark_compact_collector()->VerifyMarkbitsAreClean();
     370             :   }
     371             : #endif
     372             : 
     373       53224 :   heap_->isolate()->compilation_cache()->MarkCompactPrologue();
     374             : 
     375       26612 :   StartBlackAllocation();
     376             : 
     377             :   // Mark strong roots grey.
     378             :   IncrementalMarkingRootMarkingVisitor visitor(this);
     379       26612 :   heap_->IterateStrongRoots(&visitor, VISIT_ONLY_STRONG);
     380             : 
     381       52946 :   if (FLAG_concurrent_marking && !heap_->IsTearingDown()) {
     382       52668 :     heap_->concurrent_marking()->ScheduleTasks();
     383             :   }
     384             : 
     385             :   // Ready to start incremental marking.
     386       26612 :   if (FLAG_trace_incremental_marking) {
     387           5 :     heap()->isolate()->PrintWithTimestamp("[IncrementalMarking] Running\n");
     388             :   }
     389             : }
     390             : 
     391      109609 : void IncrementalMarking::StartBlackAllocation() {
     392             :   DCHECK(!black_allocation_);
     393             :   DCHECK(IsMarking());
     394       27401 :   black_allocation_ = true;
     395       27401 :   heap()->old_space()->MarkLinearAllocationAreaBlack();
     396       27401 :   heap()->map_space()->MarkLinearAllocationAreaBlack();
     397       27401 :   heap()->code_space()->MarkLinearAllocationAreaBlack();
     398       27401 :   if (FLAG_trace_incremental_marking) {
     399             :     heap()->isolate()->PrintWithTimestamp(
     400           5 :         "[IncrementalMarking] Black allocation started\n");
     401             :   }
     402       27401 : }
     403             : 
     404        2367 : void IncrementalMarking::PauseBlackAllocation() {
     405             :   DCHECK(IsMarking());
     406         789 :   heap()->old_space()->UnmarkLinearAllocationArea();
     407         789 :   heap()->map_space()->UnmarkLinearAllocationArea();
     408         789 :   heap()->code_space()->UnmarkLinearAllocationArea();
     409         789 :   if (FLAG_trace_incremental_marking) {
     410             :     heap()->isolate()->PrintWithTimestamp(
     411           0 :         "[IncrementalMarking] Black allocation paused\n");
     412             :   }
     413         789 :   black_allocation_ = false;
     414         789 : }
     415             : 
     416       24869 : void IncrementalMarking::FinishBlackAllocation() {
     417       24864 :   if (black_allocation_) {
     418       21312 :     black_allocation_ = false;
     419       21312 :     if (FLAG_trace_incremental_marking) {
     420             :       heap()->isolate()->PrintWithTimestamp(
     421           5 :           "[IncrementalMarking] Black allocation finished\n");
     422             :     }
     423             :   }
     424       24864 : }
     425             : 
     426       71799 : void IncrementalMarking::EnsureBlackAllocated(Address allocated, size_t size) {
     427       71799 :   if (black_allocation() && allocated != kNullAddress) {
     428             :     HeapObject object = HeapObject::FromAddress(allocated);
     429      135995 :     if (marking_state()->IsWhite(object) && !Heap::InYoungGeneration(object)) {
     430        1008 :       if (heap_->IsLargeObject(object)) {
     431             :         marking_state()->WhiteToBlack(object);
     432             :       } else {
     433             :         Page::FromAddress(allocated)->CreateBlackArea(allocated,
     434        1222 :                                                       allocated + size);
     435             :       }
     436             :     }
     437             :   }
     438       71799 : }
     439             : 
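Context for EnsureBlackAllocated, with a hypothetical helper (ToyPage is not
V8's Page class): under black allocation, old-generation memory handed out
during marking is pre-marked so the marker never revisits it. But
AdvanceOnAllocation() in the observer's Step() can itself start marking, in
which case the object just allocated at `allocated` predates black allocation
and may still be white; on a regular page the fix is to blacken the whole
allocated range:

    #include <cstddef>
    #include <vector>

    struct ToyPage {
      std::vector<bool> mark_bits;  // one bit per tagged word on the page
      explicit ToyPage(size_t words) : mark_bits(words, false) {}
      // Mark [from_word, to_word) black so incremental marking skips it.
      void CreateBlackArea(size_t from_word, size_t to_word) {
        for (size_t i = from_word; i < to_word; ++i) mark_bits[i] = true;
      }
    };
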
     440       18778 : void IncrementalMarking::MarkRoots() {
     441             :   DCHECK(!finalize_marking_completed_);
     442             :   DCHECK(IsMarking());
     443             : 
     444             :   IncrementalMarkingRootMarkingVisitor visitor(this);
     445       18778 :   heap_->IterateStrongRoots(&visitor, VISIT_ONLY_STRONG);
     446       18778 : }
     447             : 
     448       17126 : bool IncrementalMarking::ShouldRetainMap(Map map, int age) {
     449       17126 :   if (age == 0) {
     450             :     // The map has aged. Do not retain this map.
     451             :     return false;
     452             :   }
     453       15910 :   Object constructor = map->GetConstructor();
     454       31820 :   if (!constructor->IsHeapObject() ||
     455             :       marking_state()->IsWhite(HeapObject::cast(constructor))) {
     456             :     // The constructor is dead, no new objects with this map can
     457             :     // be created. Do not retain this map.
     458             :     return false;
     459             :   }
     460        4385 :   return true;
     461             : }
     462             : 
     463             : 
     464       37556 : void IncrementalMarking::RetainMaps() {
     465             :   // Do not retain dead maps if flag disables it or there is
     466             :   // - memory pressure (reduce_memory_footprint_),
     467             :   // - GC is requested by tests or dev-tools (abort_incremental_marking_).
     468       56316 :   bool map_retaining_is_disabled = heap()->ShouldReduceMemory() ||
     469       18760 :                                    FLAG_retain_maps_for_n_gc == 0;
     470       18778 :   WeakArrayList retained_maps = heap()->retained_maps();
     471             :   int length = retained_maps->length();
     472             :   // The number_of_disposed_maps separates maps in the retained_maps
     473             :   // array that were created before and after context disposal.
      474             :   // We neither age nor retain disposed maps, to avoid memory leaks.
     475       18778 :   int number_of_disposed_maps = heap()->number_of_disposed_maps_;
     476       65689 :   for (int i = 0; i < length; i += 2) {
     477       46911 :     MaybeObject value = retained_maps->Get(i);
     478       46911 :     HeapObject map_heap_object;
     479       46911 :     if (!value->GetHeapObjectIfWeak(&map_heap_object)) {
     480       15671 :       continue;
     481             :     }
     482       62480 :     int age = retained_maps->Get(i + 1).ToSmi().value();
     483             :     int new_age;
     484             :     Map map = Map::cast(map_heap_object);
     485       56314 :     if (i >= number_of_disposed_maps && !map_retaining_is_disabled &&
     486             :         marking_state()->IsWhite(map)) {
     487       17126 :       if (ShouldRetainMap(map, age)) {
     488        4385 :         WhiteToGreyAndPush(map);
     489             :       }
     490       17126 :       Object prototype = map->prototype();
     491       48946 :       if (age > 0 && prototype->IsHeapObject() &&
     492             :           marking_state()->IsWhite(HeapObject::cast(prototype))) {
     493             :         // The prototype is not marked, age the map.
     494       15530 :         new_age = age - 1;
     495             :       } else {
     496             :         // The prototype and the constructor are marked, this map keeps only
     497             :         // transition tree alive, not JSObjects. Do not age the map.
     498             :         new_age = age;
     499             :       }
     500             :     } else {
     501       14114 :       new_age = FLAG_retain_maps_for_n_gc;
     502             :     }
     503             :     // Compact the array and update the age.
     504       31240 :     if (new_age != age) {
     505       15642 :       retained_maps->Set(i + 1, MaybeObject::FromSmi(Smi::FromInt(new_age)));
     506             :     }
     507             :   }
     508       18778 : }
     509             : 
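The shape of the retained_maps list walked above (hence the i += 2 stride) is
a WeakArrayList of (weak map, age) pairs, for example:

    index:   0           1       2           3
    entry:   weak(MapA)  Smi(2)  weak(MapB)  Smi(0)

Each finalization, an unmarked, undisposed map is retained (pushed grey) while
ShouldRetainMap sees a live constructor, and aged by one while its prototype
is unmarked; a map that is already marked, disposed, or encountered while map
retaining is disabled has its age reset to FLAG_retain_maps_for_n_gc.
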
     510       18783 : void IncrementalMarking::FinalizeIncrementally() {
     511       75112 :   TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_FINALIZE_BODY);
     512             :   DCHECK(!finalize_marking_completed_);
     513             :   DCHECK(IsMarking());
     514             : 
     515       18778 :   double start = heap_->MonotonicallyIncreasingTimeInMs();
     516             : 
     517             :   // After finishing incremental marking, we try to discover all unmarked
     518             :   // objects to reduce the marking load in the final pause.
     519             :   // 1) We scan and mark the roots again to find all changes to the root set.
     520             :   // 2) Age and retain maps embedded in optimized code.
     521       18778 :   MarkRoots();
     522             : 
      523             :   // Map retaining is needed for performance, not correctness,
     524             :   // so we can do it only once at the beginning of the finalization.
     525       18778 :   RetainMaps();
     526             : 
     527       18778 :   finalize_marking_completed_ = true;
     528             : 
     529       18778 :   if (FLAG_trace_incremental_marking) {
     530           5 :     double end = heap_->MonotonicallyIncreasingTimeInMs();
     531           5 :     double delta = end - start;
     532             :     heap()->isolate()->PrintWithTimestamp(
     533           5 :         "[IncrementalMarking] Finalize incrementally spent %.1f ms.\n", delta);
     534       18778 :   }
     535       18778 : }
     536             : 
     537       25068 : void IncrementalMarking::UpdateMarkingWorklistAfterScavenge() {
     538       46980 :   if (!IsMarking()) return;
     539             : 
     540         789 :   Map filler_map = ReadOnlyRoots(heap_).one_pointer_filler_map();
     541             : 
     542             : #ifdef ENABLE_MINOR_MC
     543             :   MinorMarkCompactCollector::MarkingState* minor_marking_state =
     544         789 :       heap()->minor_mark_compact_collector()->marking_state();
     545             : #else
     546             :   void* minor_marking_state = nullptr;
     547             : #endif  // ENABLE_MINOR_MC
     548             : 
     549             :   marking_worklist()->Update([
     550             : #ifdef DEBUG
      551             :                                  // this is referred to inside the DCHECK.
     552             :                                  this,
     553             : #endif
     554             :                                  filler_map, minor_marking_state](
     555     1138050 :                                  HeapObject obj, HeapObject* out) -> bool {
     556             :     DCHECK(obj->IsHeapObject());
     557             :     // Only pointers to from space have to be updated.
     558     1138050 :     if (Heap::InFromPage(obj)) {
     559             :       MapWord map_word = obj->map_word();
     560      394998 :       if (!map_word.IsForwardingAddress()) {
     561             :         // There may be objects on the marking deque that do not exist anymore,
     562             :         // e.g. left trimmed objects or objects from the root set (frames).
     563             :         // If these object are dead at scavenging time, their marking deque
      564             :         // If these objects are dead at scavenging time, their marking deque
     565             :         // them.
     566             :         return false;
     567             :       }
     568             :       HeapObject dest = map_word.ToForwardingAddress();
     569             :       DCHECK_IMPLIES(marking_state()->IsWhite(obj), obj->IsFiller());
     570      307885 :       *out = dest;
     571      307885 :       return true;
     572      743052 :     } else if (Heap::InToPage(obj)) {
     573             :       // The object may be on a large page or on a page that was moved in new
     574             :       // space.
     575             :       DCHECK(Heap::IsLargeObject(obj) ||
     576             :              Page::FromHeapObject(obj)->IsFlagSet(Page::SWEEP_TO_ITERATE));
     577             : #ifdef ENABLE_MINOR_MC
     578           0 :       if (minor_marking_state->IsWhite(obj)) {
     579             :         return false;
     580             :       }
     581             : #endif  // ENABLE_MINOR_MC
     582             :       // Either a large object or an object marked by the minor mark-compactor.
     583           0 :       *out = obj;
     584           0 :       return true;
     585             :     } else {
     586             :       // The object may be on a page that was moved from new to old space. Only
     587             :       // applicable during minor MC garbage collections.
     588      743052 :       if (Page::FromHeapObject(obj)->IsFlagSet(Page::SWEEP_TO_ITERATE)) {
     589             : #ifdef ENABLE_MINOR_MC
     590           0 :         if (minor_marking_state->IsWhite(obj)) {
     591             :           return false;
     592             :         }
     593             : #endif  // ENABLE_MINOR_MC
     594           0 :         *out = obj;
     595           0 :         return true;
     596             :       }
     597             :       DCHECK_IMPLIES(marking_state()->IsWhite(obj), obj->IsFiller());
      598             :       // Skip one-word filler objects that appear on the
      599             :       // stack when we perform an in-place array shift.
     600      743052 :       if (obj->map() != filler_map) {
     601      743052 :         *out = obj;
     602      743052 :         return true;
     603             :       }
     604             :       return false;
     605             :     }
     606        1578 :   });
     607             : 
     608         789 :   UpdateWeakReferencesAfterScavenge();
     609             : }
     610             : 
     611             : namespace {
     612             : template <typename T>
     613      254932 : T ForwardingAddress(T heap_obj) {
     614             :   MapWord map_word = heap_obj->map_word();
     615             : 
     616      254932 :   if (map_word.IsForwardingAddress()) {
     617             :     return T::cast(map_word.ToForwardingAddress());
     618      203162 :   } else if (Heap::InFromPage(heap_obj)) {
     619        4608 :     return T();
     620             :   } else {
     621             :     // TODO(ulan): Support minor mark-compactor here.
     622      198554 :     return heap_obj;
     623             :   }
     624             : }
     625             : }  // namespace
     626             : 
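Background for ForwardingAddress above: during a scavenge, an evacuated
object's map word is overwritten with a forwarding pointer to the new copy,
so the stale copy's header records where the object moved. A toy version of
the same dispatch, with a made-up tagging scheme (V8's actual encoding
differs):

    #include <cstdint>

    struct ToyMapWord {
      uintptr_t value;
      // Toy convention: a set low bit tags the word as a forwarding address.
      bool IsForwardingAddress() const { return (value & 1) != 0; }
      uintptr_t ToForwardingAddress() const { return value & ~uintptr_t{1}; }
    };
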
     627         789 : void IncrementalMarking::UpdateWeakReferencesAfterScavenge() {
     628             :   weak_objects_->weak_references.Update(
     629             :       [](std::pair<HeapObject, HeapObjectSlot> slot_in,
     630      238732 :          std::pair<HeapObject, HeapObjectSlot>* slot_out) -> bool {
     631      238732 :         HeapObject heap_obj = slot_in.first;
     632      238732 :         HeapObject forwarded = ForwardingAddress(heap_obj);
     633             : 
     634      238732 :         if (!forwarded.is_null()) {
     635             :           ptrdiff_t distance_to_slot =
     636      234124 :               slot_in.second.address() - slot_in.first.ptr();
     637      234124 :           Address new_slot = forwarded.ptr() + distance_to_slot;
     638      234124 :           slot_out->first = forwarded;
     639      234124 :           slot_out->second = HeapObjectSlot(new_slot);
     640             :           return true;
     641             :         }
     642             : 
     643             :         return false;
     644         789 :       });
     645             :   weak_objects_->weak_objects_in_code.Update(
     646             :       [](std::pair<HeapObject, Code> slot_in,
     647        1462 :          std::pair<HeapObject, Code>* slot_out) -> bool {
     648        1462 :         HeapObject heap_obj = slot_in.first;
     649        1462 :         HeapObject forwarded = ForwardingAddress(heap_obj);
     650             : 
     651        1462 :         if (!forwarded.is_null()) {
     652        1462 :           slot_out->first = forwarded;
     653        1462 :           slot_out->second = slot_in.second;
     654             :           return true;
     655             :         }
     656             : 
     657             :         return false;
     658         789 :       });
     659             :   weak_objects_->ephemeron_hash_tables.Update(
     660             :       [](EphemeronHashTable slot_in, EphemeronHashTable* slot_out) -> bool {
     661       14658 :         EphemeronHashTable forwarded = ForwardingAddress(slot_in);
     662             : 
     663       14658 :         if (!forwarded.is_null()) {
     664       14658 :           *slot_out = forwarded;
     665             :           return true;
     666             :         }
     667             : 
     668             :         return false;
     669         789 :       });
     670             : 
     671           0 :   auto ephemeron_updater = [](Ephemeron slot_in, Ephemeron* slot_out) -> bool {
     672           0 :     HeapObject key = slot_in.key;
     673           0 :     HeapObject value = slot_in.value;
     674           0 :     HeapObject forwarded_key = ForwardingAddress(key);
     675           0 :     HeapObject forwarded_value = ForwardingAddress(value);
     676             : 
     677           0 :     if (!forwarded_key.is_null() && !forwarded_value.is_null()) {
     678           0 :       *slot_out = Ephemeron{forwarded_key, forwarded_value};
     679             :       return true;
     680             :     }
     681             : 
     682             :     return false;
     683             :   };
     684             : 
     685         789 :   weak_objects_->current_ephemerons.Update(ephemeron_updater);
     686         789 :   weak_objects_->next_ephemerons.Update(ephemeron_updater);
     687         789 :   weak_objects_->discovered_ephemerons.Update(ephemeron_updater);
     688             : 
     689             :   weak_objects_->flushed_js_functions.Update(
     690             :       [](JSFunction slot_in, JSFunction* slot_out) -> bool {
     691          80 :         JSFunction forwarded = ForwardingAddress(slot_in);
     692             : 
     693          80 :         if (!forwarded.is_null()) {
     694          80 :           *slot_out = forwarded;
     695             :           return true;
     696             :         }
     697             : 
     698             :         return false;
     699         789 :       });
     700             : #ifdef DEBUG
     701             :   weak_objects_->bytecode_flushing_candidates.Iterate(
     702             :       [](SharedFunctionInfo candidate) {
     703             :         DCHECK(!Heap::InYoungGeneration(candidate));
     704             :       });
     705             : #endif
     706         789 : }
     707             : 
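A worked example for the weak_references updater above: a recorded weak
reference is a (host object, slot address) pair, and the slot must move with
its host. If a host at address 0x1000 holds a weak slot at 0x1018 and is
evacuated to 0x2000, then distance_to_slot = 0x1018 - 0x1000 = 0x18 and the
entry is rewritten to (0x2000, HeapObjectSlot(0x2000 + 0x18)), i.e. the slot
now lives at 0x2018 inside the forwarded copy.
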
     708       23490 : void IncrementalMarking::UpdateMarkedBytesAfterScavenge(
     709             :     size_t dead_bytes_in_new_space) {
     710       46980 :   if (!IsMarking()) return;
     711        1578 :   bytes_marked_ -= Min(bytes_marked_, dead_bytes_in_new_space);
     712             : }
     713             : 
     714             : bool IncrementalMarking::IsFixedArrayWithProgressBar(HeapObject obj) {
     715             :   if (!obj->IsFixedArray()) return false;
     716             :   MemoryChunk* chunk = MemoryChunk::FromHeapObject(obj);
     717             :   return chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR);
     718             : }
     719             : 
     720    51747139 : int IncrementalMarking::VisitObject(Map map, HeapObject obj) {
     721             :   DCHECK(marking_state()->IsGrey(obj) || marking_state()->IsBlack(obj));
     722             :   if (!marking_state()->GreyToBlack(obj)) {
     723             :     // The object can already be black in these cases:
     724             :     // 1. The object is a fixed array with the progress bar.
     725             :     // 2. The object is a JSObject that was colored black before
     726             :     //    unsafe layout change.
     727             :     // 3. The object is a string that was colored black before
     728             :     //    unsafe layout change.
      729             :     // 4. The object is materialized by the deoptimizer.
     730             :     // 5. The object is a descriptor array marked black by
     731             :     //    the descriptor array marking barrier.
     732             :     DCHECK(obj->IsHashTable() || obj->IsPropertyArray() ||
     733             :            obj->IsFixedArray() || obj->IsContext() || obj->IsJSObject() ||
     734             :            obj->IsString() || obj->IsDescriptorArray());
     735             :   }
     736             :   DCHECK(marking_state()->IsBlack(obj));
     737    51747143 :   WhiteToGreyAndPush(map);
     738             :   IncrementalMarkingMarkingVisitor visitor(heap()->mark_compact_collector(),
     739   103494278 :                                            marking_state());
     740    51747149 :   return visitor.Visit(map, obj);
     741             : }
     742             : 
     743    18650751 : void IncrementalMarking::ProcessBlackAllocatedObject(HeapObject obj) {
     744    37301476 :   if (IsMarking() && marking_state()->IsBlack(obj)) {
     745    18650726 :     RevisitObject(obj);
     746             :   }
     747    18650748 : }
     748             : 
     749    41219601 : void IncrementalMarking::RevisitObject(HeapObject obj) {
     750             :   DCHECK(IsMarking());
     751             :   DCHECK(marking_state()->IsBlack(obj));
     752             :   Page* page = Page::FromHeapObject(obj);
     753    20609802 :   if (page->owner()->identity() == LO_SPACE) {
     754          20 :     page->ResetProgressBar();
     755             :   }
     756             :   Map map = obj->map();
     757    20609802 :   WhiteToGreyAndPush(map);
     758             :   IncrementalMarkingMarkingVisitor visitor(heap()->mark_compact_collector(),
     759    20609799 :                                            marking_state());
     760             :   visitor.Visit(map, obj);
     761    20609804 : }
     762             : 
     763     3462421 : void IncrementalMarking::VisitDescriptors(HeapObject host,
     764             :                                           DescriptorArray descriptors,
     765     3462421 :                                           int number_of_own_descriptors) {
     766             :   IncrementalMarkingMarkingVisitor visitor(heap()->mark_compact_collector(),
     767     3462421 :                                            marking_state());
     768             :   // This is necessary because the Scavenger records slots only for the
     769             :   // promoted black objects and the marking visitor of DescriptorArray skips
     770             :   // the descriptors marked by the visitor.VisitDescriptors() below.
     771             :   visitor.MarkDescriptorArrayBlack(host, descriptors);
     772             :   visitor.VisitDescriptors(descriptors, number_of_own_descriptors);
     773     3462421 : }
     774             : 
     775             : intptr_t IncrementalMarking::ProcessMarkingWorklist(
     776    52776439 :     intptr_t bytes_to_process, ForceCompletionAction completion) {
     777             :   intptr_t bytes_processed = 0;
     778    48731215 :   while (bytes_processed < bytes_to_process || completion == FORCE_COMPLETION) {
     779    52776439 :     HeapObject obj = marking_worklist()->Pop();
     780    52776438 :     if (obj.is_null()) break;
     781             :     // Left trimming may result in white, grey, or black filler objects on the
     782             :     // marking deque. Ignore these objects.
     783   103494355 :     if (obj->IsFiller()) {
     784             :       DCHECK(!marking_state()->IsImpossible(obj));
     785          33 :       continue;
     786             :     }
     787    51747145 :     unscanned_bytes_of_large_object_ = 0;
     788    51747145 :     int size = VisitObject(obj->map(), obj);
     789    47657838 :     bytes_processed += size - unscanned_bytes_of_large_object_;
     790             :   }
     791             :   return bytes_processed;
     792             : }
     793             : 
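A note on the byte accounting in ProcessMarkingWorklist above: VisitObject
returns the full object size, but a large array with a progress bar may be
scanned only partially in one step; the visitor leaves the unscanned
remainder in unscanned_bytes_of_large_object_, so the step budget is charged
only for bytes actually scanned.
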
     794      998834 : StepResult IncrementalMarking::EmbedderStep(double duration_ms) {
     795      998834 :   if (!ShouldDoEmbedderStep()) return StepResult::kDone;
     796             : 
     797             :   constexpr size_t kObjectsToProcessBeforeInterrupt = 500;
     798             : 
     799           0 :   TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_EMBEDDER_TRACING);
     800           0 :   double deadline = heap_->MonotonicallyIncreasingTimeInMs() + duration_ms;
     801             :   bool empty_worklist;
     802           0 :   do {
     803             :     {
     804             :       LocalEmbedderHeapTracer::ProcessingScope scope(
     805           0 :           heap_->local_embedder_heap_tracer());
     806           0 :       HeapObject object;
     807             :       size_t cnt = 0;
     808             :       empty_worklist = true;
     809           0 :       while (marking_worklist()->embedder()->Pop(0, &object)) {
     810           0 :         scope.TracePossibleWrapper(JSObject::cast(object));
     811           0 :         if (++cnt == kObjectsToProcessBeforeInterrupt) {
     812             :           cnt = 0;
     813             :           empty_worklist = false;
     814             :           break;
     815             :         }
     816           0 :       }
     817             :     }
     818           0 :     heap_->local_embedder_heap_tracer()->Trace(deadline);
     819           0 :   } while (!empty_worklist &&
     820           0 :            (heap_->MonotonicallyIncreasingTimeInMs() < deadline));
     821           0 :   heap_->local_embedder_heap_tracer()->SetEmbedderWorklistEmpty(empty_worklist);
     822           0 :   return empty_worklist ? StepResult::kDone : StepResult::kMoreWorkRemaining;
     823             : }
     824             : 
     825       28479 : void IncrementalMarking::Hurry() {
     826             :   // A scavenge may have pushed new objects on the marking deque (due to black
     827             :   // allocation) even in COMPLETE state. This may happen if scavenges are
      828             :   // forced, e.g. in tests. It should not happen once COMPLETE was set at the
      829             :   // end of incremental marking and a regular GC was triggered after that,
      830             :   // because should_hurry_ will force a full GC.
     831       21312 :   if (!marking_worklist()->IsEmpty()) {
     832             :     double start = 0.0;
     833        7167 :     if (FLAG_trace_incremental_marking) {
     834           0 :       start = heap_->MonotonicallyIncreasingTimeInMs();
     835           0 :       if (FLAG_trace_incremental_marking) {
     836           0 :         heap()->isolate()->PrintWithTimestamp("[IncrementalMarking] Hurry\n");
     837             :       }
     838             :     }
      839             :     // TODO(gc): Hurry can mark objects it encounters black because the
      840             :     // mutator was stopped.
     841             :     ProcessMarkingWorklist(0, FORCE_COMPLETION);
     842             :     SetState(COMPLETE);
     843        7167 :     if (FLAG_trace_incremental_marking) {
     844           0 :       double end = heap_->MonotonicallyIncreasingTimeInMs();
     845           0 :       double delta = end - start;
     846           0 :       if (FLAG_trace_incremental_marking) {
     847             :         heap()->isolate()->PrintWithTimestamp(
     848             :             "[IncrementalMarking] Complete (hurry), spent %d ms.\n",
     849           0 :             static_cast<int>(delta));
     850             :       }
     851             :     }
     852             :   }
     853       21312 : }
     854             : 
     855             : 
     856       49762 : void IncrementalMarking::Stop() {
     857       24912 :   if (IsStopped()) return;
     858       24864 :   if (FLAG_trace_incremental_marking) {
     859             :     int old_generation_size_mb =
     860           5 :         static_cast<int>(heap()->OldGenerationSizeOfObjects() / MB);
     861             :     int old_generation_limit_mb =
     862           5 :         static_cast<int>(heap()->old_generation_allocation_limit() / MB);
     863             :     heap()->isolate()->PrintWithTimestamp(
     864             :         "[IncrementalMarking] Stopping: old generation %dMB, limit %dMB, "
     865             :         "overshoot %dMB\n",
     866             :         old_generation_size_mb, old_generation_limit_mb,
     867          10 :         Max(0, old_generation_size_mb - old_generation_limit_mb));
     868             :   }
     869             : 
     870      223776 :   SpaceIterator it(heap_);
     871      248640 :   while (it.has_next()) {
     872      198912 :     Space* space = it.next();
     873      397824 :     if (space == heap_->new_space()) {
     874       24864 :       space->RemoveAllocationObserver(&new_generation_observer_);
     875             :     } else {
     876      174048 :       space->RemoveAllocationObserver(&old_generation_observer_);
     877             :     }
     878             :   }
     879             : 
     880             :   IncrementalMarking::set_should_hurry(false);
     881       49728 :   heap_->isolate()->stack_guard()->ClearGC();
     882             :   SetState(STOPPED);
     883       24864 :   is_compacting_ = false;
     884       24864 :   FinishBlackAllocation();
     885             : }
     886             : 
     887             : 
     888       21312 : void IncrementalMarking::Finalize() {
     889       21312 :   Hurry();
     890       21312 :   Stop();
     891       21312 : }
     892             : 
     893             : 
     894     1001277 : void IncrementalMarking::FinalizeMarking(CompletionAction action) {
     895             :   DCHECK(!finalize_marking_completed_);
     896     1001272 :   if (FLAG_trace_incremental_marking) {
     897             :     heap()->isolate()->PrintWithTimestamp(
     898             :         "[IncrementalMarking] requesting finalization of incremental "
     899           5 :         "marking.\n");
     900             :   }
     901     1001272 :   request_type_ = FINALIZATION;
     902     1001272 :   if (action == GC_VIA_STACK_GUARD) {
     903     1986694 :     heap_->isolate()->stack_guard()->RequestGC();
     904             :   }
     905     1001272 : }
     906             : 
     907             : 
     908       22149 : void IncrementalMarking::MarkingComplete(CompletionAction action) {
     909             :   SetState(COMPLETE);
     910             :   // We will set the stack guard to request a GC now.  This will mean the rest
     911             :   // of the GC gets performed as soon as possible (we can't do a GC here in a
     912             :   // record-write context).  If a few things get allocated between now and then
     913             :   // that shouldn't make us do a scavenge and keep being incremental, so we set
     914             :   // the should-hurry flag to indicate that there can't be much work left to do.
     915             :   set_should_hurry(true);
     916       22144 :   if (FLAG_trace_incremental_marking) {
     917             :     heap()->isolate()->PrintWithTimestamp(
     918           5 :         "[IncrementalMarking] Complete (normal).\n");
     919             :   }
     920       22144 :   request_type_ = COMPLETE_MARKING;
     921       22144 :   if (action == GC_VIA_STACK_GUARD) {
     922       20510 :     heap_->isolate()->stack_guard()->RequestGC();
     923             :   }
     924       22144 : }
     925             : 
     926             : 
     927       74510 : void IncrementalMarking::Epilogue() {
     928       74510 :   was_activated_ = false;
     929       74510 :   finalize_marking_completed_ = false;
     930       74510 : }
     931             : 
     932           0 : bool IncrementalMarking::ShouldDoEmbedderStep() {
     933     1985737 :   return state_ == MARKING && FLAG_incremental_marking_wrappers &&
     934     1973806 :          heap_->local_embedder_heap_tracer()->InUse();
     935             : }
     936             : 
     937     1493999 : void IncrementalMarking::FastForwardSchedule() {
     938     1493999 :   if (scheduled_bytes_to_mark_ < bytes_marked_) {
     939      426073 :     scheduled_bytes_to_mark_ = bytes_marked_;
     940      426073 :     if (FLAG_trace_incremental_marking) {
     941             :       heap_->isolate()->PrintWithTimestamp(
     942          10 :           "[IncrementalMarking] Fast-forwarded schedule\n");
     943             :     }
     944             :   }
     945     1493999 : }
     946             : 
     947           0 : void IncrementalMarking::FastForwardScheduleIfCloseToFinalization() {
     948             :   // Consider marking close to finalization if 75% of the initial old
     949             :   // generation was marked.
     950      998823 :   if (bytes_marked_ > 3 * (initial_old_generation_size_ / 4)) {
     951      492727 :     FastForwardSchedule();
     952             :   }
     953           0 : }
     954             : 
     955      998823 : void IncrementalMarking::ScheduleBytesToMarkBasedOnTime(double time_ms) {
     956             :   // Time interval that should be sufficient to complete incremental marking.
     957             :   constexpr double kTargetMarkingWallTimeInMs = 500;
     958             :   constexpr double kMinTimeBetweenScheduleInMs = 10;
     959     1997646 :   if (schedule_update_time_ms_ + kMinTimeBetweenScheduleInMs > time_ms) return;
     960             :   double delta_ms =
     961        5775 :       Min(time_ms - schedule_update_time_ms_, kTargetMarkingWallTimeInMs);
     962        5775 :   schedule_update_time_ms_ = time_ms;
     963             : 
     964             :   size_t bytes_to_mark =
     965        5775 :       (delta_ms / kTargetMarkingWallTimeInMs) * initial_old_generation_size_;
     966             :   AddScheduledBytesToMark(bytes_to_mark);
     967             : 
     968        5775 :   if (FLAG_trace_incremental_marking) {
     969             :     heap_->isolate()->PrintWithTimestamp(
     970             :         "[IncrementalMarking] Scheduled %" PRIuS
     971             :         "KB to mark based on time delta %.1fms\n",
     972           0 :         bytes_to_mark / KB, delta_ms);
     973             :   }
     974             : }
     975             : 
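A worked example for the time-based schedule above: the pacing goal is to
mark the entire initial old generation within kTargetMarkingWallTimeInMs
(500 ms) of wall-clock time. With initial_old_generation_size_ = 100 MB and
delta_ms = 25 ms, bytes_to_mark = (25 / 500) * 100 MB = 5 MB is added to the
schedule.
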
     976             : namespace {
     977             : StepResult CombineStepResults(StepResult a, StepResult b) {
     978      998834 :   if (a == StepResult::kDone && b == StepResult::kDone)
     979             :     return StepResult::kDone;
     980             :   return StepResult::kMoreWorkRemaining;
     981             : }
     982             : }  // anonymous namespace
     983             : 
     984      998823 : StepResult IncrementalMarking::AdvanceWithDeadline(
     985             :     double deadline_in_ms, CompletionAction completion_action,
     986     2965670 :     StepOrigin step_origin) {
     987             :   HistogramTimerScope incremental_marking_scope(
     988     1997646 :       heap_->isolate()->counters()->gc_incremental_marking());
     989     2996469 :   TRACE_EVENT0("v8", "V8.GCIncrementalMarking");
     990     4994115 :   TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL);
     991             :   DCHECK(!IsStopped());
     992             : 
     993      998823 :   ScheduleBytesToMarkBasedOnTime(heap()->MonotonicallyIncreasingTimeInMs());
     994             :   FastForwardScheduleIfCloseToFinalization();
     995             : 
     996             :   double remaining_time_in_ms = 0.0;
     997             :   StepResult result;
     998      998834 :   do {
     999      998834 :     StepResult embedder_result = EmbedderStep(kStepSizeInMs / 2);
    1000             :     StepResult v8_result =
    1001      998834 :         V8Step(kStepSizeInMs / 2, completion_action, step_origin);
    1002             :     result = CombineStepResults(v8_result, embedder_result);
    1003             :     remaining_time_in_ms =
    1004      998834 :         deadline_in_ms - heap()->MonotonicallyIncreasingTimeInMs();
    1005     1936256 :   } while (remaining_time_in_ms > kStepSizeInMs && !IsComplete() &&
    1006     1967770 :            !marking_worklist()->IsEmpty() &&
    1007             :            result == StepResult::kMoreWorkRemaining);
    1008      998823 :   return result;
    1009             : }
    1010             : 
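Editor's note: a toy, self-contained analogue of the loop above, to show the protocol: each slice is split between the embedder and V8, results combine like a logical AND on "done" (that is CombineStepResults), and the loop exits at the deadline or when both sides finish. The work counts and the 1 ms slice are assumptions for illustration; "time" is a simulated tick so the sketch is deterministic.

    #include <cassert>
    #include <cstddef>

    enum class StepResult { kDone, kMoreWorkRemaining };

    int main() {
      const double kStepSizeInMs = 1.0;       // assumed slice size
      double now_ms = 0.0, deadline_ms = 10.0;
      size_t embedder_work = 3, v8_work = 5;  // hypothetical work units
      StepResult result;
      do {
        // Half the slice for the embedder, half for V8, as above.
        if (embedder_work > 0) --embedder_work;
        if (v8_work > 0) --v8_work;
        StepResult e = embedder_work == 0 ? StepResult::kDone
                                          : StepResult::kMoreWorkRemaining;
        StepResult v = v8_work == 0 ? StepResult::kDone
                                    : StepResult::kMoreWorkRemaining;
        // CombineStepResults: done only when both sides are done.
        result = (e == StepResult::kDone && v == StepResult::kDone)
                     ? StepResult::kDone
                     : StepResult::kMoreWorkRemaining;
        now_ms += kStepSizeInMs;  // simulated clock
      } while (deadline_ms - now_ms > kStepSizeInMs &&
               result == StepResult::kMoreWorkRemaining);
      assert(result == StepResult::kDone);  // five slices drain both workers
      return 0;
    }
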
    1011       20900 : void IncrementalMarking::FinalizeSweeping() {
    1012             :   DCHECK(state_ == SWEEPING);
    1013       96362 :   if (heap_->mark_compact_collector()->sweeping_in_progress() &&
    1014       25476 :       (!FLAG_concurrent_sweeping ||
    1015       12714 :        !heap_->mark_compact_collector()->sweeper()->AreSweeperTasksRunning())) {
    1016       10530 :     heap_->mark_compact_collector()->EnsureSweepingCompleted();
    1017             :   }
    1018       62700 :   if (!heap_->mark_compact_collector()->sweeping_in_progress()) {
    1019             : #ifdef DEBUG
    1020             :     heap_->VerifyCountersAfterSweeping();
    1021             : #endif
    1022       13403 :     StartMarking();
    1023             :   }
    1024       20900 : }
    1025             : 
    1026       71112 : size_t IncrementalMarking::StepSizeToKeepUpWithAllocations() {
    1027             :   // Compute bytes allocated since the last step from the allocation counter.
    1028       71112 :   size_t current_counter = heap_->OldGenerationAllocationCounter();
    1029       71112 :   size_t result = current_counter - old_generation_allocation_counter_;
    1030       71112 :   old_generation_allocation_counter_ = current_counter;
    1031       71112 :   return result;
    1032             : }
    1033             : 
    1034      143421 : size_t IncrementalMarking::StepSizeToMakeProgress() {
    1035             :   const size_t kTargetStepCount = 256;
    1036             :   const size_t kTargetStepCountAtOOM = 32;
    1037             :   const size_t kMaxStepSizeInByte = 256 * KB;
    1038      142224 :   size_t oom_slack = heap()->new_space()->Capacity() + 64 * MB;
    1039             : 
    1040       71112 :   if (!heap()->CanExpandOldGeneration(oom_slack)) {
    1041        1197 :     return heap()->OldGenerationSizeOfObjects() / kTargetStepCountAtOOM;
    1042             :   }
    1043             : 
    1044             :   return Min(Max(initial_old_generation_size_ / kTargetStepCount,
    1045             :                  IncrementalMarking::kMinStepSizeInBytes),
    1046      139830 :              kMaxStepSizeInByte);
    1047             : }
    1048             : 
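Editor's note: a worked instance of the clamp above. Assuming kMinStepSizeInBytes is 64 KB (it is defined in the header, which is not part of this report) and a hypothetical 100 MB initial old generation, the raw 100 MB / 256 = 400 KB step is capped at 256 KB.

    #include <algorithm>
    #include <cassert>
    #include <cstddef>

    int main() {
      const size_t KB = 1024, MB = 1024 * KB;
      const size_t kTargetStepCount = 256;
      const size_t kMaxStepSizeInByte = 256 * KB;
      const size_t kMinStepSizeInBytes = 64 * KB;     // assumed header value
      size_t initial_old_generation_size = 100 * MB;  // hypothetical
      size_t step = std::min(std::max(initial_old_generation_size /
                                          kTargetStepCount,  // 400 KB
                                      kMinStepSizeInBytes),
                             kMaxStepSizeInByte);
      assert(step == 256 * KB);  // clamped down to the cap
      return 0;
    }
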
    1049           0 : void IncrementalMarking::AddScheduledBytesToMark(size_t bytes_to_mark) {
    1050       76887 :   if (scheduled_bytes_to_mark_ + bytes_to_mark < scheduled_bytes_to_mark_) {
    1051             :     // The overflow case.
    1052           0 :     scheduled_bytes_to_mark_ = std::numeric_limits<std::size_t>::max();
    1053             :   } else {
    1054       76887 :     scheduled_bytes_to_mark_ += bytes_to_mark;
    1055             :   }
    1056           0 : }
    1057             : 
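Editor's note: the overflow branch above is the standard saturating-add idiom for unsigned integers (wrap-around makes "a + b < a" a reliable overflow test); a minimal standalone version:

    #include <cassert>
    #include <cstddef>
    #include <limits>

    size_t SaturatingAdd(size_t a, size_t b) {
      // Unsigned addition wraps on overflow, so a + b < a detects it.
      if (a + b < a) return std::numeric_limits<size_t>::max();
      return a + b;
    }

    int main() {
      const size_t kMax = std::numeric_limits<size_t>::max();
      assert(SaturatingAdd(1, 2) == 3);
      assert(SaturatingAdd(kMax - 1, 5) == kMax);  // clamps, never wraps
      return 0;
    }
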
    1058       71112 : void IncrementalMarking::ScheduleBytesToMarkBasedOnAllocation() {
    1059       71112 :   size_t progress_bytes = StepSizeToMakeProgress();
    1060       71112 :   size_t allocation_bytes = StepSizeToKeepUpWithAllocations();
    1061       71112 :   size_t bytes_to_mark = progress_bytes + allocation_bytes;
    1062             :   AddScheduledBytesToMark(bytes_to_mark);
    1063             : 
    1064       71112 :   if (FLAG_trace_incremental_marking) {
    1065             :     heap_->isolate()->PrintWithTimestamp(
    1066             :         "[IncrementalMarking] Scheduled %" PRIuS
    1067             :         "KB to mark based on allocation (progress="
    1068             :         "%" PRIuS "KB, allocation=%" PRIuS "KB)\n",
    1069           0 :         bytes_to_mark / KB, progress_bytes / KB, allocation_bytes / KB);
    1070             :   }
    1071       71112 : }
    1072             : 
    1073     2135411 : void IncrementalMarking::FetchBytesMarkedConcurrently() {
    1074     1073344 :   if (FLAG_concurrent_marking) {
    1075             :     size_t current_bytes_marked_concurrently =
    1076     1062067 :         heap()->concurrent_marking()->TotalMarkedBytes();
    1077             :     // The concurrent_marking()->TotalMarkedBytes() is not monotonic for a
    1078             :     // short period of time when a concurrent marking task is finishing.
    1079     1062067 :     if (current_bytes_marked_concurrently > bytes_marked_concurrently_) {
    1080             :       bytes_marked_ +=
    1081       32762 :           current_bytes_marked_concurrently - bytes_marked_concurrently_;
    1082       32762 :       bytes_marked_concurrently_ = current_bytes_marked_concurrently;
    1083             :     }
    1084     1062067 :     if (FLAG_trace_incremental_marking) {
    1085             :       heap_->isolate()->PrintWithTimestamp(
    1086             :           "[IncrementalMarking] Marked %" PRIuS "KB on background threads\n",
    1087          45 :           heap_->concurrent_marking()->TotalMarkedBytes() / KB);
    1088             :     }
    1089             :   }
    1090     1073344 : }
    1091             : 
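Editor's note: the update above is a high-water-mark pattern: fold in only increases of a counter that can transiently dip while concurrent tasks publish their totals. A minimal self-contained sketch (names are illustrative):

    #include <cassert>
    #include <cstddef>

    struct ForwardProgress {
      size_t last_seen = 0;
      size_t total = 0;
      void Sample(size_t current) {
        if (current > last_seen) {  // ignore transient dips
          total += current - last_seen;
          last_seen = current;
        }
      }
    };

    int main() {
      ForwardProgress p;
      p.Sample(100);  // +100
      p.Sample(90);   // dip: ignored, high-water mark stays at 100
      p.Sample(120);  // +20 relative to the mark
      assert(p.total == 120);
      return 0;
    }
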
    1092     1073344 : size_t IncrementalMarking::ComputeStepSizeInBytes(StepOrigin step_origin) {
    1093     1073344 :   FetchBytesMarkedConcurrently();
    1094     1073344 :   if (FLAG_trace_incremental_marking) {
    1095          15 :     if (scheduled_bytes_to_mark_ > bytes_marked_) {
    1096             :       heap_->isolate()->PrintWithTimestamp(
    1097             :           "[IncrementalMarking] Marker is %" PRIuS "KB behind schedule\n",
    1098           0 :           (scheduled_bytes_to_mark_ - bytes_marked_) / KB);
    1099             :     } else {
    1100             :       heap_->isolate()->PrintWithTimestamp(
    1101             :           "[IncrementalMarking] Marker is %" PRIuS "KB ahead of schedule\n",
    1102          30 :           (bytes_marked_ - scheduled_bytes_to_mark_) / KB);
    1103             :     }
    1104             :   }
    1105             :   // Allow steps on allocation to get behind the schedule by a small amount.
    1106             :   // This gives higher priority to steps in tasks.
    1107     1073344 :   size_t kScheduleMarginInBytes = step_origin == StepOrigin::kV8 ? 1 * MB : 0;
    1108     1073344 :   if (bytes_marked_ + kScheduleMarginInBytes > scheduled_bytes_to_mark_)
    1109             :     return 0;
    1110       36033 :   return scheduled_bytes_to_mark_ - bytes_marked_ - kScheduleMarginInBytes;
    1111             : }
    1112             : 
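Editor's note: the 1 MB margin above with hypothetical numbers: at 10 MB marked against a 10.5 MB schedule, an allocation-triggered step (StepOrigin::kV8) gets nothing to do, while a task step still receives the remaining 0.5 MB; this is what gives tasks priority.

    #include <cassert>
    #include <cstddef>

    const size_t MB = 1024 * 1024;

    // Same margin rule as above; from_v8 mirrors StepOrigin::kV8.
    size_t StepSize(size_t bytes_marked, size_t scheduled, bool from_v8) {
      size_t margin = from_v8 ? 1 * MB : 0;
      if (bytes_marked + margin > scheduled) return 0;
      return scheduled - bytes_marked - margin;
    }

    int main() {
      size_t marked = 10 * MB;
      size_t scheduled = 10 * MB + MB / 2;  // 10.5 MB, hypothetical
      assert(StepSize(marked, scheduled, true) == 0);        // kV8: skip
      assert(StepSize(marked, scheduled, false) == MB / 2);  // kTask: 0.5 MB
      return 0;
    }
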
    1113       71799 : void IncrementalMarking::AdvanceOnAllocation() {
    1114             :   // Code using an AlwaysAllocateScope assumes that the GC state does not
    1115             :   // change, which implies that no marking steps may be performed.
    1116      287196 :   if (heap_->gc_state() != Heap::NOT_IN_GC || !FLAG_incremental_marking ||
    1117      214739 :       (state_ != SWEEPING && state_ != MARKING) || heap_->always_allocate()) {
    1118         687 :     return;
    1119             :   }
    1120             :   HistogramTimerScope incremental_marking_scope(
    1121      142224 :       heap_->isolate()->counters()->gc_incremental_marking());
    1122      213336 :   TRACE_EVENT0("v8", "V8.GCIncrementalMarking");
    1123      355560 :   TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL);
    1124       71112 :   ScheduleBytesToMarkBasedOnAllocation();
    1125       71112 :   V8Step(kMaxStepSizeInMs, GC_VIA_STACK_GUARD, StepOrigin::kV8);
    1126             : }
    1127             : 
    1128     1080875 : StepResult IncrementalMarking::V8Step(double max_step_size_in_ms,
    1129             :                                       CompletionAction action,
    1130     3208771 :                                       StepOrigin step_origin) {
    1131             :   StepResult result = StepResult::kMoreWorkRemaining;
    1132     3205009 :   double start = heap_->MonotonicallyIncreasingTimeInMs();
    1133             : 
    1134     1080875 :   if (state_ == SWEEPING) {
    1135       83596 :     TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL_SWEEPING);
    1136       41798 :     FinalizeSweeping();
    1137             :   }
    1138             : 
    1139             :   size_t bytes_processed = 0, bytes_to_process = 0;
    1140     1080875 :   if (state_ == MARKING) {
    1141     1073344 :     if (FLAG_concurrent_marking) {
    1142     2124134 :       heap_->new_space()->ResetOriginalTop();
    1143     2124134 :       heap_->new_lo_space()->ResetPendingObject();
    1144             :       // It is safe to merge back all objects that were on hold into the
    1145             :       // shared work list at Step because we are at a safepoint where all
    1146             :       // objects are properly initialized.
    1147             :       marking_worklist()->shared()->MergeGlobalPool(
    1148     1062067 :           marking_worklist()->on_hold());
    1149             :     }
    1150             : 
    1151             : // Only print marking worklist in debug mode to save ~40KB of code size.
    1152             : #ifdef DEBUG
    1153             :     if (FLAG_trace_incremental_marking && FLAG_trace_concurrent_marking &&
    1154             :         FLAG_trace_gc_verbose) {
    1155             :       marking_worklist()->Print();
    1156             :     }
    1157             : #endif
    1158     1073344 :     if (FLAG_trace_incremental_marking) {
    1159             :       heap_->isolate()->PrintWithTimestamp(
    1160             :           "[IncrementalMarking] Marking speed %.fKB/ms\n",
    1161          30 :           heap()->tracer()->IncrementalMarkingSpeedInBytesPerMillisecond());
    1162             :     }
    1163             :     // The first step after Scavenge will see many allocated bytes.
    1164             :     // Cap the step size to distribute the marking work more uniformly.
    1165             :     size_t max_step_size = GCIdleTimeHandler::EstimateMarkingStepSize(
    1166             :         max_step_size_in_ms,
    1167     1073344 :         heap()->tracer()->IncrementalMarkingSpeedInBytesPerMillisecond());
    1168     1073344 :     bytes_to_process = Min(ComputeStepSizeInBytes(step_origin), max_step_size);
    1169     1073344 :     if (bytes_to_process == 0) {
    1170             :       result = StepResult::kDone;
    1171             :     }
    1172             : 
    1173             :     bytes_processed =
    1174     2146689 :         ProcessMarkingWorklist(Max(bytes_to_process, kMinStepSizeInBytes));
    1175             : 
    1176     1073345 :     bytes_marked_ += bytes_processed;
    1177             : 
    1178     1073345 :     if (marking_worklist()->IsEmpty()) {
    1179     1023416 :       if (heap_->local_embedder_heap_tracer()
    1180     1023416 :               ->ShouldFinalizeIncrementalMarking()) {
    1181     1023416 :         if (!finalize_marking_completed_) {
    1182     1001272 :           FinalizeMarking(action);
    1183     1001272 :           FastForwardSchedule();
    1184             :           result = StepResult::kMoreWorkRemaining;
    1185     1001272 :           incremental_marking_job()->Start(heap_);
    1186             :         } else {
    1187       22144 :           MarkingComplete(action);
    1188             :         }
    1189             :       } else {
    1190           0 :         heap_->local_embedder_heap_tracer()->NotifyV8MarkingWorklistWasEmpty();
    1191             :       }
    1192             :     }
    1193             :   }
    1194     1080875 :   if (FLAG_concurrent_marking) {
    1195     2139196 :     heap_->concurrent_marking()->RescheduleTasksIfNeeded();
    1196             :   }
    1197             : 
    1198     1080875 :   double end = heap_->MonotonicallyIncreasingTimeInMs();
    1199     1080875 :   double duration = (end - start);
    1200             :   // Note that we report zero bytes here when sweeping was in progress or
    1201             :   // when we just started incremental marking. In these cases we did not
    1202             :   // process the marking deque.
    1203     2161750 :   heap_->tracer()->AddIncrementalMarkingStep(duration, bytes_processed);
    1204     1080875 :   if (FLAG_trace_incremental_marking) {
    1205             :     heap_->isolate()->PrintWithTimestamp(
    1206             :         "[IncrementalMarking] Step %s %" PRIuS "KB (%" PRIuS "KB) in %.1f\n",
    1207             :         step_origin == StepOrigin::kV8 ? "in v8" : "in task",
    1208          30 :         bytes_processed / KB, bytes_to_process / KB, duration);
    1209             :   }
    1210     1080875 :   return result;
    1211             : }
    1212             : 
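Editor's note: the cap in the MARKING branch above reads as "take the scheduler's demand, but no more than the marker can plausibly process in max_step_size_in_ms at the measured speed". A toy version under the assumption that the estimator is roughly speed times time (the real GCIdleTimeHandler helper applies further conservative factors):

    #include <algorithm>
    #include <cassert>
    #include <cstddef>

    int main() {
      const size_t KB = 1024;
      double speed_bytes_per_ms = 500.0 * KB;  // hypothetical marking speed
      double budget_ms = 1.0;                  // hypothetical step budget
      // Stand-in for GCIdleTimeHandler::EstimateMarkingStepSize.
      size_t max_step = static_cast<size_t>(speed_bytes_per_ms * budget_ms);
      size_t scheduled = 8 * 1024 * KB;        // 8 MB behind schedule
      size_t bytes_to_process = std::min(scheduled, max_step);
      assert(bytes_to_process == 500 * KB);    // capped by the time budget
      return 0;
    }
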
    1213             : }  // namespace internal
    1214      178779 : }  // namespace v8

Generated by: LCOV version 1.10