LCOV - code coverage report
Current view: top level - src/heap - incremental-marking.cc (source / functions)
Test: app.info
Date: 2019-01-20
                 Hit    Total    Coverage
Lines:           402      459      87.6 %
Functions:        51       60      85.0 %

          Line data    Source code
       1             : // Copyright 2012 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #include "src/heap/incremental-marking.h"
       6             : 
       7             : #include "src/compilation-cache.h"
       8             : #include "src/conversions.h"
       9             : #include "src/heap/concurrent-marking.h"
      10             : #include "src/heap/embedder-tracing.h"
      11             : #include "src/heap/gc-idle-time-handler.h"
      12             : #include "src/heap/gc-tracer.h"
      13             : #include "src/heap/heap-inl.h"
      14             : #include "src/heap/incremental-marking-inl.h"
      15             : #include "src/heap/mark-compact-inl.h"
      16             : #include "src/heap/object-stats.h"
      17             : #include "src/heap/objects-visiting-inl.h"
      18             : #include "src/heap/objects-visiting.h"
      19             : #include "src/heap/sweeper.h"
      20             : #include "src/objects/hash-table-inl.h"
      21             : #include "src/objects/slots-inl.h"
      22             : #include "src/tracing/trace-event.h"
      23             : #include "src/v8.h"
      24             : #include "src/visitors.h"
      25             : #include "src/vm-state-inl.h"
      26             : 
      27             : namespace v8 {
      28             : namespace internal {
      29             : 
      30             : using IncrementalMarkingMarkingVisitor =
      31             :     MarkingVisitor<FixedArrayVisitationMode::kIncremental,
      32             :                    TraceRetainingPathMode::kDisabled,
      33             :                    IncrementalMarking::MarkingState>;
      34             : 
      35       64108 : void IncrementalMarking::Observer::Step(int bytes_allocated, Address addr,
      36             :                                         size_t size) {
      37      131506 :   Heap* heap = incremental_marking_.heap();
      38             :   VMState<GC> state(heap->isolate());
      39             :   RuntimeCallTimerScope runtime_timer(
      40             :       heap->isolate(),
      41       64108 :       RuntimeCallCounterId::kGC_Custom_IncrementalMarkingObserver);
      42       64108 :   incremental_marking_.AdvanceIncrementalMarkingOnAllocation();
      43      128218 :   if (incremental_marking_.black_allocation() && addr != kNullAddress) {
      44             :     // AdvanceIncrementalMarkingOnAllocation can start black allocation.
      45             :     // Ensure that the new object is marked black.
      46             :     HeapObject object = HeapObject::FromAddress(addr);
      47      185073 :     if (incremental_marking_.marking_state()->IsWhite(object) &&
      48        3289 :         !(Heap::InNewSpace(object) || heap->new_lo_space()->Contains(object))) {
      49        3289 :       if (heap->IsLargeObject(object)) {
      50             :         incremental_marking_.marking_state()->WhiteToBlack(object);
      51             :       } else {
      52        1278 :         Page::FromAddress(addr)->CreateBlackArea(addr, addr + size);
      53             :       }
      54             :     }
      55             :   }
      56       64109 : }
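// A hedged, self-contained sketch of the invariant the observer above
// enforces: while black allocation is active, a freshly allocated
// old-generation object must not stay white, or the final pause could treat
// it as garbage (new-space objects are excluded above). All names below are
// illustrative assumptions, not V8's API.
namespace observer_sketch {
enum class Color { kWhite, kGrey, kBlack };
inline void EnsureBlackAfterAllocation(Color& color, bool black_allocation) {
  if (black_allocation && color == Color::kWhite) {
    color = Color::kBlack;  // large object: flip its mark bits; regular
  }                         // page: extend the page's black area instead
}
}  // namespace observer_sketch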
      57             : 
      58       62882 : IncrementalMarking::IncrementalMarking(
      59             :     Heap* heap, MarkCompactCollector::MarkingWorklist* marking_worklist,
      60             :     WeakObjects* weak_objects)
      61             :     : heap_(heap),
      62             :       marking_worklist_(marking_worklist),
      63             :       weak_objects_(weak_objects),
      64             :       initial_old_generation_size_(0),
      65             :       bytes_marked_ahead_of_schedule_(0),
      66             :       bytes_marked_concurrently_(0),
      67             :       unscanned_bytes_of_large_object_(0),
      68             :       is_compacting_(false),
      69             :       should_hurry_(false),
      70             :       was_activated_(false),
      71             :       black_allocation_(false),
      72             :       finalize_marking_completed_(false),
      73             :       trace_wrappers_toggle_(false),
      74             :       request_type_(NONE),
      75             :       new_generation_observer_(*this, kYoungGenerationAllocatedThreshold),
      76       62882 :       old_generation_observer_(*this, kOldGenerationAllocatedThreshold) {
      77             :   DCHECK_NOT_NULL(marking_worklist_);
      78             :   SetState(STOPPED);
      79       62882 : }
      80             : 
      81             : bool IncrementalMarking::BaseRecordWrite(HeapObject obj, Object value) {
      82   281508225 :   HeapObject value_heap_obj = HeapObject::cast(value);
      83             :   DCHECK(!marking_state()->IsImpossible(value_heap_obj));
      84             :   DCHECK(!marking_state()->IsImpossible(obj));
      85             : #ifdef V8_CONCURRENT_MARKING
      86             :   // The write barrier stub generated with V8_CONCURRENT_MARKING does not
      87             :   // check the color of the source object.
      88             :   const bool need_recording = true;
      89             : #else
      90             :   const bool need_recording = marking_state()->IsBlack(obj);
      91             : #endif
      92             : 
      93   281508225 :   if (need_recording && WhiteToGreyAndPush(value_heap_obj)) {
      94    19495347 :     RestartIfNotMarking();
      95             :   }
      96   281508245 :   return is_compacting_ && need_recording;
      97             : }
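// BaseRecordWrite is a Dijkstra-style insertion barrier: storing a reference
// to a white object greys that object so the marker will still scan it (the
// return value additionally tells callers whether to record the slot for
// compaction). A hedged sketch of the greying half on plain data; the types
// here are illustrative stand-ins, not V8's:
#include <vector>
namespace barrier_sketch {
enum class Color { kWhite, kGrey, kBlack };
struct Obj { Color color = Color::kWhite; };
inline void RecordWrite(Obj* value, std::vector<Obj*>& worklist) {
  if (value->color == Color::kWhite) {  // WhiteToGreyAndPush
    value->color = Color::kGrey;
    worklist.push_back(value);          // revisited by the marking loop
  }
}
}  // namespace barrier_sketch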
      98             : 
      99   281253370 : void IncrementalMarking::RecordWriteSlow(HeapObject obj, HeapObjectSlot slot,
     100             :                                          Object value) {
     101   289545649 :   if (BaseRecordWrite(obj, value) && slot.address() != kNullAddress) {
      102             :     // Object is not going to be rescanned. We need to record the slot.
     103             :     heap_->mark_compact_collector()->RecordSlot(obj, slot,
     104             :                                                 HeapObject::cast(value));
     105             :   }
     106   281253302 : }
     107             : 
     108     6171905 : int IncrementalMarking::RecordWriteFromCode(Address raw_obj,
     109             :                                             Address slot_address,
     110             :                                             Isolate* isolate) {
     111             :   HeapObject obj = HeapObject::cast(Object(raw_obj));
     112             :   MaybeObjectSlot slot(slot_address);
     113             :   isolate->heap()->incremental_marking()->RecordMaybeWeakWrite(obj, slot,
     114     6171905 :                                                                *slot);
      115             :   // Called by RecordWriteCodeStubAssembler, which doesn't accept a void return type
     116     6171905 :   return 0;
     117             : }
     118             : 
     119      254943 : void IncrementalMarking::RecordWriteIntoCode(Code host, RelocInfo* rinfo,
     120             :                                              HeapObject value) {
     121             :   DCHECK(IsMarking());
     122      254943 :   if (BaseRecordWrite(host, value)) {
     123             :     // Object is not going to be rescanned.  We need to record the slot.
     124        1023 :     heap_->mark_compact_collector()->RecordRelocSlot(host, rinfo, value);
     125             :   }
     126      254943 : }
     127             : 
     128   637397091 : bool IncrementalMarking::WhiteToGreyAndPush(HeapObject obj) {
     129   544276982 :   if (marking_state()->WhiteToGrey(obj)) {
     130             :     marking_worklist()->Push(obj);
     131    93120496 :     return true;
     132             :   }
     133             :   return false;
     134             : }
     135             : 
     136     2393907 : void IncrementalMarking::MarkBlackAndVisitObjectDueToLayoutChange(
     137     2393907 :     HeapObject obj) {
     138     4787814 :   TRACE_EVENT0("v8", "V8.GCIncrementalMarkingLayoutChange");
     139     9575626 :   TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_LAYOUT_CHANGE);
     140             :   marking_state()->WhiteToGrey(obj);
     141     2393908 :   if (marking_state()->GreyToBlack(obj)) {
     142     1443354 :     RevisitObject(obj);
     143     2393907 :   }
     144     2393907 : }
     145             : 
     146         364 : void IncrementalMarking::NotifyLeftTrimming(HeapObject from, HeapObject to) {
     147             :   DCHECK(IsMarking());
     148             :   DCHECK(MemoryChunk::FromHeapObject(from)->SweepingDone());
     149             :   DCHECK_EQ(MemoryChunk::FromHeapObject(from), MemoryChunk::FromHeapObject(to));
     150             :   DCHECK_NE(from, to);
     151             : 
     152             :   MarkBit new_mark_bit = marking_state()->MarkBitFrom(to);
     153             : 
     154         364 :   if (black_allocation() && Marking::IsBlack<kAtomicity>(new_mark_bit)) {
     155             :     // Nothing to do if the object is in black area.
     156         182 :     return;
     157             :   }
     158          27 :   MarkBlackAndVisitObjectDueToLayoutChange(from);
     159             :   DCHECK(marking_state()->IsBlack(from));
     160             :   // Mark the new address as black.
     161          54 :   if (from->address() + kTaggedSize == to->address()) {
     162             :     // The old and the new markbits overlap. The |to| object has the
     163             :     // grey color. To make it black, we need to set the second bit.
     164             :     DCHECK(new_mark_bit.Get<kAtomicity>());
     165             :     new_mark_bit.Next().Set<kAtomicity>();
     166             :   } else {
     167             :     bool success = Marking::WhiteToBlack<kAtomicity>(new_mark_bit);
     168             :     DCHECK(success);
     169             :     USE(success);
     170             :   }
     171             :   DCHECK(marking_state()->IsBlack(to));
     172             : }
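// Context for the bit trick above, as a hedged sketch: the marking bitmap
// has one bit per tagged word, and each object is colored by two consecutive
// bits ("10" = grey, "11" = black, "00" = white). Trimming the object start
// forward by exactly one word shifts its mark-bit pair by one position, so
// the old black pair reads as grey at the new address; setting the next bit
// (new_mark_bit.Next() above) restores black. Illustrated on a plain bitset:
#include <bitset>
namespace trim_sketch {
// `i` indexes the mark bit of the new object start; bits[i] is already set
// because it was the second bit of the old (black) object.
inline void BlackenAfterOneWordTrim(std::bitset<256>& bits, size_t i) {
  bits.set(i + 1);  // grey "10" becomes black "11"
}
}  // namespace trim_sketch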
     173             : 
     174           0 : class IncrementalMarkingRootMarkingVisitor : public RootVisitor {
     175             :  public:
     176             :   explicit IncrementalMarkingRootMarkingVisitor(
     177       51904 :       IncrementalMarking* incremental_marking)
     178      103808 :       : heap_(incremental_marking->heap()) {}
     179             : 
     180   159037981 :   void VisitRootPointer(Root root, const char* description,
     181             :                         FullObjectSlot p) override {
     182   159037981 :     MarkObjectByPointer(p);
     183   159037943 :   }
     184             : 
     185     1404915 :   void VisitRootPointers(Root root, const char* description,
     186             :                          FullObjectSlot start, FullObjectSlot end) override {
     187    33238878 :     for (FullObjectSlot p = start; p < end; ++p) MarkObjectByPointer(p);
     188     1404915 :   }
     189             : 
     190             :  private:
     191   189466995 :   void MarkObjectByPointer(FullObjectSlot p) {
     192   189466995 :     Object obj = *p;
     193   195703642 :     if (!obj->IsHeapObject()) return;
     194             : 
     195   183230416 :     heap_->incremental_marking()->WhiteToGreyAndPush(HeapObject::cast(obj));
     196             :   }
     197             : 
     198             :   Heap* heap_;
     199             : };
     200             : 
     201           0 : void IncrementalMarking::DeactivateIncrementalWriteBarrierForSpace(
     202             :     PagedSpace* space) {
     203      255367 :   for (Page* p : *space) {
     204      174151 :     p->SetOldGenerationPageFlags(false);
     205             :   }
     206           0 : }
     207             : 
     208             : 
     209           0 : void IncrementalMarking::DeactivateIncrementalWriteBarrierForSpace(
     210             :     NewSpace* space) {
     211      109454 :   for (Page* p : *space) {
     212       82382 :     p->SetYoungGenerationPageFlags(false);
     213             :   }
     214           0 : }
     215             : 
     216             : 
     217       27072 : void IncrementalMarking::DeactivateIncrementalWriteBarrier() {
     218      162432 :   DeactivateIncrementalWriteBarrierForSpace(heap_->old_space());
     219       27072 :   DeactivateIncrementalWriteBarrierForSpace(heap_->map_space());
     220       27072 :   DeactivateIncrementalWriteBarrierForSpace(heap_->code_space());
     221       27072 :   DeactivateIncrementalWriteBarrierForSpace(heap_->new_space());
     222             : 
     223       62482 :   for (LargePage* p : *heap_->lo_space()) {
     224        8338 :     p->SetOldGenerationPageFlags(false);
     225             :   }
     226             : 
     227       65325 :   for (LargePage* p : *heap_->code_lo_space()) {
     228       11181 :     p->SetOldGenerationPageFlags(false);
     229             :   }
     230       27072 : }
     231             : 
     232             : 
     233           0 : void IncrementalMarking::ActivateIncrementalWriteBarrier(PagedSpace* space) {
     234      231519 :   for (Page* p : *space) {
     235      148695 :     p->SetOldGenerationPageFlags(true);
     236             :   }
     237           0 : }
     238             : 
     239             : 
     240           0 : void IncrementalMarking::ActivateIncrementalWriteBarrier(NewSpace* space) {
     241      109682 :   for (Page* p : *space) {
     242       82074 :     p->SetYoungGenerationPageFlags(true);
     243             :   }
     244           0 : }
     245             : 
     246             : 
     247       27608 : void IncrementalMarking::ActivateIncrementalWriteBarrier() {
     248      165648 :   ActivateIncrementalWriteBarrier(heap_->old_space());
     249       27608 :   ActivateIncrementalWriteBarrier(heap_->map_space());
     250       27608 :   ActivateIncrementalWriteBarrier(heap_->code_space());
     251       27608 :   ActivateIncrementalWriteBarrier(heap_->new_space());
     252             : 
     253       61231 :   for (LargePage* p : *heap_->lo_space()) {
     254        6015 :     p->SetOldGenerationPageFlags(true);
     255             :   }
     256             : 
     257       64745 :   for (LargePage* p : *heap_->code_lo_space()) {
     258        9529 :     p->SetOldGenerationPageFlags(true);
     259             :   }
     260       27608 : }
     261             : 
     262             : 
     263       83487 : bool IncrementalMarking::WasActivated() { return was_activated_; }
     264             : 
     265             : 
     266     1400903 : bool IncrementalMarking::CanBeActivated() {
     267             :   // Only start incremental marking in a safe state: 1) when incremental
     268             :   // marking is turned on, 2) when we are currently not in a GC, and
     269             :   // 3) when we are currently not serializing or deserializing the heap.
     270     1386678 :   return FLAG_incremental_marking && heap_->gc_state() == Heap::NOT_IN_GC &&
     271     2284965 :          heap_->deserialization_complete() &&
     272     2284965 :          !heap_->isolate()->serializer_enabled();
     273             : }
     274             : 
     275             : 
     276       27072 : void IncrementalMarking::Deactivate() {
     277       27072 :   DeactivateIncrementalWriteBarrier();
     278       27072 : }
     279             : 
     280      114585 : void IncrementalMarking::Start(GarbageCollectionReason gc_reason) {
     281       31884 :   if (FLAG_trace_incremental_marking) {
     282             :     int old_generation_size_mb =
     283           5 :         static_cast<int>(heap()->OldGenerationSizeOfObjects() / MB);
     284             :     int old_generation_limit_mb =
     285           5 :         static_cast<int>(heap()->old_generation_allocation_limit() / MB);
     286             :     heap()->isolate()->PrintWithTimestamp(
     287             :         "[IncrementalMarking] Start (%s): old generation %dMB, limit %dMB, "
     288             :         "slack %dMB\n",
     289             :         Heap::GarbageCollectionReasonToString(gc_reason),
     290             :         old_generation_size_mb, old_generation_limit_mb,
     291          15 :         Max(0, old_generation_limit_mb - old_generation_size_mb));
     292             :   }
     293             :   DCHECK(FLAG_incremental_marking);
     294             :   DCHECK(state_ == STOPPED);
     295             :   DCHECK(heap_->gc_state() == Heap::NOT_IN_GC);
     296             :   DCHECK(!heap_->isolate()->serializer_enabled());
     297             : 
     298       95652 :   Counters* counters = heap_->isolate()->counters();
     299             : 
     300             :   counters->incremental_marking_reason()->AddSample(
     301       31884 :       static_cast<int>(gc_reason));
     302             :   HistogramTimerScope incremental_marking_scope(
     303       31884 :       counters->gc_incremental_marking_start());
     304       95652 :   TRACE_EVENT0("v8", "V8.GCIncrementalMarkingStart");
     305      127536 :   TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_START);
     306       63768 :   heap_->tracer()->NotifyIncrementalMarkingStart();
     307             : 
     308       31884 :   start_time_ms_ = heap()->MonotonicallyIncreasingTimeInMs();
     309       31884 :   initial_old_generation_size_ = heap_->OldGenerationSizeOfObjects();
     310       63768 :   old_generation_allocation_counter_ = heap_->OldGenerationAllocationCounter();
     311       31884 :   bytes_allocated_ = 0;
     312       31884 :   bytes_marked_ahead_of_schedule_ = 0;
     313       31884 :   bytes_marked_concurrently_ = 0;
     314       31884 :   should_hurry_ = false;
     315       31884 :   was_activated_ = true;
     316             : 
     317       95652 :   if (!heap_->mark_compact_collector()->sweeping_in_progress()) {
     318       12966 :     StartMarking();
     319             :   } else {
     320       18918 :     if (FLAG_trace_incremental_marking) {
     321             :       heap()->isolate()->PrintWithTimestamp(
     322           0 :           "[IncrementalMarking] Start sweeping.\n");
     323             :     }
     324             :     SetState(SWEEPING);
     325             :   }
     326             : 
     327             :   heap_->AddAllocationObserversToAllSpaces(&old_generation_observer_,
     328       31884 :                                            &new_generation_observer_);
     329       31884 :   incremental_marking_job()->Start(heap_);
     330       31884 : }
     331             : 
     332             : 
     333       82829 : void IncrementalMarking::StartMarking() {
     334      165132 :   if (heap_->isolate()->serializer_enabled()) {
     335             :     // Black allocation currently starts when we start incremental marking,
     336             :     // but we cannot enable black allocation while deserializing. Hence, we
     337             :     // have to delay the start of incremental marking in that case.
     338           0 :     if (FLAG_trace_incremental_marking) {
     339             :       heap()->isolate()->PrintWithTimestamp(
     340           0 :           "[IncrementalMarking] Start delayed - serializer\n");
     341             :     }
     342           0 :     return;
     343             :   }
     344       27608 :   if (FLAG_trace_incremental_marking) {
     345             :     heap()->isolate()->PrintWithTimestamp(
     346           5 :         "[IncrementalMarking] Start marking\n");
     347             :   }
     348             : 
     349             :   is_compacting_ =
     350       55216 :       !FLAG_never_compact && heap_->mark_compact_collector()->StartCompaction();
     351             : 
     352             :   SetState(MARKING);
     353             : 
     354             :   {
     355      110432 :     TRACE_GC(heap()->tracer(),
     356             :              GCTracer::Scope::MC_INCREMENTAL_EMBEDDER_PROLOGUE);
     357       82824 :     heap_->local_embedder_heap_tracer()->TracePrologue();
     358             :   }
     359             : 
     360       27608 :   ActivateIncrementalWriteBarrier();
     361             : 
     362             : // Marking bits are cleared by the sweeper.
     363             : #ifdef VERIFY_HEAP
     364             :   if (FLAG_verify_heap) {
     365             :     heap_->mark_compact_collector()->VerifyMarkbitsAreClean();
     366             :   }
     367             : #endif
     368             : 
     369       55216 :   heap_->isolate()->compilation_cache()->MarkCompactPrologue();
     370             : 
     371             : #ifdef V8_CONCURRENT_MARKING
     372             :   // The write-barrier does not check the color of the source object.
     373             :   // Start black allocation earlier to ensure faster marking progress.
     374       27608 :   if (!black_allocation_) {
     375       27608 :     StartBlackAllocation();
     376             :   }
     377             : #endif
     378             : 
     379             :   // Mark strong roots grey.
     380             :   IncrementalMarkingRootMarkingVisitor visitor(this);
     381       27608 :   heap_->IterateStrongRoots(&visitor, VISIT_ONLY_STRONG);
     382             : 
     383       54958 :   if (FLAG_concurrent_marking && !heap_->IsTearingDown()) {
     384       54700 :     heap_->concurrent_marking()->ScheduleTasks();
     385             :   }
     386             : 
     387             :   // Ready to start incremental marking.
     388       27608 :   if (FLAG_trace_incremental_marking) {
     389           5 :     heap()->isolate()->PrintWithTimestamp("[IncrementalMarking] Running\n");
     390             :   }
     391             : }
     392             : 
     393      112981 : void IncrementalMarking::StartBlackAllocation() {
     394             :   DCHECK(FLAG_black_allocation);
     395             :   DCHECK(!black_allocation_);
     396             :   DCHECK(IsMarking());
     397       28244 :   black_allocation_ = true;
     398       28244 :   heap()->old_space()->MarkLinearAllocationAreaBlack();
     399       28244 :   heap()->map_space()->MarkLinearAllocationAreaBlack();
     400       28244 :   heap()->code_space()->MarkLinearAllocationAreaBlack();
     401       28244 :   if (FLAG_trace_incremental_marking) {
     402             :     heap()->isolate()->PrintWithTimestamp(
     403           5 :         "[IncrementalMarking] Black allocation started\n");
     404             :   }
     405       28244 : }
     406             : 
     407        1908 : void IncrementalMarking::PauseBlackAllocation() {
     408             :   DCHECK(FLAG_black_allocation);
     409             :   DCHECK(IsMarking());
     410         636 :   heap()->old_space()->UnmarkLinearAllocationArea();
     411         636 :   heap()->map_space()->UnmarkLinearAllocationArea();
     412         636 :   heap()->code_space()->UnmarkLinearAllocationArea();
     413         636 :   if (FLAG_trace_incremental_marking) {
     414             :     heap()->isolate()->PrintWithTimestamp(
     415           0 :         "[IncrementalMarking] Black allocation paused\n");
     416             :   }
     417         636 :   black_allocation_ = false;
     418         636 : }
     419             : 
     420       31098 : void IncrementalMarking::FinishBlackAllocation() {
     421       31093 :   if (black_allocation_) {
     422       27072 :     black_allocation_ = false;
     423       27072 :     if (FLAG_trace_incremental_marking) {
     424             :       heap()->isolate()->PrintWithTimestamp(
     425           5 :           "[IncrementalMarking] Black allocation finished\n");
     426             :     }
     427             :   }
     428       31093 : }
     429             : 
     430       24296 : void IncrementalMarking::MarkRoots() {
     431             :   DCHECK(!finalize_marking_completed_);
     432             :   DCHECK(IsMarking());
     433             : 
     434             :   IncrementalMarkingRootMarkingVisitor visitor(this);
     435       24296 :   heap_->IterateStrongRoots(&visitor, VISIT_ONLY_STRONG);
     436       24296 : }
     437             : 
     438       17955 : bool IncrementalMarking::ShouldRetainMap(Map map, int age) {
     439       17955 :   if (age == 0) {
     440             :     // The map has aged. Do not retain this map.
     441             :     return false;
     442             :   }
     443       16913 :   Object constructor = map->GetConstructor();
     444       33826 :   if (!constructor->IsHeapObject() ||
     445             :       marking_state()->IsWhite(HeapObject::cast(constructor))) {
     446             :     // The constructor is dead, no new objects with this map can
     447             :     // be created. Do not retain this map.
     448             :     return false;
     449             :   }
     450        3674 :   return true;
     451             : }
     452             : 
     453             : 
     454       48592 : void IncrementalMarking::RetainMaps() {
      455             :   // Do not retain dead maps if the flag disables it, or if there is
      456             :   // - memory pressure (reduce_memory_footprint_), or
      457             :   // - a GC requested by tests or dev-tools (abort_incremental_marking_).
     458       72874 :   bool map_retaining_is_disabled = heap()->ShouldReduceMemory() ||
     459       24282 :                                    FLAG_retain_maps_for_n_gc == 0;
     460       24296 :   WeakArrayList retained_maps = heap()->retained_maps();
     461             :   int length = retained_maps->length();
     462             :   // The number_of_disposed_maps separates maps in the retained_maps
     463             :   // array that were created before and after context disposal.
     464             :   // We do not age and retain disposed maps to avoid memory leaks.
     465       24296 :   int number_of_disposed_maps = heap()->number_of_disposed_maps_;
     466       65704 :   for (int i = 0; i < length; i += 2) {
     467       41408 :     MaybeObject value = retained_maps->Get(i);
     468       41408 :     HeapObject map_heap_object;
     469       41408 :     if (!value->GetHeapObjectIfWeak(&map_heap_object)) {
     470       14188 :       continue;
     471             :     }
     472       54440 :     int age = retained_maps->Get(i + 1).ToSmi().value();
     473             :     int new_age;
     474             :     Map map = Map::cast(map_heap_object);
     475       54426 :     if (i >= number_of_disposed_maps && !map_retaining_is_disabled &&
     476             :         marking_state()->IsWhite(map)) {
     477       17955 :       if (ShouldRetainMap(map, age)) {
     478        3674 :         WhiteToGreyAndPush(map);
     479             :       }
     480       17955 :       Object prototype = map->prototype();
     481       51781 :       if (age > 0 && prototype->IsHeapObject() &&
     482             :           marking_state()->IsWhite(HeapObject::cast(prototype))) {
     483             :         // The prototype is not marked, age the map.
     484       16333 :         new_age = age - 1;
     485             :       } else {
     486             :         // The prototype and the constructor are marked, this map keeps only
     487             :         // transition tree alive, not JSObjects. Do not age the map.
     488             :         new_age = age;
     489             :       }
     490             :     } else {
     491        9265 :       new_age = FLAG_retain_maps_for_n_gc;
     492             :     }
     493             :     // Compact the array and update the age.
     494       27220 :     if (new_age != age) {
     495       16440 :       retained_maps->Set(i + 1, MaybeObject::FromSmi(Smi::FromInt(new_age)));
     496             :     }
     497             :   }
     498       24296 : }
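// retained_maps is a flat weak array of (map, age) pairs, which is why the
// loop above strides by two and reads the Smi age at i + 1. A hedged sketch
// of just the aging policy on plain data; it deliberately omits the
// disposed-maps and flag handling, and the field names are assumptions:
#include <vector>
namespace retain_sketch {
struct RetainedMap { bool map_is_marked; bool prototype_is_marked; int age; };
inline void AgeMaps(std::vector<RetainedMap>& maps, int retain_for_n_gc) {
  for (RetainedMap& m : maps) {
    if (!m.map_is_marked) {
      // Unreached map: age it only while its prototype is also unreached;
      // once the age hits 0 the map is no longer retained.
      if (m.age > 0 && !m.prototype_is_marked) m.age -= 1;
    } else {
      m.age = retain_for_n_gc;  // reachable again: reset the age
    }
  }
}
}  // namespace retain_sketch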
     499             : 
     500       48597 : void IncrementalMarking::FinalizeIncrementally() {
     501       97184 :   TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_FINALIZE_BODY);
     502             :   DCHECK(!finalize_marking_completed_);
     503             :   DCHECK(IsMarking());
     504             : 
     505       24296 :   double start = heap_->MonotonicallyIncreasingTimeInMs();
     506             : 
     507             :   // After finishing incremental marking, we try to discover all unmarked
     508             :   // objects to reduce the marking load in the final pause.
     509             :   // 1) We scan and mark the roots again to find all changes to the root set.
     510             :   // 2) Age and retain maps embedded in optimized code.
     511       24296 :   MarkRoots();
     512             : 
      513             :   // Map retaining is needed for performance, not correctness,
     514             :   // so we can do it only once at the beginning of the finalization.
     515       24296 :   RetainMaps();
     516             : 
     517       24296 :   finalize_marking_completed_ = true;
     518             : 
     519       97170 :   if (FLAG_black_allocation && !heap()->ShouldReduceMemory() &&
     520       24282 :       !black_allocation_) {
     521             :     // TODO(hpayer): Move to an earlier point as soon as we make faster marking
     522             :     // progress.
     523           0 :     StartBlackAllocation();
     524             :   }
     525             : 
     526       24296 :   if (FLAG_trace_incremental_marking) {
     527           5 :     double end = heap_->MonotonicallyIncreasingTimeInMs();
     528           5 :     double delta = end - start;
     529             :     heap()->isolate()->PrintWithTimestamp(
     530           5 :         "[IncrementalMarking] Finalize incrementally spent %.1f ms.\n", delta);
     531       24296 :   }
     532       24296 : }
     533             : 
     534       24866 : void IncrementalMarking::UpdateMarkingWorklistAfterScavenge() {
     535       47188 :   if (!IsMarking()) return;
     536             : 
     537         636 :   Map filler_map = ReadOnlyRoots(heap_).one_pointer_filler_map();
     538             : 
     539             : #ifdef ENABLE_MINOR_MC
     540             :   MinorMarkCompactCollector::MarkingState* minor_marking_state =
     541         636 :       heap()->minor_mark_compact_collector()->marking_state();
     542             : #else
     543             :   void* minor_marking_state = nullptr;
     544             : #endif  // ENABLE_MINOR_MC
     545             : 
     546             :   marking_worklist()->Update([
     547             : #ifdef DEBUG
      548             :                                  // this is referenced inside a DCHECK.
     549             :                                  this,
     550             : #endif
     551             :                                  filler_map, minor_marking_state](
     552      778107 :                                  HeapObject obj, HeapObject* out) -> bool {
     553             :     DCHECK(obj->IsHeapObject());
     554             :     // Only pointers to from space have to be updated.
     555      778107 :     if (Heap::InFromSpace(obj)) {
     556             :       MapWord map_word = obj->map_word();
     557      281900 :       if (!map_word.IsForwardingAddress()) {
     558             :         // There may be objects on the marking deque that do not exist anymore,
     559             :         // e.g. left trimmed objects or objects from the root set (frames).
      560             :         // If these objects are dead at scavenging time, their marking deque
     561             :         // entries will not point to forwarding addresses. Hence, we can discard
     562             :         // them.
     563             :         return false;
     564             :       }
     565             :       HeapObject dest = map_word.ToForwardingAddress();
     566             :       DCHECK_IMPLIES(marking_state()->IsWhite(obj), obj->IsFiller());
     567      188468 :       *out = dest;
     568      188468 :       return true;
     569      496207 :     } else if (Heap::InToSpace(obj)) {
     570             :       // The object may be on a page that was moved in new space.
     571             :       DCHECK(Page::FromHeapObject(obj)->IsFlagSet(Page::SWEEP_TO_ITERATE));
     572             : #ifdef ENABLE_MINOR_MC
     573           0 :       if (minor_marking_state->IsGrey(obj)) {
     574           0 :         *out = obj;
     575           0 :         return true;
     576             :       }
     577             : #endif  // ENABLE_MINOR_MC
     578             :       return false;
     579             :     } else {
     580             :       // The object may be on a page that was moved from new to old space. Only
     581             :       // applicable during minor MC garbage collections.
     582      496207 :       if (Page::FromHeapObject(obj)->IsFlagSet(Page::SWEEP_TO_ITERATE)) {
     583             : #ifdef ENABLE_MINOR_MC
     584           0 :         if (minor_marking_state->IsGrey(obj)) {
     585           0 :           *out = obj;
     586           0 :           return true;
     587             :         }
     588             : #endif  // ENABLE_MINOR_MC
     589             :         return false;
     590             :       }
     591             :       DCHECK_IMPLIES(marking_state()->IsWhite(obj), obj->IsFiller());
      592             :       // Skip one-word filler objects that appear on the
      593             :       // stack when we perform an in-place array shift.
     594      496207 :       if (obj->map() != filler_map) {
     595      496207 :         *out = obj;
     596      496207 :         return true;
     597             :       }
     598             :       return false;
     599             :     }
     600        1272 :   });
     601             : 
     602         636 :   UpdateWeakReferencesAfterScavenge();
     603             : }
     604             : 
     605             : namespace {
     606             : template <typename T>
     607      102123 : T ForwardingAddress(T heap_obj) {
     608             :   MapWord map_word = heap_obj->map_word();
     609             : 
     610      102123 :   if (map_word.IsForwardingAddress()) {
     611             :     return T::cast(map_word.ToForwardingAddress());
     612       82451 :   } else if (Heap::InNewSpace(heap_obj)) {
     613        1431 :     return T();
     614             :   } else {
     615       81020 :     return heap_obj;
     616             :   }
     617             : }
     618             : }  // namespace
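// During a scavenge, a survivor's map word is overwritten with a forwarding
// pointer to its new copy; ForwardingAddress() above decodes that: a moved
// object maps to its new address, a dead new-space object to null, and an
// old-space object to itself. A hedged sketch of the same three-way decision
// on a plain struct (the fields are illustrative assumptions):
namespace forwarding_sketch {
struct Obj { Obj* forwarding = nullptr; bool in_new_space = false; };
inline Obj* Forwarded(Obj* o) {
  if (o->forwarding != nullptr) return o->forwarding;  // survived and moved
  if (o->in_new_space) return nullptr;                 // died in the scavenge
  return o;                                            // never moved
}
}  // namespace forwarding_sketch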
     619             : 
     620         636 : void IncrementalMarking::UpdateWeakReferencesAfterScavenge() {
     621             :   weak_objects_->weak_references.Update(
     622             :       [](std::pair<HeapObject, HeapObjectSlot> slot_in,
     623       86632 :          std::pair<HeapObject, HeapObjectSlot>* slot_out) -> bool {
     624       86632 :         HeapObject heap_obj = slot_in.first;
     625       86632 :         HeapObject forwarded = ForwardingAddress(heap_obj);
     626             : 
     627       86632 :         if (!forwarded.is_null()) {
     628             :           ptrdiff_t distance_to_slot =
     629       85201 :               slot_in.second.address() - slot_in.first.ptr();
     630       85201 :           Address new_slot = forwarded.ptr() + distance_to_slot;
     631       85201 :           slot_out->first = forwarded;
     632       85201 :           slot_out->second = HeapObjectSlot(new_slot);
     633             :           return true;
     634             :         }
     635             : 
     636             :         return false;
     637         636 :       });
     638             :   weak_objects_->weak_objects_in_code.Update(
     639             :       [](std::pair<HeapObject, Code> slot_in,
     640         843 :          std::pair<HeapObject, Code>* slot_out) -> bool {
     641         843 :         HeapObject heap_obj = slot_in.first;
     642         843 :         HeapObject forwarded = ForwardingAddress(heap_obj);
     643             : 
     644         843 :         if (!forwarded.is_null()) {
     645         843 :           slot_out->first = forwarded;
     646         843 :           slot_out->second = slot_in.second;
     647             :           return true;
     648             :         }
     649             : 
     650             :         return false;
     651         636 :       });
     652             :   weak_objects_->ephemeron_hash_tables.Update(
     653             :       [](EphemeronHashTable slot_in, EphemeronHashTable* slot_out) -> bool {
     654       14648 :         EphemeronHashTable forwarded = ForwardingAddress(slot_in);
     655             : 
     656       14648 :         if (!forwarded.is_null()) {
     657       14648 :           *slot_out = forwarded;
     658             :           return true;
     659             :         }
     660             : 
     661             :         return false;
     662         636 :       });
     663             : 
     664           0 :   auto ephemeron_updater = [](Ephemeron slot_in, Ephemeron* slot_out) -> bool {
     665           0 :     HeapObject key = slot_in.key;
     666           0 :     HeapObject value = slot_in.value;
     667           0 :     HeapObject forwarded_key = ForwardingAddress(key);
     668           0 :     HeapObject forwarded_value = ForwardingAddress(value);
     669             : 
     670           0 :     if (!forwarded_key.is_null() && !forwarded_value.is_null()) {
     671           0 :       *slot_out = Ephemeron{forwarded_key, forwarded_value};
     672             :       return true;
     673             :     }
     674             : 
     675             :     return false;
     676             :   };
     677             : 
     678         636 :   weak_objects_->current_ephemerons.Update(ephemeron_updater);
     679         636 :   weak_objects_->next_ephemerons.Update(ephemeron_updater);
     680         636 :   weak_objects_->discovered_ephemerons.Update(ephemeron_updater);
     681             : #ifdef DEBUG
     682             :   weak_objects_->bytecode_flushing_candidates.Iterate(
     683             :       [](SharedFunctionInfo candidate) {
     684             :         DCHECK(!Heap::InNewSpace(candidate));
     685             :       });
     686             : #endif
     687         636 : }
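// The weak_references update above re-derives each slot address from its
// offset inside the moved object: new_slot = forwarded + (slot - old_object).
// A hedged arithmetic sketch with addresses as plain integers:
#include <cstdint>
namespace slot_sketch {
inline uintptr_t RelocatedSlot(uintptr_t old_object, uintptr_t old_slot,
                               uintptr_t new_object) {
  uintptr_t distance_to_slot = old_slot - old_object;  // offset is preserved
  return new_object + distance_to_slot;                // same offset, new base
}
}  // namespace slot_sketch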
     688             : 
     689       23594 : void IncrementalMarking::UpdateMarkedBytesAfterScavenge(
     690             :     size_t dead_bytes_in_new_space) {
     691       47188 :   if (!IsMarking()) return;
     692             :   bytes_marked_ahead_of_schedule_ -=
     693        1272 :       Min(bytes_marked_ahead_of_schedule_, dead_bytes_in_new_space);
     694             : }
     695             : 
     696             : bool IncrementalMarking::IsFixedArrayWithProgressBar(HeapObject obj) {
     697             :   if (!obj->IsFixedArray()) return false;
     698             :   MemoryChunk* chunk = MemoryChunk::FromHeapObject(obj);
     699             :   return chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR);
     700             : }
     701             : 
     702    60936996 : int IncrementalMarking::VisitObject(Map map, HeapObject obj) {
     703             :   DCHECK(marking_state()->IsGrey(obj) || marking_state()->IsBlack(obj));
     704             :   if (!marking_state()->GreyToBlack(obj)) {
     705             :     // The object can already be black in these cases:
     706             :     // 1. The object is a fixed array with the progress bar.
     707             :     // 2. The object is a JSObject that was colored black before
     708             :     //    unsafe layout change.
     709             :     // 3. The object is a string that was colored black before
     710             :     //    unsafe layout change.
      711             :     // 4. The object is materialized by the deoptimizer.
     712             :     // 5. The object is a descriptor array marked black by
     713             :     //    the descriptor array marking barrier.
     714             :     DCHECK(obj->IsHashTable() || obj->IsPropertyArray() ||
     715             :            obj->IsFixedArray() || obj->IsContext() || obj->IsJSObject() ||
     716             :            obj->IsString() || obj->IsDescriptorArray());
     717             :   }
     718             :   DCHECK(marking_state()->IsBlack(obj));
     719    60936995 :   WhiteToGreyAndPush(map);
     720             :   IncrementalMarkingMarkingVisitor visitor(heap()->mark_compact_collector(),
     721   121873991 :                                            marking_state());
     722    60936994 :   return visitor.Visit(map, obj);
     723             : }
     724             : 
     725    17155024 : void IncrementalMarking::ProcessBlackAllocatedObject(HeapObject obj) {
     726    34309944 :   if (IsMarking() && marking_state()->IsBlack(obj)) {
     727    17154925 :     RevisitObject(obj);
     728             :   }
     729    17155098 : }
     730             : 
     731    37197800 : void IncrementalMarking::RevisitObject(HeapObject obj) {
     732             :   DCHECK(IsMarking());
     733             :   DCHECK(marking_state()->IsBlack(obj));
     734             :   Page* page = Page::FromHeapObject(obj);
     735    18598881 :   if (page->owner()->identity() == LO_SPACE) {
     736          20 :     page->ResetProgressBar();
     737             :   }
     738             :   Map map = obj->map();
     739    18598881 :   WhiteToGreyAndPush(map);
     740             :   IncrementalMarkingMarkingVisitor visitor(heap()->mark_compact_collector(),
     741    37197838 :                                            marking_state());
     742             :   visitor.Visit(map, obj);
     743    18599065 : }
     744             : 
     745     3477055 : void IncrementalMarking::VisitDescriptors(HeapObject host,
     746             :                                           DescriptorArray descriptors,
     747     3477055 :                                           int number_of_own_descriptors) {
     748             :   IncrementalMarkingMarkingVisitor visitor(heap()->mark_compact_collector(),
     749     6954110 :                                            marking_state());
     750             :   // This is necessary because the Scavenger records slots only for the
     751             :   // promoted black objects and the marking visitor of DescriptorArray skips
     752             :   // the descriptors marked by the visitor.VisitDescriptors() below.
     753             :   visitor.MarkDescriptorArrayBlack(host, descriptors);
     754             :   visitor.VisitDescriptors(descriptors, number_of_own_descriptors);
     755     3477056 : }
     756             : 
     757             : intptr_t IncrementalMarking::ProcessMarkingWorklist(
     758    62213028 :     intptr_t bytes_to_process, ForceCompletionAction completion) {
     759             :   intptr_t bytes_processed = 0;
     760    58840631 :   while (bytes_processed < bytes_to_process || completion == FORCE_COMPLETION) {
     761    62213028 :     HeapObject obj = marking_worklist()->Pop();
     762    62213026 :     if (obj.is_null()) break;
     763             :     // Left trimming may result in white, grey, or black filler objects on the
     764             :     // marking deque. Ignore these objects.
     765   121874030 :     if (obj->IsFiller()) {
     766             :       DCHECK(!marking_state()->IsImpossible(obj));
     767          21 :       continue;
     768             :     }
     769    60936993 :     unscanned_bytes_of_large_object_ = 0;
     770    60936993 :     int size = VisitObject(obj->map(), obj);
     771    57543006 :     bytes_processed += size - unscanned_bytes_of_large_object_;
     772             :   }
     773             :   return bytes_processed;
     774             : }
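// A hedged sketch of the drain loop above: pop grey objects and visit them
// until the byte budget is spent, or until the worklist is empty when
// completion is forced. Types are illustrative stand-ins for V8's worklist:
#include <cstdint>
#include <deque>
namespace drain_sketch {
struct GreyObject { int size; };
inline intptr_t Drain(std::deque<GreyObject>& worklist, intptr_t budget,
                      bool force_completion) {
  intptr_t processed = 0;
  while (processed < budget || force_completion) {
    if (worklist.empty()) break;
    GreyObject obj = worklist.front();
    worklist.pop_front();
    processed += obj.size;  // in V8, VisitObject() scans and blackens here
  }
  return processed;
}
}  // namespace drain_sketch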
     775             : 
     776           0 : void IncrementalMarking::EmbedderStep(double duration_ms) {
     777             :   constexpr size_t kObjectsToProcessBeforeInterrupt = 500;
     778             : 
     779           0 :   TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_EMBEDDER_TRACING);
     780           0 :   double deadline = heap_->MonotonicallyIncreasingTimeInMs() + duration_ms;
     781             :   bool empty_worklist;
     782           0 :   do {
     783             :     {
     784             :       LocalEmbedderHeapTracer::ProcessingScope scope(
     785           0 :           heap_->local_embedder_heap_tracer());
     786           0 :       HeapObject object;
     787             :       size_t cnt = 0;
     788             :       empty_worklist = true;
     789           0 :       while (marking_worklist()->embedder()->Pop(0, &object)) {
     790           0 :         scope.TracePossibleWrapper(JSObject::cast(object));
     791           0 :         if (++cnt == kObjectsToProcessBeforeInterrupt) {
     792             :           cnt = 0;
     793             :           empty_worklist = false;
     794             :           break;
     795             :         }
     796           0 :       }
     797             :     }
     798           0 :     heap_->local_embedder_heap_tracer()->Trace(deadline);
     799           0 :   } while (!empty_worklist &&
     800           0 :            (heap_->MonotonicallyIncreasingTimeInMs() < deadline));
     801           0 :   heap_->local_embedder_heap_tracer()->SetEmbedderWorklistEmpty(empty_worklist);
     802           0 : }
     803             : 
     804       35102 : void IncrementalMarking::Hurry() {
     805             :   // A scavenge may have pushed new objects on the marking deque (due to black
     806             :   // allocation) even in COMPLETE state. This may happen if scavenges are
     807             :   // forced e.g. in tests. It should not happen when COMPLETE was set when
     808             :   // incremental marking finished and a regular GC was triggered after that
     809             :   // because should_hurry_ will force a full GC.
     810       27072 :   if (!marking_worklist()->IsEmpty()) {
     811             :     double start = 0.0;
     812        8030 :     if (FLAG_trace_incremental_marking) {
     813           0 :       start = heap_->MonotonicallyIncreasingTimeInMs();
     814           0 :       if (FLAG_trace_incremental_marking) {
     815           0 :         heap()->isolate()->PrintWithTimestamp("[IncrementalMarking] Hurry\n");
     816             :       }
     817             :     }
      818             :     // TODO(gc) hurry can mark objects it encounters black as the mutator
     819             :     // was stopped.
     820             :     ProcessMarkingWorklist(0, FORCE_COMPLETION);
     821             :     SetState(COMPLETE);
     822        8030 :     if (FLAG_trace_incremental_marking) {
     823           0 :       double end = heap_->MonotonicallyIncreasingTimeInMs();
     824           0 :       double delta = end - start;
     825           0 :       if (FLAG_trace_incremental_marking) {
     826             :         heap()->isolate()->PrintWithTimestamp(
     827             :             "[IncrementalMarking] Complete (hurry), spent %d ms.\n",
     828           0 :             static_cast<int>(delta));
     829             :       }
     830             :     }
     831             :   }
     832       27072 : }
     833             : 
     834             : 
     835       62220 : void IncrementalMarking::Stop() {
     836       31141 :   if (IsStopped()) return;
     837       31093 :   if (FLAG_trace_incremental_marking) {
     838             :     int old_generation_size_mb =
     839           5 :         static_cast<int>(heap()->OldGenerationSizeOfObjects() / MB);
     840             :     int old_generation_limit_mb =
     841           5 :         static_cast<int>(heap()->old_generation_allocation_limit() / MB);
     842             :     heap()->isolate()->PrintWithTimestamp(
     843             :         "[IncrementalMarking] Stopping: old generation %dMB, limit %dMB, "
     844             :         "overshoot %dMB\n",
     845             :         old_generation_size_mb, old_generation_limit_mb,
     846          10 :         Max(0, old_generation_size_mb - old_generation_limit_mb));
     847             :   }
     848             : 
     849      279837 :   SpaceIterator it(heap_);
     850      310930 :   while (it.has_next()) {
     851      248744 :     Space* space = it.next();
     852      497488 :     if (space == heap_->new_space()) {
     853       31093 :       space->RemoveAllocationObserver(&new_generation_observer_);
     854             :     } else {
     855      217651 :       space->RemoveAllocationObserver(&old_generation_observer_);
     856             :     }
     857             :   }
     858             : 
     859             :   IncrementalMarking::set_should_hurry(false);
     860       62186 :   heap_->isolate()->stack_guard()->ClearGC();
     861             :   SetState(STOPPED);
     862       31093 :   is_compacting_ = false;
     863       31093 :   FinishBlackAllocation();
     864             : }
     865             : 
     866             : 
     867       27072 : void IncrementalMarking::Finalize() {
     868       27072 :   Hurry();
     869       27072 :   Stop();
     870       27072 : }
     871             : 
     872             : 
     873     1241596 : void IncrementalMarking::FinalizeMarking(CompletionAction action) {
     874             :   DCHECK(!finalize_marking_completed_);
     875     1241591 :   if (FLAG_trace_incremental_marking) {
     876             :     heap()->isolate()->PrintWithTimestamp(
     877             :         "[IncrementalMarking] requesting finalization of incremental "
     878           5 :         "marking.\n");
     879             :   }
     880     1241591 :   request_type_ = FINALIZATION;
     881     1241591 :   if (action == GC_VIA_STACK_GUARD) {
     882     2457706 :     heap_->isolate()->stack_guard()->RequestGC();
     883             :   }
     884     1241591 : }
     885             : 
     886             : 
     887       27719 : void IncrementalMarking::MarkingComplete(CompletionAction action) {
     888             :   SetState(COMPLETE);
     889             :   // We will set the stack guard to request a GC now.  This will mean the rest
     890             :   // of the GC gets performed as soon as possible (we can't do a GC here in a
      891             :   // record-write context).  If a few things get allocated between now and then,
     892             :   // that shouldn't make us do a scavenge and keep being incremental, so we set
     893             :   // the should-hurry flag to indicate that there can't be much work left to do.
     894             :   set_should_hurry(true);
     895       27714 :   if (FLAG_trace_incremental_marking) {
     896             :     heap()->isolate()->PrintWithTimestamp(
     897           5 :         "[IncrementalMarking] Complete (normal).\n");
     898             :   }
     899       27714 :   request_type_ = COMPLETE_MARKING;
     900       27714 :   if (action == GC_VIA_STACK_GUARD) {
     901       21850 :     heap_->isolate()->stack_guard()->RequestGC();
     902             :   }
     903       27714 : }
     904             : 
     905             : 
     906       83492 : void IncrementalMarking::Epilogue() {
     907       83492 :   was_activated_ = false;
     908       83492 :   finalize_marking_completed_ = false;
     909       83492 : }
     910             : 
     911           0 : bool IncrementalMarking::ShouldDoEmbedderStep() {
     912     2581413 :   return state_ == MARKING && FLAG_incremental_marking_wrappers &&
     913     1280906 :          heap_->local_embedder_heap_tracer()->InUse();
     914             : }
     915             : 
     916     1235747 : double IncrementalMarking::AdvanceIncrementalMarking(
     917             :     double deadline_in_ms, CompletionAction completion_action,
     918     3672380 :     StepOrigin step_origin) {
     919             :   HistogramTimerScope incremental_marking_scope(
     920     3707241 :       heap_->isolate()->counters()->gc_incremental_marking());
     921     3707241 :   TRACE_EVENT0("v8", "V8.GCIncrementalMarking");
     922     6178735 :   TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL);
     923             :   DCHECK(!IsStopped());
     924             : 
     925             :   double remaining_time_in_ms = 0.0;
     926     1236942 :   do {
     927     1236942 :     if (ShouldDoEmbedderStep() && trace_wrappers_toggle_) {
     928           0 :       EmbedderStep(kStepSizeInMs);
     929             :     } else {
     930             :       const intptr_t step_size_in_bytes =
     931             :           GCIdleTimeHandler::EstimateMarkingStepSize(
     932             :               kStepSizeInMs,
     933     1236942 :               heap()->tracer()->IncrementalMarkingSpeedInBytesPerMillisecond());
     934     1236942 :       Step(step_size_in_bytes, completion_action, step_origin);
     935             :     }
     936     1236942 :     trace_wrappers_toggle_ = !trace_wrappers_toggle_;
     937             :     remaining_time_in_ms =
     938     1236942 :         deadline_in_ms - heap()->MonotonicallyIncreasingTimeInMs();
     939     3634204 :   } while (remaining_time_in_ms > kStepSizeInMs && !IsComplete() &&
     940     1198496 :            !marking_worklist()->IsEmpty());
     941     1235747 :   return remaining_time_in_ms;
     942             : }
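// The do/while above takes alternating embedder-tracing and V8 marking steps
// of kStepSizeInMs each, continuing only while a whole step still fits before
// the deadline, marking is not complete, and the worklist is non-empty. A
// hedged sketch of that continuation test:
namespace deadline_sketch {
inline bool ShouldTakeAnotherStep(double remaining_time_in_ms,
                                  double step_size_in_ms, bool complete,
                                  bool worklist_empty) {
  return remaining_time_in_ms > step_size_in_ms && !complete && !worklist_empty;
}
}  // namespace deadline_sketch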
     943             : 
     944             : 
     945       19556 : void IncrementalMarking::FinalizeSweeping() {
     946             :   DCHECK(state_ == SWEEPING);
     947       94212 :   if (heap_->mark_compact_collector()->sweeping_in_progress() &&
     948       20878 :       (!FLAG_concurrent_sweeping ||
     949       10427 :        !heap_->mark_compact_collector()->sweeper()->AreSweeperTasksRunning())) {
     950       11074 :     heap_->mark_compact_collector()->EnsureSweepingCompleted();
     951             :   }
     952       58668 :   if (!heap_->mark_compact_collector()->sweeping_in_progress()) {
     953             : #ifdef DEBUG
     954             :     heap_->VerifyCountersAfterSweeping();
     955             : #endif
     956       14642 :     StartMarking();
     957             :   }
     958       19556 : }
     959             : 
     960       63565 : size_t IncrementalMarking::StepSizeToKeepUpWithAllocations() {
     961             :   // Update bytes_allocated_ based on the allocation counter.
     962       63565 :   size_t current_counter = heap_->OldGenerationAllocationCounter();
     963       63565 :   bytes_allocated_ += current_counter - old_generation_allocation_counter_;
     964       63565 :   old_generation_allocation_counter_ = current_counter;
     965       63565 :   return bytes_allocated_;
     966             : }
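// bytes_allocated_ accumulates the old-generation allocation-counter delta
// since the last step, so marking speed can be paced against how fast the
// mutator allocates. A hedged sketch of that bookkeeping (names are
// assumptions):
#include <cstddef>
namespace pacing_sketch {
struct AllocationPacer {
  size_t old_counter = 0;
  size_t bytes_allocated = 0;
  size_t Update(size_t current_counter) {
    bytes_allocated += current_counter - old_counter;
    old_counter = current_counter;
    return bytes_allocated;
  }
};
}  // namespace pacing_sketch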
     967             : 
     968      128233 : size_t IncrementalMarking::StepSizeToMakeProgress() {
     969             :   const size_t kTargetStepCount = 256;
     970             :   const size_t kTargetStepCountAtOOM = 32;
     971             :   const size_t kMaxStepSizeInByte = 256 * KB;
     972      127129 :   size_t oom_slack = heap()->new_space()->Capacity() + 64 * MB;
     973             : 
     974       63564 :   if (!heap()->CanExpandOldGeneration(oom_slack)) {
     975        1104 :     return heap()->OldGenerationSizeOfObjects() / kTargetStepCountAtOOM;
     976             :   }
     977             : 
     978             :   return Min(Max(initial_old_generation_size_ / kTargetStepCount,
     979             :                  IncrementalMarking::kMinStepSizeInBytes),
     980      124922 :              kMaxStepSizeInByte);
     981             : }
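
// --- Editorial note --------------------------------------------------------
// Worked example with assumed sizes: the returned step is
//   min(max(initial_old_generation_size_ / 256, kMinStepSizeInBytes), 256 KB).
// A 32 MB initial old generation gives 32 MB / 256 = 128 KB (unless
// kMinStepSizeInBytes is larger); 128 MB would ask for 512 KB and be clamped
// to the 256 KB cap. Near OOM, i.e. when the heap cannot grow by new-space
// capacity + 64 MB, the step becomes 1/32 of the live old-generation size
// instead, so marking finishes in roughly 32 steps.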
     982             : 
     983       64109 : void IncrementalMarking::AdvanceIncrementalMarkingOnAllocation() {
     984             :   // Code using an AlwaysAllocateScope assumes that the GC state does not
     985             :   // change; consequently, no marking steps may be performed.
     986      320001 :   if (heap_->gc_state() != Heap::NOT_IN_GC || !FLAG_incremental_marking ||
     987      191808 :       (state_ != SWEEPING && state_ != MARKING) || heap_->always_allocate()) {
     988         544 :     return;
     989             :   }
     990             : 
     991             :   HistogramTimerScope incremental_marking_scope(
     992      127130 :       heap_->isolate()->counters()->gc_incremental_marking());
     993      190695 :   TRACE_EVENT0("v8", "V8.GCIncrementalMarking");
     994      317825 :   TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL);
     995             : 
     996             :   double embedder_step_time_ms = 0.0;
     997       63565 :   if (ShouldDoEmbedderStep() && trace_wrappers_toggle_) {
     998           0 :     double start = heap_->MonotonicallyIncreasingTimeInMs();
     999           0 :     EmbedderStep(kMaxStepSizeInMs);
    1000           0 :     embedder_step_time_ms = heap_->MonotonicallyIncreasingTimeInMs() - start;
    1001             :   }
    1002       63565 :   trace_wrappers_toggle_ = !trace_wrappers_toggle_;
    1003             : 
    1004             :   size_t bytes_to_process =
    1005       63565 :       StepSizeToKeepUpWithAllocations() + StepSizeToMakeProgress();
    1006       63565 :   if (bytes_to_process >= IncrementalMarking::kMinStepSizeInBytes &&
    1007             :       embedder_step_time_ms < kMaxStepSizeInMs) {
    1008             :     StepOnAllocation(bytes_to_process,
    1009       63548 :                      kMaxStepSizeInMs - embedder_step_time_ms);
    1010             :   }
    1011             : }
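
// --- Editorial sketch (not part of incremental-marking.cc) ----------------
// Shape of the allocation-driven advance above: on alternating invocations,
// give the embedder a slice of the wall-clock budget first, then run a V8
// marking step sized from allocation debt plus a progress quantum, skipping
// it when the budget or the step size is too small. Names are illustrative,
// not V8 API.
#include <cstddef>
#include <functional>

static void AdvanceOnAllocation(
    bool& toggle, double max_ms, size_t min_step_bytes,
    const std::function<double(double)>& embedder_step,
    const std::function<size_t()>& debt, const std::function<size_t()>& progress,
    const std::function<void(size_t, double)>& v8_step) {
  double embedder_ms = toggle ? embedder_step(max_ms) : 0.0;
  toggle = !toggle;
  size_t bytes = debt() + progress();
  if (bytes >= min_step_bytes && embedder_ms < max_ms) {
    v8_step(bytes, max_ms - embedder_ms);  // remaining budget for V8 marking
  }
}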
    1012             : 
    1013       63548 : void IncrementalMarking::StepOnAllocation(size_t bytes_to_process,
    1014      126316 :                                           double max_step_size) {
    1015             :   // The first step after a Scavenge will see many allocated bytes.
    1016             :   // Cap the step size to distribute the marking work more uniformly.
    1017             :   size_t step_size = GCIdleTimeHandler::EstimateMarkingStepSize(
    1018             :       max_step_size,
    1019       63548 :       heap()->tracer()->IncrementalMarkingSpeedInBytesPerMillisecond());
    1020             :   bytes_to_process = Min(bytes_to_process, step_size);
    1021             :   size_t bytes_processed = 0;
    1022       63548 :   if (FLAG_concurrent_marking) {
    1023             :     size_t current_bytes_marked_concurrently =
    1024       62768 :         heap()->concurrent_marking()->TotalMarkedBytes();
    1025             :     // concurrent_marking()->TotalMarkedBytes() is not monotonic for a short
    1026             :     // period of time while a concurrent marking task is finishing.
    1027       62768 :     if (current_bytes_marked_concurrently > bytes_marked_concurrently_) {
    1028             :       bytes_marked_ahead_of_schedule_ +=
    1029       17495 :           current_bytes_marked_concurrently - bytes_marked_concurrently_;
    1030       17495 :       bytes_marked_concurrently_ = current_bytes_marked_concurrently;
    1031             :     }
    1032             :   }
    1033       63548 :   if (bytes_marked_ahead_of_schedule_ >= bytes_to_process) {
    1034             :     // Steps performed in tasks and concurrently have put us ahead of
    1035             :     // schedule. We skip processing of the marking deque here and thus shift
    1036             :     // marking time from inside V8 to standalone tasks.
    1037       26628 :     bytes_marked_ahead_of_schedule_ -= bytes_to_process;
    1038             :     bytes_processed += bytes_to_process;
    1039             :     bytes_to_process = IncrementalMarking::kMinStepSizeInBytes;
    1040             :   }
    1041             :   bytes_processed +=
    1042       63548 :       Step(bytes_to_process, GC_VIA_STACK_GUARD, StepOrigin::kV8);
    1043      127096 :   bytes_allocated_ -= Min(bytes_allocated_, bytes_processed);
    1044       63548 : }
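
// --- Editorial sketch (not part of incremental-marking.cc) ----------------
// The credit accounting in StepOnAllocation above, in isolation: concurrent
// marking earns credit, and when the credit covers a whole step the main
// thread takes only a minimum-size token step. ScheduleCredit is an
// illustrative name, not V8 API.
#include <cstddef>

struct ScheduleCredit {
  size_t ahead_of_schedule = 0;
  size_t marked_concurrently = 0;

  // The total can transiently decrease while a task finishes (see the
  // non-monotonicity note above), so only count increases.
  void Earn(size_t total_marked_concurrently) {
    if (total_marked_concurrently > marked_concurrently) {
      ahead_of_schedule += total_marked_concurrently - marked_concurrently;
      marked_concurrently = total_marked_concurrently;
    }
  }

  // Returns the bytes the main thread still has to mark itself.
  size_t Spend(size_t bytes_to_process, size_t min_step_bytes) {
    if (ahead_of_schedule >= bytes_to_process) {
      ahead_of_schedule -= bytes_to_process;
      return min_step_bytes;  // token step keeps completion checks running
    }
    return bytes_to_process;
  }
};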
    1045             : 
    1046     1302552 : size_t IncrementalMarking::Step(size_t bytes_to_process,
    1047             :                                 CompletionAction action,
    1048     2592485 :                                 StepOrigin step_origin) {
    1049     6488676 :   double start = heap_->MonotonicallyIncreasingTimeInMs();
    1050             : 
    1051     1302552 :   if (state_ == SWEEPING) {
    1052       78220 :     TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL_SWEEPING);
    1053       39110 :     FinalizeSweeping();
    1054             :   }
    1055             : 
    1056             :   size_t bytes_processed = 0;
    1057     1302552 :   if (state_ == MARKING) {
    1058     1297604 :     if (FLAG_concurrent_marking) {
    1059     2589764 :       heap_->new_space()->ResetOriginalTop();
    1060             :       // It is safe to merge all on-hold objects back into the shared work
    1061             :       // list here because Step runs at a safepoint, where all objects are
    1062             :       // fully initialized.
    1063             :       marking_worklist()->shared()->MergeGlobalPool(
    1064     1294881 :           marking_worklist()->on_hold());
    1065             :     }
    1066             : 
    1067             : // Only print the marking worklist in debug mode to save ~40KB of code size.
    1068             : #ifdef DEBUG
    1069             :     if (FLAG_trace_incremental_marking && FLAG_trace_concurrent_marking &&
    1070             :         FLAG_trace_gc_verbose) {
    1071             :       marking_worklist()->Print();
    1072             :     }
    1073             : #endif
    1074             : 
    1075     2595208 :     bytes_processed = ProcessMarkingWorklist(bytes_to_process);
    1076             : 
    1077     1297604 :     if (step_origin == StepOrigin::kTask) {
    1078       36471 :       bytes_marked_ahead_of_schedule_ += bytes_processed;
    1079             :     }
    1080             : 
    1081     1297604 :     if (marking_worklist()->IsEmpty()) {
    1082     1269305 :       if (heap_->local_embedder_heap_tracer()
    1083     1269305 :               ->ShouldFinalizeIncrementalMarking()) {
    1084     1269305 :         if (!finalize_marking_completed_) {
    1085     1241591 :           FinalizeMarking(action);
    1086             :         } else {
    1087       27714 :           MarkingComplete(action);
    1088             :         }
    1089             :       } else {
    1090           0 :         heap_->local_embedder_heap_tracer()->NotifyV8MarkingWorklistWasEmpty();
    1091             :       }
    1092             :     }
    1093             :   }
    1094     1302552 :   if (FLAG_concurrent_marking) {
    1095     2599660 :     heap_->concurrent_marking()->RescheduleTasksIfNeeded();
    1096             :   }
    1097             : 
    1098     1302552 :   double end = heap_->MonotonicallyIncreasingTimeInMs();
    1099     1302552 :   double duration = (end - start);
    1100             :   // Note that we report zero bytes here when sweeping was in progress or
    1101             :   // when we just started incremental marking. In these cases we did not
    1102             :   // process the marking deque.
    1103     2605104 :   heap_->tracer()->AddIncrementalMarkingStep(duration, bytes_processed);
    1104     1302552 :   if (FLAG_trace_incremental_marking) {
    1105             :     heap_->isolate()->PrintWithTimestamp(
    1106             :         "[IncrementalMarking] Step %s %" PRIuS "KB (%" PRIuS "KB) in %.1f\n",
    1107             :         step_origin == StepOrigin::kV8 ? "in v8" : "in task",
    1108          20 :         bytes_processed / KB, bytes_to_process / KB, duration);
    1109             :   }
    1110     1302552 :   if (FLAG_trace_concurrent_marking) {
    1111             :     heap_->isolate()->PrintWithTimestamp(
    1112             :         "Concurrently marked %" PRIuS "KB\n",
    1113           0 :         heap_->concurrent_marking()->TotalMarkedBytes() / KB);
    1114             :   }
    1115     1302552 :   return bytes_processed;
    1116             : }
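
// --- Editorial sketch (not part of incremental-marking.cc) ----------------
// Step() above, reduced to its phase transitions: finish sweeping, then
// drain the worklist; an empty worklist first triggers finalization (which
// can add more work) and only then completion. A rough abstraction with an
// illustrative enum, not V8 API.
enum class MarkPhase { kSweeping, kMarking, kFinalizing, kComplete };

static MarkPhase Tick(MarkPhase phase, bool sweeping_done, bool worklist_empty,
                      bool finalize_completed) {
  if (phase == MarkPhase::kSweeping)
    return sweeping_done ? MarkPhase::kMarking : MarkPhase::kSweeping;
  if (phase == MarkPhase::kMarking && worklist_empty)
    return finalize_completed ? MarkPhase::kComplete : MarkPhase::kFinalizing;
  return phase;
}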
    1117             : 
    1118             : }  // namespace internal
    1119      183867 : }  // namespace v8
