LCOV - code coverage report
Current view: top level - src/heap - incremental-marking.cc (source / functions) Hit Total Coverage
Test: app.info Lines: 364 409 89.0 %
Date: 2017-10-20 Functions: 48 51 94.1 %

          Line data    Source code
       1             : // Copyright 2012 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #include "src/heap/incremental-marking.h"
       6             : 
       7             : #include "src/code-stubs.h"
       8             : #include "src/compilation-cache.h"
       9             : #include "src/conversions.h"
      10             : #include "src/heap/concurrent-marking.h"
      11             : #include "src/heap/gc-idle-time-handler.h"
      12             : #include "src/heap/gc-tracer.h"
      13             : #include "src/heap/heap-inl.h"
      14             : #include "src/heap/mark-compact-inl.h"
      15             : #include "src/heap/object-stats.h"
      16             : #include "src/heap/objects-visiting-inl.h"
      17             : #include "src/heap/objects-visiting.h"
      18             : #include "src/tracing/trace-event.h"
      19             : #include "src/v8.h"
      20             : #include "src/visitors.h"
      21             : #include "src/vm-state-inl.h"
      22             : 
      23             : namespace v8 {
      24             : namespace internal {
      25             : 
      26             : using IncrementalMarkingMarkingVisitor =
      27             :     MarkingVisitor<FixedArrayVisitationMode::kIncremental,
      28             :                    TraceRetainingPathMode::kDisabled,
      29             :                    IncrementalMarking::MarkingState>;
      30             : 
      31       82853 : void IncrementalMarking::Observer::Step(int bytes_allocated, Address addr,
      32             :                                         size_t size) {
      33      166560 :   Heap* heap = incremental_marking_.heap();
      34             :   VMState<GC> state(heap->isolate());
      35             :   RuntimeCallTimerScope runtime_timer(
      36       82853 :       heap->isolate(), &RuntimeCallStats::GC_Custom_IncrementalMarkingObserver);
      37       82853 :   incremental_marking_.AdvanceIncrementalMarkingOnAllocation();
      38      165706 :   if (incremental_marking_.black_allocation() && addr != nullptr) {
      39             :     // AdvanceIncrementalMarkingOnAllocation can start black allocation.
      40             :     // Ensure that the new object is marked black.
      41       64980 :     HeapObject* object = HeapObject::FromAddress(addr);
      42      125727 :     if (incremental_marking_.marking_state()->IsWhite(object) &&
      43             :         !heap->InNewSpace(object)) {
      44         854 :       if (heap->lo_space()->Contains(object)) {
      45             :         incremental_marking_.marking_state()->WhiteToBlack(object);
      46             :       } else {
      47        1284 :         Page::FromAddress(addr)->CreateBlackArea(addr, addr + size);
      48             :       }
      49             :     }
      50             :   }
      51       82853 : }
      52             : 
      53       54999 : IncrementalMarking::IncrementalMarking(
      54             :     Heap* heap, MarkCompactCollector::MarkingWorklist* marking_worklist)
      55             :     : heap_(heap),
      56             :       marking_worklist_(marking_worklist),
      57             :       initial_old_generation_size_(0),
      58             :       bytes_marked_ahead_of_schedule_(0),
      59             :       unscanned_bytes_of_large_object_(0),
      60             :       is_compacting_(false),
      61             :       should_hurry_(false),
      62             :       was_activated_(false),
      63             :       black_allocation_(false),
      64             :       finalize_marking_completed_(false),
      65             :       trace_wrappers_toggle_(false),
      66             :       request_type_(NONE),
      67             :       new_generation_observer_(*this, kYoungGenerationAllocatedThreshold),
      68      109998 :       old_generation_observer_(*this, kOldGenerationAllocatedThreshold) {
      69             :   DCHECK_NOT_NULL(marking_worklist_);
      70             :   SetState(STOPPED);
      71       54999 : }
      72             : 
      73             : bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) {
      74             :   HeapObject* value_heap_obj = HeapObject::cast(value);
      75             :   DCHECK(!marking_state()->IsImpossible(value_heap_obj));
      76             :   DCHECK(!marking_state()->IsImpossible(obj));
      77             : #ifdef V8_CONCURRENT_MARKING
      78             :   // The write barrier stub generated with V8_CONCURRENT_MARKING does not
      79             :   // check the color of the source object.
      80             :   const bool need_recording = true;
      81             : #else
      82             :   const bool need_recording = marking_state()->IsBlack(obj);
      83             : #endif
      84             : 
      85   245818356 :   if (need_recording && WhiteToGreyAndPush(value_heap_obj)) {
      86    24500299 :     RestartIfNotMarking();
      87             :   }
      88   245818649 :   return is_compacting_ && need_recording;
      89             : }
      90             : 
      91             : 
      92   245248816 : void IncrementalMarking::RecordWriteSlow(HeapObject* obj, Object** slot,
      93             :                                          Object* value) {
      94   245249109 :   if (BaseRecordWrite(obj, value) && slot != nullptr) {
      95             :     // Object is not going to be rescanned we need to record the slot.
      96             :     heap_->mark_compact_collector()->RecordSlot(obj, slot, value);
      97             :   }
      98   245249109 : }
      99             : 
     100    10711057 : int IncrementalMarking::RecordWriteFromCode(HeapObject* obj, Object** slot,
     101             :                                             Isolate* isolate) {
     102             :   DCHECK(obj->IsHeapObject());
     103    10711057 :   isolate->heap()->incremental_marking()->RecordWrite(obj, slot, *slot);
     104             :   // Called by RecordWriteCodeStubAssembler, which doesnt accept void type
     105    10711058 :   return 0;
     106             : }
     107             : 
     108      569540 : void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo,
     109             :                                                  Object* value) {
     110      569540 :   if (BaseRecordWrite(host, value)) {
     111             :     // Object is not going to be rescanned.  We need to record the slot.
     112       23809 :     heap_->mark_compact_collector()->RecordRelocSlot(host, rinfo, value);
     113             :   }
     114      569540 : }
     115             : 
     116   548471371 : bool IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) {
     117   463109113 :   if (marking_state()->WhiteToGrey(obj)) {
     118             :     marking_worklist()->Push(obj);
     119    85363689 :     return true;
     120             :   }
     121             :   return false;
     122             : }
     123             : 
     124     3778132 : void IncrementalMarking::MarkBlackAndPush(HeapObject* obj) {
     125             :   // Color the object black and push it into the bailout deque.
     126             :   marking_state()->WhiteToGrey(obj);
     127     2408879 :   if (marking_state()->GreyToBlack(obj)) {
     128     1369253 :     if (FLAG_concurrent_marking) {
     129             :       marking_worklist()->PushBailout(obj);
     130             :     } else {
     131             :       marking_worklist()->Push(obj);
     132             :     }
     133             :   }
     134     2408879 : }
     135             : 
     136         310 : void IncrementalMarking::NotifyLeftTrimming(HeapObject* from, HeapObject* to) {
     137             :   DCHECK(IsMarking());
     138             :   DCHECK(MemoryChunk::FromAddress(from->address())->SweepingDone());
     139             :   DCHECK_EQ(MemoryChunk::FromAddress(from->address()),
     140             :             MemoryChunk::FromAddress(to->address()));
     141             :   DCHECK_NE(from, to);
     142             : 
     143             :   MarkBit old_mark_bit = marking_state()->MarkBitFrom(from);
     144             :   MarkBit new_mark_bit = marking_state()->MarkBitFrom(to);
     145             : 
     146         310 :   if (black_allocation() && Marking::IsBlack<kAtomicity>(new_mark_bit)) {
     147             :     // Nothing to do if the object is in black area.
     148         155 :     return;
     149             :   }
     150             : 
     151             :   bool marked_black_due_to_left_trimming = false;
     152           0 :   if (FLAG_concurrent_marking) {
     153             :     // We need to mark the array black before overwriting its map and length
     154             :     // so that the concurrent marker does not observe inconsistent state.
     155             :     Marking::WhiteToGrey<kAtomicity>(old_mark_bit);
     156           0 :     if (Marking::GreyToBlack<kAtomicity>(old_mark_bit)) {
     157             :       // The concurrent marker will not mark the array. We need to push the
     158             :       // new array start in marking deque to ensure that it will be marked.
     159             :       marked_black_due_to_left_trimming = true;
     160             :     }
     161             :     DCHECK(Marking::IsBlack<kAtomicity>(old_mark_bit));
     162             :   }
     163             : 
     164           0 :   if (Marking::IsBlack<kAtomicity>(old_mark_bit) &&
     165             :       !marked_black_due_to_left_trimming) {
     166             :     // The array was black before left trimming or was marked black by the
     167             :     // concurrent marker. Simply transfer the color.
     168           0 :     if (from->address() + kPointerSize == to->address()) {
     169             :       // The old and the new markbits overlap. The |to| object has the
     170             :       // grey color. To make it black, we need to set the second bit.
     171             :       DCHECK(new_mark_bit.Get<kAtomicity>());
     172             :       new_mark_bit.Next().Set<kAtomicity>();
     173             :     } else {
     174             :       bool success = Marking::WhiteToBlack<kAtomicity>(new_mark_bit);
     175             :       DCHECK(success);
     176             :       USE(success);
     177             :     }
     178           0 :   } else if (Marking::IsGrey<kAtomicity>(old_mark_bit) ||
     179             :              marked_black_due_to_left_trimming) {
     180             :     // The array was already grey or was marked black by this function.
     181             :     // Mark the new array grey and push it to marking deque.
     182           0 :     if (from->address() + kPointerSize == to->address()) {
     183             :       // The old and the new markbits overlap. The |to| object is either white
     184             :       // or grey.  Set the first bit to make sure that it is grey.
     185             :       new_mark_bit.Set<kAtomicity>();
     186             :       DCHECK(!new_mark_bit.Next().Get<kAtomicity>());
     187             :     } else {
     188             :       bool success = Marking::WhiteToGrey<kAtomicity>(new_mark_bit);
     189             :       DCHECK(success);
     190             :       USE(success);
     191             :     }
     192             :     marking_worklist()->Push(to);
     193           0 :     RestartIfNotMarking();
     194             :   }
     195             : }
     196             : 
     197           0 : class IncrementalMarkingRootMarkingVisitor : public RootVisitor {
     198             :  public:
     199             :   explicit IncrementalMarkingRootMarkingVisitor(
     200       32057 :       IncrementalMarking* incremental_marking)
     201       64114 :       : heap_(incremental_marking->heap()) {}
     202             : 
     203    95337171 :   void VisitRootPointer(Root root, Object** p) override {
     204             :     MarkObjectByPointer(p);
     205    95337171 :   }
     206             : 
     207      429645 :   void VisitRootPointers(Root root, Object** start, Object** end) override {
     208    53475557 :     for (Object** p = start; p < end; p++) MarkObjectByPointer(p);
     209      429645 :   }
     210             : 
     211             :  private:
     212             :   void MarkObjectByPointer(Object** p) {
     213             :     Object* obj = *p;
     214   148383083 :     if (!obj->IsHeapObject()) return;
     215             : 
     216   142953408 :     heap_->incremental_marking()->WhiteToGreyAndPush(HeapObject::cast(obj));
     217             :   }
     218             : 
     219             :   Heap* heap_;
     220             : };
     221             : 
     222      409059 : void IncrementalMarking::SetOldSpacePageFlags(MemoryChunk* chunk,
     223             :                                               bool is_marking,
     224             :                                               bool is_compacting) {
     225      409059 :   if (is_marking) {
     226             :     chunk->SetFlag(MemoryChunk::POINTERS_TO_HERE_ARE_INTERESTING);
     227             :     chunk->SetFlag(MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING);
     228             :   } else {
     229             :     chunk->ClearFlag(MemoryChunk::POINTERS_TO_HERE_ARE_INTERESTING);
     230             :     chunk->SetFlag(MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING);
     231             :   }
     232      409059 : }
     233             : 
     234             : 
     235      186834 : void IncrementalMarking::SetNewSpacePageFlags(MemoryChunk* chunk,
     236             :                                               bool is_marking) {
     237             :   chunk->SetFlag(MemoryChunk::POINTERS_TO_HERE_ARE_INTERESTING);
     238      186834 :   if (is_marking) {
     239             :     chunk->SetFlag(MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING);
     240             :   } else {
     241             :     chunk->ClearFlag(MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING);
     242             :   }
     243      186834 : }
     244             : 
     245             : 
     246       49362 : void IncrementalMarking::DeactivateIncrementalWriteBarrierForSpace(
     247             :     PagedSpace* space) {
     248      340252 :   for (Page* p : *space) {
     249             :     SetOldSpacePageFlags(p, false, false);
     250             :   }
     251       49362 : }
     252             : 
     253             : 
     254       16454 : void IncrementalMarking::DeactivateIncrementalWriteBarrierForSpace(
     255             :     NewSpace* space) {
     256       99786 :   for (Page* p : *space) {
     257             :     SetNewSpacePageFlags(p, false);
     258             :   }
     259       16454 : }
     260             : 
     261             : 
     262       16454 : void IncrementalMarking::DeactivateIncrementalWriteBarrier() {
     263       82270 :   DeactivateIncrementalWriteBarrierForSpace(heap_->old_space());
     264       32908 :   DeactivateIncrementalWriteBarrierForSpace(heap_->map_space());
     265       32908 :   DeactivateIncrementalWriteBarrierForSpace(heap_->code_space());
     266       32908 :   DeactivateIncrementalWriteBarrierForSpace(heap_->new_space());
     267             : 
     268       64488 :   for (LargePage* lop : *heap_->lo_space()) {
     269             :     SetOldSpacePageFlags(lop, false, false);
     270             :   }
     271       16454 : }
     272             : 
     273             : 
     274       50538 : void IncrementalMarking::ActivateIncrementalWriteBarrier(PagedSpace* space) {
     275      300740 :   for (Page* p : *space) {
     276             :     SetOldSpacePageFlags(p, true, is_compacting_);
     277             :   }
     278       50538 : }
     279             : 
     280             : 
     281       16846 : void IncrementalMarking::ActivateIncrementalWriteBarrier(NewSpace* space) {
     282       99330 :   for (Page* p : *space) {
     283             :     SetNewSpacePageFlags(p, true);
     284             :   }
     285       16846 : }
     286             : 
     287             : 
     288       16846 : void IncrementalMarking::ActivateIncrementalWriteBarrier() {
     289       84230 :   ActivateIncrementalWriteBarrier(heap_->old_space());
     290       33692 :   ActivateIncrementalWriteBarrier(heap_->map_space());
     291       33692 :   ActivateIncrementalWriteBarrier(heap_->code_space());
     292       33692 :   ActivateIncrementalWriteBarrier(heap_->new_space());
     293             : 
     294       61296 :   for (LargePage* lop : *heap_->lo_space()) {
     295             :     SetOldSpacePageFlags(lop, true, is_compacting_);
     296             :   }
     297       16846 : }
     298             : 
     299             : 
     300       56796 : bool IncrementalMarking::WasActivated() { return was_activated_; }
     301             : 
     302             : 
     303     1589319 : bool IncrementalMarking::CanBeActivated() {
     304             :   // Only start incremental marking in a safe state: 1) when incremental
     305             :   // marking is turned on, 2) when we are currently not in a GC, and
     306             :   // 3) when we are currently not serializing or deserializing the heap.
     307     1579206 :   return FLAG_incremental_marking && heap_->gc_state() == Heap::NOT_IN_GC &&
     308     2728596 :          heap_->deserialization_complete() &&
     309     2728596 :          !heap_->isolate()->serializer_enabled();
     310             : }
     311             : 
     312             : 
     313           0 : void IncrementalMarking::ActivateGeneratedStub(Code* stub) {
     314             :   DCHECK(RecordWriteStub::GetMode(stub) == RecordWriteStub::STORE_BUFFER_ONLY);
     315             : 
     316           0 :   if (!IsMarking()) {
     317             :     // Initially stub is generated in STORE_BUFFER_ONLY mode thus
     318             :     // we don't need to do anything if incremental marking is
     319             :     // not active.
     320           0 :   } else if (IsCompacting()) {
     321           0 :     RecordWriteStub::Patch(stub, RecordWriteStub::INCREMENTAL_COMPACTION);
     322             :   } else {
     323           0 :     RecordWriteStub::Patch(stub, RecordWriteStub::INCREMENTAL);
     324             :   }
     325           0 : }
     326             : 
     327       33300 : static void PatchIncrementalMarkingRecordWriteStubs(
     328       33300 :     Heap* heap, RecordWriteStub::Mode mode) {
     329             :   UnseededNumberDictionary* stubs = heap->code_stubs();
     330             : 
     331             :   int capacity = stubs->Capacity();
     332             :   Isolate* isolate = heap->isolate();
     333     8558100 :   for (int i = 0; i < capacity; i++) {
     334             :     Object* k = stubs->KeyAt(i);
     335     8524800 :     if (stubs->IsKey(isolate, k)) {
     336     1916741 :       uint32_t key = NumberToUint32(k);
     337             : 
     338     1916741 :       if (CodeStub::MajorKeyFromKey(key) == CodeStub::RecordWrite) {
     339           0 :         Object* e = stubs->ValueAt(i);
     340           0 :         if (e->IsCode()) {
     341           0 :           RecordWriteStub::Patch(Code::cast(e), mode);
     342             :         }
     343             :       }
     344             :     }
     345             :   }
     346       33300 : }
     347             : 
     348       16454 : void IncrementalMarking::Deactivate() {
     349       16454 :   DeactivateIncrementalWriteBarrier();
     350             :   PatchIncrementalMarkingRecordWriteStubs(heap_,
     351       16454 :                                           RecordWriteStub::STORE_BUFFER_ONLY);
     352       16454 : }
     353             : 
     354       67712 : void IncrementalMarking::Start(GarbageCollectionReason gc_reason) {
     355       19399 :   if (FLAG_trace_incremental_marking) {
     356             :     int old_generation_size_mb =
     357           5 :         static_cast<int>(heap()->PromotedSpaceSizeOfObjects() / MB);
     358             :     int old_generation_limit_mb =
     359           5 :         static_cast<int>(heap()->old_generation_allocation_limit() / MB);
     360             :     heap()->isolate()->PrintWithTimestamp(
     361             :         "[IncrementalMarking] Start (%s): old generation %dMB, limit %dMB, "
     362             :         "slack %dMB\n",
     363             :         Heap::GarbageCollectionReasonToString(gc_reason),
     364             :         old_generation_size_mb, old_generation_limit_mb,
     365          15 :         Max(0, old_generation_limit_mb - old_generation_size_mb));
     366             :   }
     367             :   DCHECK(FLAG_incremental_marking);
     368             :   DCHECK(state_ == STOPPED);
     369             :   DCHECK(heap_->gc_state() == Heap::NOT_IN_GC);
     370             :   DCHECK(!heap_->isolate()->serializer_enabled());
     371             : 
     372      155192 :   Counters* counters = heap_->isolate()->counters();
     373             : 
     374             :   counters->incremental_marking_reason()->AddSample(
     375       19399 :       static_cast<int>(gc_reason));
     376             :   HistogramTimerScope incremental_marking_scope(
     377       19399 :       counters->gc_incremental_marking_start());
     378       58197 :   TRACE_EVENT0("v8", "V8.GCIncrementalMarkingStart");
     379       77596 :   TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_START);
     380       38798 :   heap_->tracer()->NotifyIncrementalMarkingStart();
     381             : 
     382       19399 :   start_time_ms_ = heap()->MonotonicallyIncreasingTimeInMs();
     383       19399 :   initial_old_generation_size_ = heap_->PromotedSpaceSizeOfObjects();
     384       38798 :   old_generation_allocation_counter_ = heap_->OldGenerationAllocationCounter();
     385       19399 :   bytes_allocated_ = 0;
     386       19399 :   bytes_marked_ahead_of_schedule_ = 0;
     387       19399 :   should_hurry_ = false;
     388       19399 :   was_activated_ = true;
     389             : 
     390       38798 :   if (!heap_->mark_compact_collector()->sweeping_in_progress()) {
     391        9899 :     StartMarking();
     392             :   } else {
     393        9500 :     if (FLAG_trace_incremental_marking) {
     394             :       heap()->isolate()->PrintWithTimestamp(
     395           0 :           "[IncrementalMarking] Start sweeping.\n");
     396             :     }
     397             :     SetState(SWEEPING);
     398             :   }
     399             : 
     400       38798 :   SpaceIterator it(heap_);
     401      135793 :   while (it.has_next()) {
     402       96995 :     Space* space = it.next();
     403      193990 :     if (space == heap_->new_space()) {
     404       19399 :       space->AddAllocationObserver(&new_generation_observer_);
     405             :     } else {
     406       77596 :       space->AddAllocationObserver(&old_generation_observer_);
     407             :     }
     408             :   }
     409             : 
     410       19399 :   incremental_marking_job()->Start(heap_);
     411       19399 : }
     412             : 
     413             : 
     414       50543 : void IncrementalMarking::StartMarking() {
     415       84061 :   if (heap_->isolate()->serializer_enabled()) {
     416             :     // Black allocation currently starts when we start incremental marking,
     417             :     // but we cannot enable black allocation while deserializing. Hence, we
     418             :     // have to delay the start of incremental marking in that case.
     419           0 :     if (FLAG_trace_incremental_marking) {
     420             :       heap()->isolate()->PrintWithTimestamp(
     421           0 :           "[IncrementalMarking] Start delayed - serializer\n");
     422             :     }
     423           0 :     return;
     424             :   }
     425       16846 :   if (FLAG_trace_incremental_marking) {
     426             :     heap()->isolate()->PrintWithTimestamp(
     427           5 :         "[IncrementalMarking] Start marking\n");
     428             :   }
     429             : 
     430             :   is_compacting_ =
     431       33692 :       !FLAG_never_compact && heap_->mark_compact_collector()->StartCompaction();
     432             : 
     433             :   SetState(MARKING);
     434             : 
     435             :   {
     436       67384 :     TRACE_GC(heap()->tracer(),
     437             :              GCTracer::Scope::MC_INCREMENTAL_WRAPPER_PROLOGUE);
     438       50538 :     heap_->local_embedder_heap_tracer()->TracePrologue();
     439             :   }
     440             : 
     441             :   RecordWriteStub::Mode mode = is_compacting_
     442             :                                    ? RecordWriteStub::INCREMENTAL_COMPACTION
     443       16846 :                                    : RecordWriteStub::INCREMENTAL;
     444             : 
     445       16846 :   PatchIncrementalMarkingRecordWriteStubs(heap_, mode);
     446             : 
     447       16846 :   ActivateIncrementalWriteBarrier();
     448             : 
     449             : // Marking bits are cleared by the sweeper.
     450             : #ifdef VERIFY_HEAP
     451             :   if (FLAG_verify_heap) {
     452             :     heap_->mark_compact_collector()->VerifyMarkbitsAreClean();
     453             :   }
     454             : #endif
     455             : 
     456       33692 :   heap_->isolate()->compilation_cache()->MarkCompactPrologue();
     457             : 
     458             : #ifdef V8_CONCURRENT_MARKING
     459             :   // The write-barrier does not check the color of the source object.
     460             :   // Start black allocation earlier to ensure faster marking progress.
     461       16846 :   if (!black_allocation_) {
     462       16846 :     StartBlackAllocation();
     463             :   }
     464             : #endif
     465             : 
     466             :   // Mark strong roots grey.
     467             :   IncrementalMarkingRootMarkingVisitor visitor(this);
     468       16846 :   heap_->IterateStrongRoots(&visitor, VISIT_ONLY_STRONG);
     469             : 
     470       16846 :   if (FLAG_concurrent_marking) {
     471       33354 :     heap_->concurrent_marking()->ScheduleTasks();
     472             :   }
     473             : 
     474             :   // Ready to start incremental marking.
     475       16846 :   if (FLAG_trace_incremental_marking) {
     476           5 :     heap()->isolate()->PrintWithTimestamp("[IncrementalMarking] Running\n");
     477             :   }
     478             : }
     479             : 
     480       76001 : void IncrementalMarking::StartBlackAllocation() {
     481             :   DCHECK(FLAG_black_allocation);
     482             :   DCHECK(!black_allocation_);
     483             :   DCHECK(IsMarking());
     484       18999 :   black_allocation_ = true;
     485       18999 :   heap()->old_space()->MarkAllocationInfoBlack();
     486       18999 :   heap()->map_space()->MarkAllocationInfoBlack();
     487       18999 :   heap()->code_space()->MarkAllocationInfoBlack();
     488       18999 :   if (FLAG_trace_incremental_marking) {
     489             :     heap()->isolate()->PrintWithTimestamp(
     490           5 :         "[IncrementalMarking] Black allocation started\n");
     491             :   }
     492       18999 : }
     493             : 
     494        6459 : void IncrementalMarking::PauseBlackAllocation() {
     495             :   DCHECK(FLAG_black_allocation);
     496             :   DCHECK(IsMarking());
     497        2153 :   heap()->old_space()->UnmarkAllocationInfo();
     498        2153 :   heap()->map_space()->UnmarkAllocationInfo();
     499        2153 :   heap()->code_space()->UnmarkAllocationInfo();
     500        2153 :   if (FLAG_trace_incremental_marking) {
     501             :     heap()->isolate()->PrintWithTimestamp(
     502           0 :         "[IncrementalMarking] Black allocation paused\n");
     503             :   }
     504        2153 :   black_allocation_ = false;
     505        2153 : }
     506             : 
     507       18893 : void IncrementalMarking::FinishBlackAllocation() {
     508       18888 :   if (black_allocation_) {
     509       16454 :     black_allocation_ = false;
     510       16454 :     if (FLAG_trace_incremental_marking) {
     511             :       heap()->isolate()->PrintWithTimestamp(
     512           5 :           "[IncrementalMarking] Black allocation finished\n");
     513             :     }
     514             :   }
     515       18888 : }
     516             : 
     517         912 : void IncrementalMarking::AbortBlackAllocation() {
     518         912 :   if (FLAG_trace_incremental_marking) {
     519             :     heap()->isolate()->PrintWithTimestamp(
     520           0 :         "[IncrementalMarking] Black allocation aborted\n");
     521             :   }
     522         912 : }
     523             : 
     524       15211 : void IncrementalMarking::MarkRoots() {
     525             :   DCHECK(!finalize_marking_completed_);
     526             :   DCHECK(IsMarking());
     527             : 
     528             :   IncrementalMarkingRootMarkingVisitor visitor(this);
     529       15211 :   heap_->IterateStrongRoots(&visitor, VISIT_ONLY_STRONG);
     530       15211 : }
     531             : 
     532          50 : bool ShouldRetainMap(Map* map, int age) {
     533          50 :   if (age == 0) {
     534             :     // The map has aged. Do not retain this map.
     535             :     return false;
     536             :   }
     537          40 :   Object* constructor = map->GetConstructor();
     538             :   Heap* heap = map->GetHeap();
     539          80 :   if (!constructor->IsHeapObject() ||
     540             :       heap->incremental_marking()->marking_state()->IsWhite(
     541             :           HeapObject::cast(constructor))) {
     542             :     // The constructor is dead, no new objects with this map can
     543             :     // be created. Do not retain this map.
     544             :     return false;
     545             :   }
     546          40 :   return true;
     547             : }
     548             : 
     549             : 
     550       30422 : void IncrementalMarking::RetainMaps() {
     551             :   // Do not retain dead maps if flag disables it or there is
     552             :   // - memory pressure (reduce_memory_footprint_),
     553             :   // - GC is requested by tests or dev-tools (abort_incremental_marking_).
     554       30417 :   bool map_retaining_is_disabled = heap()->ShouldReduceMemory() ||
     555       30417 :                                    heap()->ShouldAbortIncrementalMarking() ||
     556       15206 :                                    FLAG_retain_maps_for_n_gc == 0;
     557       15211 :   ArrayList* retained_maps = heap()->retained_maps();
     558       15211 :   int length = retained_maps->Length();
     559             :   // The number_of_disposed_maps separates maps in the retained_maps
     560             :   // array that were created before and after context disposal.
     561             :   // We do not age and retain disposed maps to avoid memory leaks.
     562       15211 :   int number_of_disposed_maps = heap()->number_of_disposed_maps_;
     563       32483 :   for (int i = 0; i < length; i += 2) {
     564             :     DCHECK(retained_maps->Get(i)->IsWeakCell());
     565             :     WeakCell* cell = WeakCell::cast(retained_maps->Get(i));
     566       17272 :     if (cell->cleared()) continue;
     567             :     int age = Smi::ToInt(retained_maps->Get(i + 1));
     568             :     int new_age;
     569             :     Map* map = Map::cast(cell->value());
     570       14818 :     if (i >= number_of_disposed_maps && !map_retaining_is_disabled &&
     571             :         marking_state()->IsWhite(map)) {
     572          50 :       if (ShouldRetainMap(map, age)) {
     573          40 :         WhiteToGreyAndPush(map);
     574             :       }
     575             :       Object* prototype = map->prototype();
     576         130 :       if (age > 0 && prototype->IsHeapObject() &&
     577             :           marking_state()->IsWhite(HeapObject::cast(prototype))) {
     578             :         // The prototype is not marked, age the map.
     579          40 :         new_age = age - 1;
     580             :       } else {
     581             :         // The prototype and the constructor are marked, this map keeps only
     582             :         // transition tree alive, not JSObjects. Do not age the map.
     583             :         new_age = age;
     584             :       }
     585             :     } else {
     586       14718 :       new_age = FLAG_retain_maps_for_n_gc;
     587             :     }
     588             :     // Compact the array and update the age.
     589       14768 :     if (new_age != age) {
     590             :       retained_maps->Set(i + 1, Smi::FromInt(new_age));
     591             :     }
     592             :   }
     593       15211 : }
     594             : 
     595       30427 : void IncrementalMarking::FinalizeIncrementally() {
     596       60844 :   TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_FINALIZE_BODY);
     597             :   DCHECK(!finalize_marking_completed_);
     598             :   DCHECK(IsMarking());
     599             : 
     600       15211 :   double start = heap_->MonotonicallyIncreasingTimeInMs();
     601             : 
     602             :   // After finishing incremental marking, we try to discover all unmarked
     603             :   // objects to reduce the marking load in the final pause.
     604             :   // 1) We scan and mark the roots again to find all changes to the root set.
     605             :   // 2) Age and retain maps embedded in optimized code.
     606             :   // 3) Remove weak cell with live values from the list of weak cells, they
     607             :   // do not need processing during GC.
     608       15211 :   MarkRoots();
     609             : 
     610             :   // Map retaining is needed for perfromance, not correctness,
     611             :   // so we can do it only once at the beginning of the finalization.
     612       15211 :   RetainMaps();
     613             : 
     614       15211 :   finalize_marking_completed_ = true;
     615             : 
     616       60839 :   if (FLAG_black_allocation && !heap()->ShouldReduceMemory() &&
     617       15206 :       !black_allocation_) {
     618             :     // TODO(hpayer): Move to an earlier point as soon as we make faster marking
     619             :     // progress.
     620           0 :     StartBlackAllocation();
     621             :   }
     622             : 
     623       15211 :   if (FLAG_trace_incremental_marking) {
     624           5 :     double end = heap_->MonotonicallyIncreasingTimeInMs();
     625           5 :     double delta = end - start;
     626             :     heap()->isolate()->PrintWithTimestamp(
     627           5 :         "[IncrementalMarking] Finalize incrementally spent %.1f ms.\n", delta);
     628       15211 :   }
     629       15211 : }
     630             : 
     631       31805 : void IncrementalMarking::UpdateMarkingWorklistAfterScavenge() {
     632       59304 :   if (!IsMarking()) return;
     633             : 
     634        2153 :   Map* filler_map = heap_->one_pointer_filler_map();
     635             : 
     636             :   MinorMarkCompactCollector::MarkingState* minor_marking_state =
     637        2153 :       heap()->minor_mark_compact_collector()->marking_state();
     638             : 
     639             :   marking_worklist()->Update([this, filler_map, minor_marking_state](
     640    10046943 :                                  HeapObject* obj, HeapObject** out) -> bool {
     641             :     DCHECK(obj->IsHeapObject());
     642             :     // Only pointers to from space have to be updated.
     643    10046943 :     if (heap_->InFromSpace(obj)) {
     644             :       MapWord map_word = obj->map_word();
     645     7153837 :       if (!map_word.IsForwardingAddress()) {
     646             :         // There may be objects on the marking deque that do not exist anymore,
     647             :         // e.g. left trimmed objects or objects from the root set (frames).
     648             :         // If these object are dead at scavenging time, their marking deque
     649             :         // entries will not point to forwarding addresses. Hence, we can discard
     650             :         // them.
     651             :         return false;
     652             :       }
     653      938307 :       HeapObject* dest = map_word.ToForwardingAddress();
     654             :       DCHECK_IMPLIES(marking_state()->IsWhite(obj), obj->IsFiller());
     655      938307 :       *out = dest;
     656             :       return true;
     657     2893106 :     } else if (heap_->InToSpace(obj)) {
     658             :       // The object may be on a page that was moved in new space.
     659             :       DCHECK(
     660             :           Page::FromAddress(obj->address())->IsFlagSet(Page::SWEEP_TO_ITERATE));
     661           0 :       if (minor_marking_state->IsGrey(obj)) {
     662           0 :         *out = obj;
     663             :         return true;
     664             :       }
     665             :       return false;
     666             :     } else {
     667             :       // The object may be on a page that was moved from new to old space.
     668     5786212 :       if (Page::FromAddress(obj->address())
     669             :               ->IsFlagSet(Page::SWEEP_TO_ITERATE)) {
     670           0 :         if (minor_marking_state->IsGrey(obj)) {
     671           0 :           *out = obj;
     672             :           return true;
     673             :         }
     674             :         return false;
     675             :       }
     676             :       DCHECK_IMPLIES(marking_state()->IsWhite(obj), obj->IsFiller());
     677             :       // Skip one word filler objects that appear on the
     678             :       // stack when we perform in place array shift.
     679     2893106 :       if (obj->map() != filler_map) {
     680     2893106 :         *out = obj;
     681             :         return true;
     682             :       }
     683             :       return false;
     684             :     }
     685        4306 :   });
     686             : }
     687             : 
     688             : bool IncrementalMarking::IsFixedArrayWithProgressBar(HeapObject* obj) {
     689             :   if (!obj->IsFixedArray()) return false;
     690             :   MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
     691             :   return chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR);
     692             : }
     693             : 
     694    54613789 : int IncrementalMarking::VisitObject(Map* map, HeapObject* obj) {
     695             :   DCHECK(marking_state()->IsGrey(obj) || marking_state()->IsBlack(obj));
     696             :   // The object can already be black in two cases:
     697             :   // 1. The object is a fixed array with the progress bar.
     698             :   // 2. The object is a JSObject that was colored black before
     699             :   //    unsafe layout change.
     700             :   // 3. The object is a string that was colored black before
     701             :   //    unsafe layout change.
     702             :   if (!marking_state()->GreyToBlack(obj)) {
     703             :     DCHECK(IsFixedArrayWithProgressBar(obj) || obj->IsJSObject() ||
     704             :            obj->IsString());
     705             :   }
     706             :   DCHECK(marking_state()->IsBlack(obj));
     707    54613791 :   WhiteToGreyAndPush(map);
     708             :   IncrementalMarkingMarkingVisitor visitor(heap()->mark_compact_collector(),
     709   109227578 :                                            marking_state());
     710    54613793 :   return visitor.Visit(map, obj);
     711             : }
     712             : 
     713    19512997 : void IncrementalMarking::ProcessBlackAllocatedObject(HeapObject* obj) {
     714    38949453 :   if (IsMarking() && marking_state()->IsBlack(obj)) {
     715    19436452 :     RevisitObject(obj);
     716             :   }
     717    19513000 : }
     718             : 
     719    39448509 : void IncrementalMarking::RevisitObject(HeapObject* obj) {
     720             :   DCHECK(IsMarking());
     721             :   DCHECK(FLAG_concurrent_marking || marking_state()->IsBlack(obj));
     722    19724254 :   Page* page = Page::FromAddress(obj->address());
     723    39448510 :   if ((page->owner() != nullptr) && (page->owner()->identity() == LO_SPACE)) {
     724             :     page->ResetProgressBar();
     725             :   }
     726             :   Map* map = obj->map();
     727    19724254 :   WhiteToGreyAndPush(map);
     728             :   IncrementalMarkingMarkingVisitor visitor(heap()->mark_compact_collector(),
     729    39448510 :                                            marking_state());
     730             :   visitor.Visit(map, obj);
     731    19724255 : }
     732             : 
     733             : intptr_t IncrementalMarking::ProcessMarkingWorklist(
     734    54659714 :     intptr_t bytes_to_process, ForceCompletionAction completion) {
     735             :   intptr_t bytes_processed = 0;
     736    53218076 :   while (bytes_processed < bytes_to_process || completion == FORCE_COMPLETION) {
     737    54659714 :     HeapObject* obj = marking_worklist()->Pop();
     738    54659712 :     if (obj == nullptr) break;
     739             :     // Left trimming may result in white, grey, or black filler objects on the
     740             :     // marking deque. Ignore these objects.
     741    54613778 :     if (obj->IsFiller()) {
     742             :       DCHECK(!marking_state()->IsImpossible(obj));
     743             :       continue;
     744             :     }
     745    54613779 :     unscanned_bytes_of_large_object_ = 0;
     746    54613779 :     int size = VisitObject(obj->map(), obj);
     747    53132948 :     bytes_processed += size - unscanned_bytes_of_large_object_;
     748             :   }
     749             :   // Report all found wrappers to the embedder. This is necessary as the
     750             :   // embedder could potentially invalidate wrappers as soon as V8 is done
     751             :   // with its incremental marking processing. Any cached wrappers could
     752             :   // result in broken pointers at this point.
     753       86144 :   heap_->local_embedder_heap_tracer()->RegisterWrappersWithRemoteTracer();
     754             :   return bytes_processed;
     755             : }
     756             : 
     757             : 
     758       16560 : void IncrementalMarking::Hurry() {
     759             :   // A scavenge may have pushed new objects on the marking deque (due to black
     760             :   // allocation) even in COMPLETE state. This may happen if scavenges are
     761             :   // forced e.g. in tests. It should not happen when COMPLETE was set when
     762             :   // incremental marking finished and a regular GC was triggered after that
     763             :   // because should_hurry_ will force a full GC.
     764       15542 :   if (!marking_worklist()->IsEmpty()) {
     765             :     double start = 0.0;
     766        1018 :     if (FLAG_trace_incremental_marking) {
     767           0 :       start = heap_->MonotonicallyIncreasingTimeInMs();
     768           0 :       if (FLAG_trace_incremental_marking) {
     769           0 :         heap()->isolate()->PrintWithTimestamp("[IncrementalMarking] Hurry\n");
     770             :       }
     771             :     }
     772             :     // TODO(gc) hurry can mark objects it encounters black as mutator
     773             :     // was stopped.
     774             :     ProcessMarkingWorklist(0, FORCE_COMPLETION);
     775             :     SetState(COMPLETE);
     776        1018 :     if (FLAG_trace_incremental_marking) {
     777           0 :       double end = heap_->MonotonicallyIncreasingTimeInMs();
     778           0 :       double delta = end - start;
     779           0 :       if (FLAG_trace_incremental_marking) {
     780             :         heap()->isolate()->PrintWithTimestamp(
     781             :             "[IncrementalMarking] Complete (hurry), spent %d ms.\n",
     782           0 :             static_cast<int>(delta));
     783             :       }
     784             :     }
     785             :   }
     786       15542 : }
     787             : 
     788             : 
     789       37811 : void IncrementalMarking::Stop() {
     790       18938 :   if (IsStopped()) return;
     791       18888 :   if (FLAG_trace_incremental_marking) {
     792             :     int old_generation_size_mb =
     793           5 :         static_cast<int>(heap()->PromotedSpaceSizeOfObjects() / MB);
     794             :     int old_generation_limit_mb =
     795           5 :         static_cast<int>(heap()->old_generation_allocation_limit() / MB);
     796             :     heap()->isolate()->PrintWithTimestamp(
     797             :         "[IncrementalMarking] Stopping: old generation %dMB, limit %dMB, "
     798             :         "overshoot %dMB\n",
     799             :         old_generation_size_mb, old_generation_limit_mb,
     800          10 :         Max(0, old_generation_size_mb - old_generation_limit_mb));
     801             :   }
     802             : 
     803      113328 :   SpaceIterator it(heap_);
     804      132216 :   while (it.has_next()) {
     805       94440 :     Space* space = it.next();
     806      188880 :     if (space == heap_->new_space()) {
     807       18888 :       space->RemoveAllocationObserver(&new_generation_observer_);
     808             :     } else {
     809       75552 :       space->RemoveAllocationObserver(&old_generation_observer_);
     810             :     }
     811             :   }
     812             : 
     813             :   IncrementalMarking::set_should_hurry(false);
     814       37776 :   heap_->isolate()->stack_guard()->ClearGC();
     815             :   SetState(STOPPED);
     816       18888 :   is_compacting_ = false;
     817       18888 :   FinishBlackAllocation();
     818             : }
     819             : 
     820             : 
     821       15542 : void IncrementalMarking::Finalize() {
     822       15542 :   Hurry();
     823       15542 :   Stop();
     824       15542 : }
     825             : 
     826             : 
     827       28876 : void IncrementalMarking::FinalizeMarking(CompletionAction action) {
     828             :   DCHECK(!finalize_marking_completed_);
     829       28871 :   if (FLAG_trace_incremental_marking) {
     830             :     heap()->isolate()->PrintWithTimestamp(
     831             :         "[IncrementalMarking] requesting finalization of incremental "
     832           5 :         "marking.\n");
     833             :   }
     834       28871 :   request_type_ = FINALIZATION;
     835       28871 :   if (action == GC_VIA_STACK_GUARD) {
     836       29218 :     heap_->isolate()->stack_guard()->RequestGC();
     837             :   }
     838       28871 : }
     839             : 
     840             : 
     841       16092 : void IncrementalMarking::MarkingComplete(CompletionAction action) {
     842             :   SetState(COMPLETE);
     843             :   // We will set the stack guard to request a GC now.  This will mean the rest
     844             :   // of the GC gets performed as soon as possible (we can't do a GC here in a
     845             :   // record-write context).  If a few things get allocated between now and then
     846             :   // that shouldn't make us do a scavenge and keep being incremental, so we set
     847             :   // the should-hurry flag to indicate that there can't be much work left to do.
     848             :   set_should_hurry(true);
     849       16087 :   if (FLAG_trace_incremental_marking) {
     850             :     heap()->isolate()->PrintWithTimestamp(
     851           5 :         "[IncrementalMarking] Complete (normal).\n");
     852             :   }
     853       16087 :   request_type_ = COMPLETE_MARKING;
     854       16087 :   if (action == GC_VIA_STACK_GUARD) {
     855        3490 :     heap_->isolate()->stack_guard()->RequestGC();
     856             :   }
     857       16087 : }
     858             : 
     859             : 
     860       56800 : void IncrementalMarking::Epilogue() {
     861       56800 :   was_activated_ = false;
     862       56800 :   finalize_marking_completed_ = false;
     863       56800 : }
     864             : 
     865       49952 : double IncrementalMarking::AdvanceIncrementalMarking(
     866             :     double deadline_in_ms, CompletionAction completion_action,
     867      102399 :     StepOrigin step_origin) {
     868             :   HistogramTimerScope incremental_marking_scope(
     869      194352 :       heap_->isolate()->counters()->gc_incremental_marking());
     870      149856 :   TRACE_EVENT0("v8", "V8.GCIncrementalMarking");
     871      249760 :   TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL);
     872             :   DCHECK(!IsStopped());
     873             :   DCHECK_EQ(
     874             :       0, heap_->local_embedder_heap_tracer()->NumberOfCachedWrappersToTrace());
     875             : 
     876             :   double remaining_time_in_ms = 0.0;
     877             :   intptr_t step_size_in_bytes = GCIdleTimeHandler::EstimateMarkingStepSize(
     878             :       kStepSizeInMs,
     879       49952 :       heap()->tracer()->IncrementalMarkingSpeedInBytesPerMillisecond());
     880             : 
     881             :   const bool incremental_wrapper_tracing =
     882       94448 :       state_ == MARKING && FLAG_incremental_marking_wrappers &&
     883       88992 :       heap_->local_embedder_heap_tracer()->InUse();
     884       51144 :   do {
     885       51144 :     if (incremental_wrapper_tracing && trace_wrappers_toggle_) {
     886           0 :       TRACE_GC(heap()->tracer(),
     887             :                GCTracer::Scope::MC_INCREMENTAL_WRAPPER_TRACING);
     888             :       const double wrapper_deadline =
     889           0 :           heap_->MonotonicallyIncreasingTimeInMs() + kStepSizeInMs;
     890           0 :       if (!heap_->local_embedder_heap_tracer()
     891           0 :                ->ShouldFinalizeIncrementalMarking()) {
     892             :         heap_->local_embedder_heap_tracer()->Trace(
     893             :             wrapper_deadline, EmbedderHeapTracer::AdvanceTracingActions(
     894             :                                   EmbedderHeapTracer::ForceCompletionAction::
     895           0 :                                       DO_NOT_FORCE_COMPLETION));
     896           0 :       }
     897             :     } else {
     898       51144 :       Step(step_size_in_bytes, completion_action, step_origin);
     899             :     }
     900       51144 :     trace_wrappers_toggle_ = !trace_wrappers_toggle_;
     901             :     remaining_time_in_ms =
     902       51144 :         deadline_in_ms - heap()->MonotonicallyIncreasingTimeInMs();
     903       53795 :   } while (remaining_time_in_ms >= kStepSizeInMs && !IsComplete() &&
     904        1303 :            !marking_worklist()->IsEmpty());
     905       49952 :   return remaining_time_in_ms;
     906             : }
     907             : 
     908             : 
     909        7715 : void IncrementalMarking::FinalizeSweeping() {
     910             :   DCHECK(state_ == SWEEPING);
     911       34544 :   if (heap_->mark_compact_collector()->sweeping_in_progress() &&
     912        4442 :       (!FLAG_concurrent_sweeping ||
     913        2216 :        !heap_->mark_compact_collector()->sweeper().AreSweeperTasksRunning())) {
     914        2916 :     heap_->mark_compact_collector()->EnsureSweepingCompleted();
     915             :   }
     916       15430 :   if (!heap_->mark_compact_collector()->sweeping_in_progress()) {
     917             : #ifdef DEBUG
     918             :     heap_->VerifyCountersAfterSweeping();
     919             : #endif
     920        6947 :     StartMarking();
     921             :   }
     922        7715 : }
     923             : 
     924       82789 : size_t IncrementalMarking::StepSizeToKeepUpWithAllocations() {
     925             :   // Update bytes_allocated_ based on the allocation counter.
     926       82789 :   size_t current_counter = heap_->OldGenerationAllocationCounter();
     927       82789 :   bytes_allocated_ += current_counter - old_generation_allocation_counter_;
     928       82789 :   old_generation_allocation_counter_ = current_counter;
     929       82789 :   return bytes_allocated_;
     930             : }
     931             : 
     932       84087 : size_t IncrementalMarking::StepSizeToMakeProgress() {
     933             :   // We increase step size gradually based on the time passed in order to
     934             :   // leave marking work to standalone tasks. The ramp up duration and the
     935             :   // target step count are chosen based on benchmarks.
     936             :   const int kRampUpIntervalMs = 300;
     937             :   const size_t kTargetStepCount = 256;
     938             :   const size_t kTargetStepCountAtOOM = 32;
     939      165578 :   size_t oom_slack = heap()->new_space()->Capacity() + 64 * MB;
     940             : 
     941       82789 :   if (heap()->IsCloseToOutOfMemory(oom_slack)) {
     942        1298 :     return heap()->PromotedSpaceSizeOfObjects() / kTargetStepCountAtOOM;
     943             :   }
     944             : 
     945             :   size_t step_size = Max(initial_old_generation_size_ / kTargetStepCount,
     946       81491 :                          IncrementalMarking::kMinStepSizeInBytes);
     947             :   double time_passed_ms =
     948       81491 :       heap_->MonotonicallyIncreasingTimeInMs() - start_time_ms_;
     949       81491 :   double factor = Min(time_passed_ms / kRampUpIntervalMs, 1.0);
     950       81491 :   return static_cast<size_t>(factor * step_size);
     951             : }
     952             : 
     953      161128 : void IncrementalMarking::AdvanceIncrementalMarkingOnAllocation() {
     954             :   // Code using an AlwaysAllocateScope assumes that the GC state does not
     955             :   // change; that implies that no marking steps must be performed.
     956      362538 :   if (heap_->gc_state() != Heap::NOT_IN_GC || !FLAG_incremental_marking ||
     957      248518 :       (state_ != SWEEPING && state_ != MARKING) || heap_->always_allocate()) {
     958       82853 :     return;
     959             :   }
     960             : 
     961             :   size_t bytes_to_process =
     962       82789 :       StepSizeToKeepUpWithAllocations() + StepSizeToMakeProgress();
     963             : 
     964       82789 :   if (bytes_to_process >= IncrementalMarking::kMinStepSizeInBytes) {
     965             :     // The first step after Scavenge will see many allocated bytes.
     966             :     // Cap the step size to distribute the marking work more uniformly.
     967             :     size_t max_step_size = GCIdleTimeHandler::EstimateMarkingStepSize(
     968             :         kMaxStepSizeInMs,
     969       31126 :         heap()->tracer()->IncrementalMarkingSpeedInBytesPerMillisecond());
     970             :     bytes_to_process = Min(bytes_to_process, max_step_size);
     971             : 
     972       62025 :     if (FLAG_concurrent_marking && marking_worklist()->IsBailoutEmpty()) {
     973             :       // The number of background tasks + the main thread.
     974       16250 :       size_t tasks = heap()->concurrent_marking()->TaskCount() + 1;
     975             :       bytes_to_process = Max(IncrementalMarking::kMinStepSizeInBytes,
     976       16250 :                              bytes_to_process / tasks);
     977             :     }
     978             : 
     979             :     size_t bytes_processed = 0;
     980       31126 :     if (bytes_marked_ahead_of_schedule_ >= bytes_to_process) {
     981             :       // Steps performed in tasks have put us ahead of schedule.
     982             :       // We skip processing of marking dequeue here and thus
     983             :       // shift marking time from inside V8 to standalone tasks.
     984           0 :       bytes_marked_ahead_of_schedule_ -= bytes_to_process;
     985             :       bytes_processed = bytes_to_process;
     986             :     } else {
     987             :       HistogramTimerScope incremental_marking_scope(
     988       62252 :           heap_->isolate()->counters()->gc_incremental_marking());
     989       93378 :       TRACE_EVENT0("v8", "V8.GCIncrementalMarking");
     990      155630 :       TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL);
     991             :       bytes_processed =
     992       31126 :           Step(bytes_to_process, GC_VIA_STACK_GUARD, StepOrigin::kV8);
     993             :     }
     994       62252 :     bytes_allocated_ -= Min(bytes_allocated_, bytes_processed);
     995             :   }
     996             : }
     997             : 
     998       85911 : size_t IncrementalMarking::Step(size_t bytes_to_process,
     999             :                                 CompletionAction action,
    1000      166042 :                                 StepOrigin step_origin) {
    1001      387103 :   double start = heap_->MonotonicallyIncreasingTimeInMs();
    1002             : 
    1003       85911 :   if (state_ == SWEEPING) {
    1004       38575 :     TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL_SWEEPING);
    1005       15430 :     FinalizeSweeping();
    1006             :   }
    1007             : 
    1008             :   size_t bytes_processed = 0;
    1009       85911 :   if (state_ == MARKING) {
    1010       85129 :     if (FLAG_concurrent_marking) {
    1011      161826 :       heap_->new_space()->ResetOriginalTop();
    1012             :       // It is safe to merge back all objects that were on hold to the shared
    1013             :       // work list at Step because we are at a safepoint where all objects
    1014             :       // are properly initialized.
    1015             :       marking_worklist()->shared()->MergeGlobalPool(
    1016       80913 :           marking_worklist()->on_hold());
    1017             :     }
    1018       85129 :     if (FLAG_trace_incremental_marking && FLAG_trace_concurrent_marking &&
    1019             :         FLAG_trace_gc_verbose) {
    1020           0 :       marking_worklist()->Print();
    1021             :     }
    1022      170258 :     bytes_processed = ProcessMarkingWorklist(bytes_to_process);
    1023       85129 :     if (step_origin == StepOrigin::kTask) {
    1024       49129 :       bytes_marked_ahead_of_schedule_ += bytes_processed;
    1025             :     }
    1026             : 
    1027       85129 :     if (marking_worklist()->IsEmpty()) {
    1028       44958 :       if (heap_->local_embedder_heap_tracer()
    1029       44958 :               ->ShouldFinalizeIncrementalMarking()) {
    1030       44958 :         if (!finalize_marking_completed_) {
    1031       28871 :           FinalizeMarking(action);
    1032             :         } else {
    1033       16087 :           MarkingComplete(action);
    1034             :         }
    1035             :       } else {
    1036           0 :         heap_->local_embedder_heap_tracer()->NotifyV8MarkingWorklistWasEmpty();
    1037             :       }
    1038             :     }
    1039             :   }
    1040       85911 :   if (FLAG_concurrent_marking) {
    1041      163390 :     heap_->concurrent_marking()->RescheduleTasksIfNeeded();
    1042             :   }
    1043             : 
    1044       85911 :   double end = heap_->MonotonicallyIncreasingTimeInMs();
    1045       85911 :   double duration = (end - start);
    1046             :   // Note that we report zero bytes here when sweeping was in progress or
    1047             :   // when we just started incremental marking. In these cases we did not
    1048             :   // process the marking deque.
    1049      171822 :   heap_->tracer()->AddIncrementalMarkingStep(duration, bytes_processed);
    1050       85911 :   if (FLAG_trace_incremental_marking) {
    1051             :     heap_->isolate()->PrintWithTimestamp(
    1052             :         "[IncrementalMarking] Step %s %" PRIuS "KB (%" PRIuS "KB) in %.1f\n",
    1053             :         step_origin == StepOrigin::kV8 ? "in v8" : "in task",
    1054          24 :         bytes_processed / KB, bytes_to_process / KB, duration);
    1055             :   }
    1056       85911 :   if (FLAG_trace_concurrent_marking) {
    1057             :     heap_->isolate()->PrintWithTimestamp(
    1058             :         "Concurrently marked %" PRIuS "KB\n",
    1059           0 :         heap_->concurrent_marking()->TotalMarkedBytes() / KB);
    1060             :   }
    1061       85911 :   return bytes_processed;
    1062             : }
    1063             : 
    1064             : }  // namespace internal
    1065             : }  // namespace v8

Generated by: LCOV version 1.10