LCOV - code coverage report
Current view: top level - src/heap - incremental-marking.cc
Test: app.info    Date: 2017-04-26

                 Hit    Total    Coverage
Lines:           427      456      93.6 %
Functions:        55       58      94.8 %

          Line data    Source code
       1             : // Copyright 2012 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #include "src/heap/incremental-marking.h"
       6             : 
       7             : #include "src/code-stubs.h"
       8             : #include "src/compilation-cache.h"
       9             : #include "src/conversions.h"
      10             : #include "src/heap/concurrent-marking.h"
      11             : #include "src/heap/gc-idle-time-handler.h"
      12             : #include "src/heap/gc-tracer.h"
      13             : #include "src/heap/heap-inl.h"
      14             : #include "src/heap/mark-compact-inl.h"
      15             : #include "src/heap/object-stats.h"
      16             : #include "src/heap/objects-visiting-inl.h"
      17             : #include "src/heap/objects-visiting.h"
      18             : #include "src/tracing/trace-event.h"
      19             : #include "src/v8.h"
      20             : #include "src/visitors.h"
      21             : 
      22             : namespace v8 {
      23             : namespace internal {
      24             : 
      25       60782 : IncrementalMarking::IncrementalMarking(Heap* heap)
      26             :     : heap_(heap),
      27             :       initial_old_generation_size_(0),
      28             :       bytes_marked_ahead_of_schedule_(0),
      29             :       unscanned_bytes_of_large_object_(0),
      30             :       state_(STOPPED),
      31             :       idle_marking_delay_counter_(0),
      32             :       incremental_marking_finalization_rounds_(0),
      33             :       is_compacting_(false),
      34             :       should_hurry_(false),
      35             :       was_activated_(false),
      36             :       black_allocation_(false),
      37             :       finalize_marking_completed_(false),
      38             :       trace_wrappers_toggle_(false),
      39             :       request_type_(NONE),
      40             :       new_generation_observer_(*this, kAllocatedThreshold),
      41      182346 :       old_generation_observer_(*this, kAllocatedThreshold) {}
      42             : 
      43             : bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) {
      44             :   HeapObject* value_heap_obj = HeapObject::cast(value);
      45             :   DCHECK(!ObjectMarking::IsImpossible(value_heap_obj,
      46             :                                       MarkingState::Internal(value_heap_obj)));
      47             :   DCHECK(!ObjectMarking::IsImpossible(obj, MarkingState::Internal(obj)));
      48             :   const bool is_black =
      49   223905236 :       ObjectMarking::IsBlack(obj, MarkingState::Internal(obj));
      50             : 
      51   235098807 :   if (is_black && ObjectMarking::IsWhite(
      52   123146189 :                       value_heap_obj, MarkingState::Internal(value_heap_obj))) {
      53     2217012 :     WhiteToGreyAndPush(value_heap_obj);
      54     2217012 :     RestartIfNotMarking();
      55             :   }
      56   111952618 :   return is_compacting_ && is_black;
      57             : }
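
  [Sketch] A minimal illustration of the write-barrier predicate above, using
  hypothetical Obj/Marker types rather than the V8 API: when a black (fully
  scanned) object is written a pointer to a white (unvisited) one, the target
  is shaded grey and queued for rescanning, which preserves the tri-color
  marking invariant during incremental marking.

      #include <deque>

      enum class Color { kWhite, kGrey, kBlack };

      struct Obj {
        Color color = Color::kWhite;
      };

      struct Marker {
        std::deque<Obj*> worklist;
        bool is_compacting = false;

        // Returns true when the caller must also record the written slot,
        // mirroring the is_compacting_ && is_black result above.
        bool BaseRecordWrite(Obj* host, Obj* value) {
          const bool is_black = host->color == Color::kBlack;
          if (is_black && value->color == Color::kWhite) {
            value->color = Color::kGrey;   // WhiteToGreyAndPush
            worklist.push_back(value);
          }
          return is_compacting && is_black;
        }
      };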
      58             : 
      59             : 
      60   109821633 : void IncrementalMarking::RecordWriteSlow(HeapObject* obj, Object** slot,
      61             :                                          Object* value) {
      62   109821633 :   if (BaseRecordWrite(obj, value) && slot != NULL) {
       63             :     // Object is not going to be rescanned; we need to record the slot.
      64             :     heap_->mark_compact_collector()->RecordSlot(obj, slot, value);
      65             :   }
      66   109821633 : }
      67             : 
      68             : 
      69      337744 : void IncrementalMarking::RecordWriteFromCode(HeapObject* obj, Object** slot,
      70             :                                              Isolate* isolate) {
      71             :   DCHECK(obj->IsHeapObject());
      72      337744 :   isolate->heap()->incremental_marking()->RecordWrite(obj, slot, *slot);
      73      337744 : }
      74             : 
      75             : // static
      76       53131 : void IncrementalMarking::RecordWriteOfCodeEntryFromCode(JSFunction* host,
      77             :                                                         Object** slot,
      78             :                                                         Isolate* isolate) {
      79             :   DCHECK(host->IsJSFunction());
      80       53131 :   IncrementalMarking* marking = isolate->heap()->incremental_marking();
      81             :   Code* value = Code::cast(
      82       53131 :       Code::GetObjectFromEntryAddress(reinterpret_cast<Address>(slot)));
      83             :   marking->RecordWriteOfCodeEntry(host, slot, value);
      84       53131 : }
      85             : 
      86        9655 : void IncrementalMarking::RecordCodeTargetPatch(Code* host, Address pc,
      87             :                                                HeapObject* value) {
      88        9655 :   if (IsMarking()) {
      89             :     RelocInfo rinfo(pc, RelocInfo::CODE_TARGET, 0, host);
      90             :     RecordWriteIntoCode(host, &rinfo, value);
      91             :   }
      92        9655 : }
      93             : 
      94             : 
      95      837412 : void IncrementalMarking::RecordCodeTargetPatch(Address pc, HeapObject* value) {
      96      837412 :   if (IsMarking()) {
      97             :     Code* host = heap_->isolate()
      98             :                      ->inner_pointer_to_code_cache()
      99       86870 :                      ->GcSafeFindCodeForInnerPointer(pc);
     100             :     RelocInfo rinfo(pc, RelocInfo::CODE_TARGET, 0, host);
     101             :     RecordWriteIntoCode(host, &rinfo, value);
     102             :   }
     103      837412 : }
     104             : 
     105             : 
     106     1145455 : void IncrementalMarking::RecordWriteOfCodeEntrySlow(JSFunction* host,
     107             :                                                     Object** slot,
     108             :                                                     Code* value) {
     109     1145455 :   if (BaseRecordWrite(host, value)) {
     110             :     DCHECK(slot != NULL);
     111             :     heap_->mark_compact_collector()->RecordCodeEntrySlot(
     112          35 :         host, reinterpret_cast<Address>(slot), value);
     113             :   }
     114     1145455 : }
     115             : 
     116      985530 : void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo,
     117             :                                                  Object* value) {
     118      985530 :   if (BaseRecordWrite(host, value)) {
     119             :     // Object is not going to be rescanned.  We need to record the slot.
     120        4519 :     heap_->mark_compact_collector()->RecordRelocSlot(host, rinfo, value);
     121             :   }
     122      985530 : }
     123             : 
     124    46608782 : void IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) {
     125             :   ObjectMarking::WhiteToGrey(obj, MarkingState::Internal(obj));
     126    46608782 :   heap_->mark_compact_collector()->marking_deque()->Push(obj);
     127    46608782 : }
     128             : 
     129      308031 : void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from,
     130             :                                       HeapObject* to) {
     131             :   DCHECK(MemoryChunk::FromAddress(from->address())->SweepingDone());
     132             :   // This is only used when resizing an object.
     133             :   DCHECK(MemoryChunk::FromAddress(from->address()) ==
     134             :          MemoryChunk::FromAddress(to->address()));
     135             : 
     136      308031 :   if (!heap->incremental_marking()->IsMarking()) return;
     137             : 
     138             :   // If the mark doesn't move, we don't check the color of the object.
     139             :   // It doesn't matter whether the object is black, since it hasn't changed
     140             :   // size, so the adjustment to the live data count will be zero anyway.
     141        1134 :   if (from == to) return;
     142             : 
     143             :   MarkBit new_mark_bit =
     144             :       ObjectMarking::MarkBitFrom(to, MarkingState::Internal(to));
     145             :   MarkBit old_mark_bit =
     146             :       ObjectMarking::MarkBitFrom(from, MarkingState::Internal(from));
     147             : 
     148        1134 :   if (Marking::IsBlack(old_mark_bit)) {
     149             :     Marking::MarkBlack(new_mark_bit);
     150        1132 :   } else if (Marking::IsGrey(old_mark_bit)) {
     151             :     Marking::WhiteToGrey(new_mark_bit);
     152             :     heap->mark_compact_collector()->marking_deque()->Push(to);
     153           0 :     heap->incremental_marking()->RestartIfNotMarking();
     154             :   }
     155             : }
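
  [Sketch] In tri-color terms, TransferMark reduces to the following
  (hypothetical MarkedObj type, with a deque as the marking worklist as in
  the barrier sketch above): a black source stays black at the new address,
  while a grey source must be re-greyed and re-queued under its new address
  so the resized body gets scanned.

      #include <deque>

      enum class MarkColor { kWhite, kGrey, kBlack };

      struct MarkedObj {
        MarkColor color = MarkColor::kWhite;
      };

      void TransferMarkSketch(MarkedObj* from, MarkedObj* to,
                              std::deque<MarkedObj*>* worklist) {
        if (from == to) return;  // address unchanged: nothing to adjust
        if (from->color == MarkColor::kBlack) {
          to->color = MarkColor::kBlack;
        } else if (from->color == MarkColor::kGrey) {
          to->color = MarkColor::kGrey;  // WhiteToGrey on the new mark bit
          worklist->push_back(to);       // rescan under the new address
        }
      }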
     156             : 
     157             : class IncrementalMarkingMarkingVisitor
     158             :     : public StaticMarkingVisitor<IncrementalMarkingMarkingVisitor> {
     159             :  public:
     160             :   static void Initialize() {
     161       58018 :     StaticMarkingVisitor<IncrementalMarkingMarkingVisitor>::Initialize();
     162             :     table_.Register(kVisitFixedArray, &VisitFixedArrayIncremental);
     163             :     table_.Register(kVisitNativeContext, &VisitNativeContextIncremental);
     164             :   }
     165             : 
     166             :   static const int kProgressBarScanningChunk = 32 * 1024;
     167             : 
     168     3382435 :   static void VisitFixedArrayIncremental(Map* map, HeapObject* object) {
     169     3426069 :     MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
     170     3382435 :     if (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR)) {
     171             :       DCHECK(!FLAG_use_marking_progress_bar ||
     172             :              chunk->owner()->identity() == LO_SPACE);
     173      129658 :       Heap* heap = map->GetHeap();
     174             :       // When using a progress bar for large fixed arrays, scan only a chunk of
     175             :       // the array and try to push it onto the marking deque again until it is
     176             :       // fully scanned. Fall back to scanning it through to the end in case this
     177             :       // fails because of a full deque.
     178             :       int object_size = FixedArray::BodyDescriptor::SizeOf(map, object);
     179             :       int start_offset =
     180             :           Max(FixedArray::BodyDescriptor::kStartOffset, chunk->progress_bar());
     181             :       int end_offset =
     182       43634 :           Min(object_size, start_offset + kProgressBarScanningChunk);
     183             :       int already_scanned_offset = start_offset;
     184             :       bool scan_until_end = false;
     185       43634 :       do {
     186             :         VisitPointers(heap, object, HeapObject::RawField(object, start_offset),
     187             :                       HeapObject::RawField(object, end_offset));
     188             :         start_offset = end_offset;
     189       43634 :         end_offset = Min(object_size, end_offset + kProgressBarScanningChunk);
     190             :         scan_until_end =
     191       43634 :             heap->mark_compact_collector()->marking_deque()->IsFull();
     192       43634 :       } while (scan_until_end && start_offset < object_size);
     193             :       chunk->set_progress_bar(start_offset);
     194       43634 :       if (start_offset < object_size) {
     195       43012 :         if (ObjectMarking::IsGrey(object, MarkingState::Internal(object))) {
     196             :           heap->mark_compact_collector()->marking_deque()->Unshift(object);
     197             :         } else {
     198             :           DCHECK(
     199             :               ObjectMarking::IsBlack(object, MarkingState::Internal(object)));
     200             :           heap->mark_compact_collector()->UnshiftBlack(object);
     201             :         }
     202             :         heap->incremental_marking()->NotifyIncompleteScanOfObject(
     203       43012 :             object_size - (start_offset - already_scanned_offset));
     204             :       }
     205             :     } else {
     206             :       FixedArrayVisitor::Visit(map, object);
     207             :     }
     208     3382435 :   }
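
  [Sketch] The progress-bar path above bounds the cost of visiting one huge
  array: at most kProgressBarScanningChunk bytes are scanned per visit, the
  offset reached so far is persisted on the page, and the object is re-queued
  until its tail is done. A distilled sketch under assumed types (BigArray
  and ScanOneChunk are illustrative, not V8 names):

      #include <algorithm>

      constexpr int kChunk = 32 * 1024;

      struct BigArray {
        int size_in_bytes = 0;
        int progress_bar = 0;  // next unscanned offset, kept across visits
      };

      // Scans one chunk; returns true if unscanned payload remains and the
      // object must therefore be pushed back onto the marking deque.
      template <typename VisitRange>
      bool ScanOneChunk(BigArray* a, VisitRange visit_range) {
        const int start = a->progress_bar;
        const int end = std::min(a->size_in_bytes, start + kChunk);
        visit_range(start, end);  // visit pointers in [start, end)
        a->progress_bar = end;
        return end < a->size_in_bytes;
      }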
     209             : 
     210        1419 :   static void VisitNativeContextIncremental(Map* map, HeapObject* object) {
     211             :     Context* context = Context::cast(object);
     212             : 
      213             :     // We will mark the cache black with a separate pass when we finish marking.
     214             :     // Note that GC can happen when the context is not fully initialized,
     215             :     // so the cache can be undefined.
     216             :     Object* cache = context->get(Context::NORMALIZED_MAP_CACHE_INDEX);
     217        1419 :     if (!cache->IsUndefined(map->GetIsolate())) {
     218        1419 :       if (cache->IsHeapObject()) {
     219             :         HeapObject* heap_obj = HeapObject::cast(cache);
      220             :         // Mark the object grey if it is white; do not enqueue it into the
      221             :         // marking deque.
     222        1419 :         if (ObjectMarking::IsWhite(heap_obj,
     223             :                                    MarkingState::Internal(heap_obj))) {
     224             :           ObjectMarking::WhiteToGrey(heap_obj,
     225             :                                      MarkingState::Internal(heap_obj));
     226             :         }
     227             :       }
     228             :     }
     229             :     VisitNativeContext(map, context);
     230        1419 :   }
     231             : 
     232             :   INLINE(static void VisitPointer(Heap* heap, HeapObject* object, Object** p)) {
     233       57792 :     Object* target = *p;
     234       57792 :     if (target->IsHeapObject()) {
     235             :       heap->mark_compact_collector()->RecordSlot(object, p, target);
     236             :       MarkObject(heap, target);
     237             :     }
     238             :   }
     239             : 
     240             :   INLINE(static void VisitPointers(Heap* heap, HeapObject* object,
     241             :                                    Object** start, Object** end)) {
     242   418774903 :     for (Object** p = start; p < end; p++) {
     243   418774903 :       Object* target = *p;
     244   418774903 :       if (target->IsHeapObject()) {
     245             :         heap->mark_compact_collector()->RecordSlot(object, p, target);
     246             :         MarkObject(heap, target);
     247             :       }
     248             :     }
     249             :   }
     250             : 
     251             :   // Marks the object grey and pushes it on the marking stack.
     252             :   INLINE(static void MarkObject(Heap* heap, Object* obj)) {
     253   305872311 :     IncrementalMarking::MarkGrey(heap, HeapObject::cast(obj));
     254             :   }
     255             : 
     256             :   // Marks the object black without pushing it on the marking stack.
     257             :   // Returns true if object needed marking and false otherwise.
     258             :   INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) {
     259             :     HeapObject* heap_object = HeapObject::cast(obj);
     260      740562 :     if (ObjectMarking::IsWhite(heap_object,
     261     1481124 :                                MarkingState::Internal(heap_object))) {
     262             :       ObjectMarking::WhiteToBlack(heap_object,
     263      320466 :                                   MarkingState::Internal(heap_object));
     264             :       return true;
     265             :     }
     266             :     return false;
     267             :   }
     268             : };
     269             : 
     270     6099766 : void IncrementalMarking::IterateBlackObject(HeapObject* object) {
     271     6266059 :   if (IsMarking() &&
     272             :       ObjectMarking::IsBlack(object, MarkingState::Internal(object))) {
     273       36134 :     Page* page = Page::FromAddress(object->address());
     274       36134 :     if ((page->owner() != nullptr) && (page->owner()->identity() == LO_SPACE)) {
     275             :       // IterateBlackObject requires us to visit the whole object.
     276             :       page->ResetProgressBar();
     277             :     }
     278             :     Map* map = object->map();
     279       36134 :     MarkGrey(heap_, map);
     280             :     IncrementalMarkingMarkingVisitor::IterateBody(map, object);
     281             :   }
     282     6099766 : }
     283             : 
     284           0 : class IncrementalMarkingRootMarkingVisitor : public RootVisitor {
     285             :  public:
     286             :   explicit IncrementalMarkingRootMarkingVisitor(
     287        2206 :       IncrementalMarking* incremental_marking)
     288        4412 :       : heap_(incremental_marking->heap()) {}
     289             : 
     290     5856111 :   void VisitRootPointer(Root root, Object** p) override {
     291             :     MarkObjectByPointer(p);
     292     5856111 :   }
     293             : 
     294       36510 :   void VisitRootPointers(Root root, Object** start, Object** end) override {
     295     8619241 :     for (Object** p = start; p < end; p++) MarkObjectByPointer(p);
     296       36510 :   }
     297             : 
     298             :  private:
     299             :   void MarkObjectByPointer(Object** p) {
     300             :     Object* obj = *p;
     301    14438842 :     if (!obj->IsHeapObject()) return;
     302             : 
     303    12554540 :     IncrementalMarking::MarkGrey(heap_, HeapObject::cast(obj));
     304             :   }
     305             : 
     306             :   Heap* heap_;
     307             : };
     308             : 
     309             : 
     310       58018 : void IncrementalMarking::Initialize() {
     311             :   IncrementalMarkingMarkingVisitor::Initialize();
     312       58018 : }
     313             : 
     314             : 
     315      526878 : void IncrementalMarking::SetOldSpacePageFlags(MemoryChunk* chunk,
     316             :                                               bool is_marking,
     317             :                                               bool is_compacting) {
     318      526878 :   if (is_marking) {
     319             :     chunk->SetFlag(MemoryChunk::POINTERS_TO_HERE_ARE_INTERESTING);
     320             :     chunk->SetFlag(MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING);
     321             :   } else {
     322             :     chunk->ClearFlag(MemoryChunk::POINTERS_TO_HERE_ARE_INTERESTING);
     323             :     chunk->SetFlag(MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING);
     324             :   }
     325      526878 : }
     326             : 
     327             : 
     328      188999 : void IncrementalMarking::SetNewSpacePageFlags(MemoryChunk* chunk,
     329             :                                               bool is_marking) {
     330             :   chunk->SetFlag(MemoryChunk::POINTERS_TO_HERE_ARE_INTERESTING);
     331      188999 :   if (is_marking) {
     332             :     chunk->SetFlag(MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING);
     333             :   } else {
     334             :     chunk->ClearFlag(MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING);
     335             :   }
     336      188999 : }
     337             : 
     338             : 
     339        3531 : void IncrementalMarking::DeactivateIncrementalWriteBarrierForSpace(
     340             :     PagedSpace* space) {
     341       47584 :   for (Page* p : *space) {
     342             :     SetOldSpacePageFlags(p, false, false);
     343             :   }
     344        3531 : }
     345             : 
     346             : 
     347        1177 : void IncrementalMarking::DeactivateIncrementalWriteBarrierForSpace(
     348             :     NewSpace* space) {
     349       18353 :   for (Page* p : *space) {
     350             :     SetNewSpacePageFlags(p, false);
     351             :   }
     352        1177 : }
     353             : 
     354             : 
     355        1177 : void IncrementalMarking::DeactivateIncrementalWriteBarrier() {
     356        5885 :   DeactivateIncrementalWriteBarrierForSpace(heap_->old_space());
     357        2354 :   DeactivateIncrementalWriteBarrierForSpace(heap_->map_space());
     358        2354 :   DeactivateIncrementalWriteBarrierForSpace(heap_->code_space());
     359        2354 :   DeactivateIncrementalWriteBarrierForSpace(heap_->new_space());
     360             : 
     361       18349 :   for (LargePage* lop : *heap_->lo_space()) {
     362             :     SetOldSpacePageFlags(lop, false, false);
     363             :   }
     364        1177 : }
     365             : 
     366             : 
     367        3612 : void IncrementalMarking::ActivateIncrementalWriteBarrier(PagedSpace* space) {
     368       46454 :   for (Page* p : *space) {
     369             :     SetOldSpacePageFlags(p, true, is_compacting_);
     370             :   }
     371        3612 : }
     372             : 
     373             : 
     374        1204 : void IncrementalMarking::ActivateIncrementalWriteBarrier(NewSpace* space) {
     375       17752 :   for (Page* p : *space) {
     376             :     SetNewSpacePageFlags(p, true);
     377             :   }
     378        1204 : }
     379             : 
     380             : 
     381        1204 : void IncrementalMarking::ActivateIncrementalWriteBarrier() {
     382        6020 :   ActivateIncrementalWriteBarrier(heap_->old_space());
     383        2408 :   ActivateIncrementalWriteBarrier(heap_->map_space());
     384        2408 :   ActivateIncrementalWriteBarrier(heap_->code_space());
     385        2408 :   ActivateIncrementalWriteBarrier(heap_->new_space());
     386             : 
     387       14800 :   for (LargePage* lop : *heap_->lo_space()) {
     388             :     SetOldSpacePageFlags(lop, true, is_compacting_);
     389             :   }
     390        1204 : }
     391             : 
     392             : 
     393       53340 : bool IncrementalMarking::WasActivated() { return was_activated_; }
     394             : 
     395             : 
     396     2904093 : bool IncrementalMarking::CanBeActivated() {
     397             :   // Only start incremental marking in a safe state: 1) when incremental
     398             :   // marking is turned on, 2) when we are currently not in a GC, and
     399             :   // 3) when we are currently not serializing or deserializing the heap.
     400     2890618 :   return FLAG_incremental_marking && heap_->gc_state() == Heap::NOT_IN_GC &&
     401     4904825 :          heap_->deserialization_complete() &&
     402     4904825 :          !heap_->isolate()->serializer_enabled();
     403             : }
     404             : 
     405             : 
     406       22803 : void IncrementalMarking::ActivateGeneratedStub(Code* stub) {
     407             :   DCHECK(RecordWriteStub::GetMode(stub) == RecordWriteStub::STORE_BUFFER_ONLY);
     408             : 
     409       22803 :   if (!IsMarking()) {
     410             :     // Initially stub is generated in STORE_BUFFER_ONLY mode thus
     411             :     // we don't need to do anything if incremental marking is
     412             :     // not active.
     413         331 :   } else if (IsCompacting()) {
     414           0 :     RecordWriteStub::Patch(stub, RecordWriteStub::INCREMENTAL_COMPACTION);
     415             :   } else {
     416         331 :     RecordWriteStub::Patch(stub, RecordWriteStub::INCREMENTAL);
     417             :   }
     418       22803 : }
     419             : 
     420             : 
     421        2381 : static void PatchIncrementalMarkingRecordWriteStubs(
     422        2381 :     Heap* heap, RecordWriteStub::Mode mode) {
     423             :   UnseededNumberDictionary* stubs = heap->code_stubs();
     424             : 
     425             :   int capacity = stubs->Capacity();
     426             :   Isolate* isolate = heap->isolate();
     427     1291085 :   for (int i = 0; i < capacity; i++) {
     428             :     Object* k = stubs->KeyAt(i);
     429     1288704 :     if (stubs->IsKey(isolate, k)) {
     430      725659 :       uint32_t key = NumberToUint32(k);
     431             : 
     432      725659 :       if (CodeStub::MajorKeyFromKey(key) == CodeStub::RecordWrite) {
     433      286121 :         Object* e = stubs->ValueAt(i);
     434      286121 :         if (e->IsCode()) {
     435      286121 :           RecordWriteStub::Patch(Code::cast(e), mode);
     436             :         }
     437             :       }
     438             :     }
     439             :   }
     440        2381 : }
     441             : 
     442        2476 : void IncrementalMarking::Start(GarbageCollectionReason gc_reason) {
     443        1229 :   if (FLAG_trace_incremental_marking) {
     444             :     int old_generation_size_mb =
     445           6 :         static_cast<int>(heap()->PromotedSpaceSizeOfObjects() / MB);
     446             :     int old_generation_limit_mb =
     447           6 :         static_cast<int>(heap()->old_generation_allocation_limit() / MB);
     448             :     heap()->isolate()->PrintWithTimestamp(
     449             :         "[IncrementalMarking] Start (%s): old generation %dMB, limit %dMB, "
     450             :         "slack %dMB\n",
     451             :         Heap::GarbageCollectionReasonToString(gc_reason),
     452             :         old_generation_size_mb, old_generation_limit_mb,
     453          18 :         Max(0, old_generation_limit_mb - old_generation_size_mb));
     454             :   }
     455             :   DCHECK(FLAG_incremental_marking);
     456             :   DCHECK(state_ == STOPPED);
     457             :   DCHECK(heap_->gc_state() == Heap::NOT_IN_GC);
     458             :   DCHECK(!heap_->isolate()->serializer_enabled());
     459             : 
     460       11061 :   Counters* counters = heap_->isolate()->counters();
     461             : 
     462             :   counters->incremental_marking_reason()->AddSample(
     463        1229 :       static_cast<int>(gc_reason));
     464             :   HistogramTimerScope incremental_marking_scope(
     465        1229 :       counters->gc_incremental_marking_start());
     466        3687 :   TRACE_EVENT0("v8", "V8.GCIncrementalMarkingStart");
     467        2458 :   heap_->tracer()->NotifyIncrementalMarkingStart();
     468             : 
     469        1229 :   start_time_ms_ = heap()->MonotonicallyIncreasingTimeInMs();
     470        1229 :   initial_old_generation_size_ = heap_->PromotedSpaceSizeOfObjects();
     471        2458 :   old_generation_allocation_counter_ = heap_->OldGenerationAllocationCounter();
     472        1229 :   bytes_allocated_ = 0;
     473        1229 :   bytes_marked_ahead_of_schedule_ = 0;
     474        1229 :   should_hurry_ = false;
     475        1229 :   was_activated_ = true;
     476             : 
     477        2458 :   if (!heap_->mark_compact_collector()->sweeping_in_progress()) {
     478        1133 :     StartMarking();
     479             :   } else {
     480          96 :     if (FLAG_trace_incremental_marking) {
     481             :       heap()->isolate()->PrintWithTimestamp(
     482           0 :           "[IncrementalMarking] Start sweeping.\n");
     483             :     }
     484          96 :     state_ = SWEEPING;
     485             :   }
     486             : 
     487        2458 :   SpaceIterator it(heap_);
     488        8603 :   while (it.has_next()) {
     489        6145 :     Space* space = it.next();
     490       12290 :     if (space == heap_->new_space()) {
     491        1229 :       space->AddAllocationObserver(&new_generation_observer_);
     492             :     } else {
     493        4916 :       space->AddAllocationObserver(&old_generation_observer_);
     494             :     }
     495             :   }
     496             : 
     497        1229 :   incremental_marking_job()->Start(heap_);
     498        1229 : }
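
  [Sketch] Note how Start() attaches an allocation observer to every space:
  mutator allocation, not wall-clock time, is what paces subsequent marking
  steps. A sketch of that observer mechanism under a simplified, assumed
  interface (this AllocationObserver is illustrative, not the V8 class):

      #include <cstddef>
      #include <functional>

      class AllocationObserver {
       public:
        AllocationObserver(std::size_t step_size,
                           std::function<void(std::size_t)> step)
            : step_size_(step_size), step_(std::move(step)) {}

        // Called by a space on each allocation; once roughly step_size_
        // bytes have accumulated, a marking step fires and the counter
        // resets.
        void AllocationEvent(std::size_t bytes_allocated) {
          bytes_since_step_ += bytes_allocated;
          if (bytes_since_step_ >= step_size_) {
            step_(bytes_since_step_);
            bytes_since_step_ = 0;
          }
        }

       private:
        std::size_t step_size_;
        std::function<void(std::size_t)> step_;
        std::size_t bytes_since_step_ = 0;
      };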
     499             : 
     500             : 
     501        2414 : void IncrementalMarking::StartMarking() {
     502        6020 :   if (heap_->isolate()->serializer_enabled()) {
     503             :     // Black allocation currently starts when we start incremental marking,
     504             :     // but we cannot enable black allocation while deserializing. Hence, we
     505             :     // have to delay the start of incremental marking in that case.
     506           0 :     if (FLAG_trace_incremental_marking) {
     507             :       heap()->isolate()->PrintWithTimestamp(
     508           0 :           "[IncrementalMarking] Start delayed - serializer\n");
     509             :     }
     510           0 :     return;
     511             :   }
     512        1204 :   if (FLAG_trace_incremental_marking) {
     513             :     heap()->isolate()->PrintWithTimestamp(
     514           6 :         "[IncrementalMarking] Start marking\n");
     515             :   }
     516             : 
     517             :   is_compacting_ =
     518        2408 :       !FLAG_never_compact && heap_->mark_compact_collector()->StartCompaction();
     519             : 
     520        1204 :   state_ = MARKING;
     521             : 
     522             :   {
     523        4816 :     TRACE_GC(heap()->tracer(),
     524             :              GCTracer::Scope::MC_INCREMENTAL_WRAPPER_PROLOGUE);
     525        3612 :     heap_->local_embedder_heap_tracer()->TracePrologue();
     526             :   }
     527             : 
     528             :   RecordWriteStub::Mode mode = is_compacting_
     529             :                                    ? RecordWriteStub::INCREMENTAL_COMPACTION
     530        1204 :                                    : RecordWriteStub::INCREMENTAL;
     531             : 
     532        1204 :   PatchIncrementalMarkingRecordWriteStubs(heap_, mode);
     533             : 
     534        2408 :   heap_->mark_compact_collector()->marking_deque()->StartUsing();
     535             : 
     536        1204 :   ActivateIncrementalWriteBarrier();
     537             : 
     538             : // Marking bits are cleared by the sweeper.
     539             : #ifdef VERIFY_HEAP
     540             :   if (FLAG_verify_heap) {
     541             :     heap_->mark_compact_collector()->VerifyMarkbitsAreClean();
     542             :   }
     543             : #endif
     544             : 
     545        1204 :   heap_->CompletelyClearInstanceofCache();
     546        2408 :   heap_->isolate()->compilation_cache()->MarkCompactPrologue();
     547             : 
     548             :   // Mark strong roots grey.
     549             :   IncrementalMarkingRootMarkingVisitor visitor(this);
     550        1204 :   heap_->IterateStrongRoots(&visitor, VISIT_ONLY_STRONG);
     551             : 
     552        1204 :   if (FLAG_concurrent_marking) {
     553           0 :     ConcurrentMarking* concurrent_marking = heap_->concurrent_marking();
     554             :     heap_->mark_compact_collector()->marking_deque()->Iterate(
     555             :         [concurrent_marking](HeapObject* obj) {
     556           0 :           concurrent_marking->AddRoot(obj);
     557           0 :         });
     558           0 :     concurrent_marking->StartTask();
     559             :   }
     560             : 
     561             :   // Ready to start incremental marking.
     562        1204 :   if (FLAG_trace_incremental_marking) {
     563           6 :     heap()->isolate()->PrintWithTimestamp("[IncrementalMarking] Running\n");
     564             :   }
     565             : }
     566             : 
     567        3914 : void IncrementalMarking::StartBlackAllocation() {
     568             :   DCHECK(FLAG_black_allocation);
     569             :   DCHECK(IsMarking());
     570         977 :   black_allocation_ = true;
     571         977 :   heap()->old_space()->MarkAllocationInfoBlack();
     572         977 :   heap()->map_space()->MarkAllocationInfoBlack();
     573         977 :   heap()->code_space()->MarkAllocationInfoBlack();
     574         977 :   if (FLAG_trace_incremental_marking) {
     575             :     heap()->isolate()->PrintWithTimestamp(
     576           6 :         "[IncrementalMarking] Black allocation started\n");
     577             :   }
     578         977 : }
     579             : 
     580        1208 : void IncrementalMarking::FinishBlackAllocation() {
     581        1202 :   if (black_allocation_) {
     582         977 :     black_allocation_ = false;
     583         977 :     if (FLAG_trace_incremental_marking) {
     584             :       heap()->isolate()->PrintWithTimestamp(
     585           6 :           "[IncrementalMarking] Black allocation finished\n");
     586             :     }
     587             :   }
     588        1202 : }
     589             : 
     590          52 : void IncrementalMarking::AbortBlackAllocation() {
     591          52 :   if (FLAG_trace_incremental_marking) {
     592             :     heap()->isolate()->PrintWithTimestamp(
     593           0 :         "[IncrementalMarking] Black allocation aborted\n");
     594             :   }
     595          52 : }
     596             : 
     597        1002 : void IncrementalMarking::MarkRoots() {
     598             :   DCHECK(!finalize_marking_completed_);
     599             :   DCHECK(IsMarking());
     600             : 
     601             :   IncrementalMarkingRootMarkingVisitor visitor(this);
     602        1002 :   heap_->IterateStrongRoots(&visitor, VISIT_ONLY_STRONG);
     603        1002 : }
     604             : 
     605        2004 : void IncrementalMarking::ProcessWeakCells() {
     606             :   DCHECK(!finalize_marking_completed_);
     607             :   DCHECK(IsMarking());
     608             : 
     609        1002 :   Object* the_hole_value = heap()->the_hole_value();
     610        1002 :   Object* weak_cell_obj = heap()->encountered_weak_cells();
     611             :   Object* weak_cell_head = Smi::kZero;
     612             :   WeakCell* prev_weak_cell_obj = NULL;
     613     1360388 :   while (weak_cell_obj != Smi::kZero) {
     614             :     WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj);
     615             :     // We do not insert cleared weak cells into the list, so the value
     616             :     // cannot be a Smi here.
     617             :     HeapObject* value = HeapObject::cast(weak_cell->value());
      618             :     // Remove weak cells with live objects from the list; they do not need
      619             :     // clearing.
     620     1358384 :     if (ObjectMarking::IsBlackOrGrey(value, MarkingState::Internal(value))) {
     621             :       // Record slot, if value is pointing to an evacuation candidate.
     622      356090 :       Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
     623      356090 :       heap_->mark_compact_collector()->RecordSlot(weak_cell, slot, *slot);
     624             :       // Remove entry somewhere after top.
     625      356090 :       if (prev_weak_cell_obj != NULL) {
     626      348559 :         prev_weak_cell_obj->set_next(weak_cell->next());
     627             :       }
     628             :       weak_cell_obj = weak_cell->next();
     629             :       weak_cell->clear_next(the_hole_value);
     630             :     } else {
     631     1002294 :       if (weak_cell_head == Smi::kZero) {
     632             :         weak_cell_head = weak_cell;
     633             :       }
     634             :       prev_weak_cell_obj = weak_cell;
     635             :       weak_cell_obj = weak_cell->next();
     636             :     }
     637             :   }
     638             :   // Top may have changed.
     639             :   heap()->set_encountered_weak_cells(weak_cell_head);
     640        1002 : }
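
  [Sketch] The loop above is ordinary singly-linked-list surgery: cells whose
  value is already marked are spliced out (they need no clearing), and the
  first survivor becomes the new list head. The same shape on plain pointers,
  with a hypothetical Cell type standing in for WeakCell:

      struct Cell {
        bool value_is_marked = false;
        Cell* next = nullptr;
      };

      Cell* FilterOutLiveCells(Cell* head) {
        Cell* new_head = nullptr;
        Cell* prev = nullptr;
        for (Cell* c = head; c != nullptr;) {
          Cell* following = c->next;
          if (c->value_is_marked) {
            if (prev != nullptr) prev->next = following;  // unlink live entry
            c->next = nullptr;
          } else {
            if (new_head == nullptr) new_head = c;  // first survivor is head
            prev = c;
          }
          c = following;
        }
        return new_head;
      }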
     641             : 
     642             : 
     643        1246 : bool ShouldRetainMap(Map* map, int age) {
     644        1246 :   if (age == 0) {
     645             :     // The map has aged. Do not retain this map.
     646             :     return false;
     647             :   }
     648        1228 :   Object* constructor = map->GetConstructor();
     649        2456 :   if (!constructor->IsHeapObject() ||
     650             :       ObjectMarking::IsWhite(
     651             :           HeapObject::cast(constructor),
     652             :           MarkingState::Internal(HeapObject::cast(constructor)))) {
     653             :     // The constructor is dead, no new objects with this map can
     654             :     // be created. Do not retain this map.
     655             :     return false;
     656             :   }
     657         492 :   return true;
     658             : }
     659             : 
     660             : 
     661        2422 : void IncrementalMarking::RetainMaps() {
      662             :   // Do not retain dead maps if the flag disables it, or if there is
      663             :   // - memory pressure (reduce_memory_footprint_), or
      664             :   // - a GC requested by tests or dev-tools (abort_incremental_marking_).
     665        1924 :   bool map_retaining_is_disabled = heap()->ShouldReduceMemory() ||
     666        1924 :                                    heap()->ShouldAbortIncrementalMarking() ||
     667         959 :                                    FLAG_retain_maps_for_n_gc == 0;
     668         965 :   ArrayList* retained_maps = heap()->retained_maps();
     669         965 :   int length = retained_maps->Length();
     670             :   // The number_of_disposed_maps separates maps in the retained_maps
     671             :   // array that were created before and after context disposal.
     672             :   // We do not age and retain disposed maps to avoid memory leaks.
     673         965 :   int number_of_disposed_maps = heap()->number_of_disposed_maps_;
     674        4932 :   for (int i = 0; i < length; i += 2) {
     675             :     DCHECK(retained_maps->Get(i)->IsWeakCell());
     676             :     WeakCell* cell = WeakCell::cast(retained_maps->Get(i));
     677        3967 :     if (cell->cleared()) continue;
     678             :     int age = Smi::cast(retained_maps->Get(i + 1))->value();
     679             :     int new_age;
     680             :     Map* map = Map::cast(cell->value());
     681        4583 :     if (i >= number_of_disposed_maps && !map_retaining_is_disabled &&
     682             :         ObjectMarking::IsWhite(map, MarkingState::Internal(map))) {
     683        1246 :       if (ShouldRetainMap(map, age)) {
     684         492 :         MarkGrey(heap(), map);
     685             :       }
     686             :       Object* prototype = map->prototype();
     687        3702 :       if (age > 0 && prototype->IsHeapObject() &&
     688             :           ObjectMarking::IsWhite(
     689             :               HeapObject::cast(prototype),
     690             :               MarkingState::Internal(HeapObject::cast(prototype)))) {
     691             :         // The prototype is not marked, age the map.
     692         826 :         new_age = age - 1;
     693             :       } else {
     694             :         // The prototype and the constructor are marked, this map keeps only
      695             :         // The prototype and the constructor are marked; this map keeps only
      696             :         // the transition tree alive, not JSObjects. Do not age the map.
     697             :       }
     698             :     } else {
     699        1050 :       new_age = FLAG_retain_maps_for_n_gc;
     700             :     }
     701             :     // Compact the array and update the age.
     702        2296 :     if (new_age != age) {
     703             :       retained_maps->Set(i + 1, Smi::FromInt(new_age));
     704             :     }
     705             :   }
     706         965 : }
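
  [Sketch] The aging policy distills to a small pure function. In this hedged
  sketch, NextAge is an illustrative helper (not a V8 function), and
  retaining_applies folds together the three guards above: the entry is past
  number_of_disposed_maps, map retaining is not disabled, and the map is
  still white.

      // Returns the age to store back for one retained-maps entry.
      int NextAge(int age, bool retaining_applies, bool prototype_is_unmarked,
                  int retain_maps_for_n_gc) {
        if (!retaining_applies) return retain_maps_for_n_gc;  // reset clock
        // While age > 0 the map is kept alive via MarkGrey; it ages only as
        // long as its prototype is also unmarked.
        return (age > 0 && prototype_is_unmarked) ? age - 1 : age;
      }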
     707             : 
     708        2010 : void IncrementalMarking::FinalizeIncrementally() {
     709        4008 :   TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_FINALIZE_BODY);
     710             :   DCHECK(!finalize_marking_completed_);
     711             :   DCHECK(IsMarking());
     712             : 
     713        4008 :   double start = heap_->MonotonicallyIncreasingTimeInMs();
     714             : 
     715             :   int old_marking_deque_top =
     716        2004 :       heap_->mark_compact_collector()->marking_deque()->top();
     717             : 
     718             :   // After finishing incremental marking, we try to discover all unmarked
     719             :   // objects to reduce the marking load in the final pause.
     720             :   // 1) We scan and mark the roots again to find all changes to the root set.
     721             :   // 2) Age and retain maps embedded in optimized code.
      722             :   // 3) Remove weak cells with live values from the list of weak cells; they
     723             :   // do not need processing during GC.
     724        1002 :   MarkRoots();
     725             : 
     726        1002 :   if (incremental_marking_finalization_rounds_ == 0) {
      727             :     // Map retaining is needed for performance, not correctness,
     728             :     // so we can do it only once at the beginning of the finalization.
     729         965 :     RetainMaps();
     730             :   }
     731        1002 :   ProcessWeakCells();
     732             : 
     733             :   int marking_progress =
     734        1002 :       abs(old_marking_deque_top -
     735        3006 :           heap_->mark_compact_collector()->marking_deque()->top());
     736             : 
     737             :   marking_progress += static_cast<int>(
     738        1002 :       heap_->local_embedder_heap_tracer()->NumberOfCachedWrappersToTrace());
     739             : 
     740        1002 :   double end = heap_->MonotonicallyIncreasingTimeInMs();
     741        1002 :   double delta = end - start;
     742        1002 :   if (FLAG_trace_incremental_marking) {
     743             :     heap()->isolate()->PrintWithTimestamp(
     744             :         "[IncrementalMarking] Finalize incrementally round %d, "
     745             :         "spent %d ms, marking progress %d.\n",
      746             :         incremental_marking_finalization_rounds_, static_cast<int>(delta),
     747          12 :         marking_progress);
     748             :   }
     749             : 
     750        1002 :   ++incremental_marking_finalization_rounds_;
     751        1002 :   if ((incremental_marking_finalization_rounds_ >=
     752        1000 :        FLAG_max_incremental_marking_finalization_rounds) ||
     753        1000 :       (marking_progress <
     754             :        FLAG_min_progress_during_incremental_marking_finalization)) {
     755         965 :     finalize_marking_completed_ = true;
     756             :   }
     757             : 
     758        4002 :   if (FLAG_black_allocation && !heap()->ShouldReduceMemory() &&
     759         996 :       !black_allocation_) {
     760             :     // TODO(hpayer): Move to an earlier point as soon as we make faster marking
     761             :     // progress.
     762         946 :     StartBlackAllocation();
     763        1002 :   }
     764        1002 : }
     765             : 
     766             : 
     767       69189 : void IncrementalMarking::UpdateMarkingDequeAfterScavenge() {
     768      138378 :   if (!IsMarking()) return;
     769             : 
     770         830 :   MarkingDeque* marking_deque =
     771        1660 :       heap_->mark_compact_collector()->marking_deque();
     772             :   int current = marking_deque->bottom();
     773             :   int mask = marking_deque->mask();
     774             :   int limit = marking_deque->top();
     775             :   HeapObject** array = marking_deque->array();
     776             :   int new_top = current;
     777             : 
     778             :   Map* filler_map = heap_->one_pointer_filler_map();
     779             : 
     780     1393291 :   while (current != limit) {
     781     1391631 :     HeapObject* obj = array[current];
     782             :     DCHECK(obj->IsHeapObject());
     783     1391631 :     current = ((current + 1) & mask);
     784             :     // Only pointers to from space have to be updated.
     785     1391631 :     if (heap_->InFromSpace(obj)) {
     786             :       MapWord map_word = obj->map_word();
     787             :       // There may be objects on the marking deque that do not exist anymore,
     788             :       // e.g. left trimmed objects or objects from the root set (frames).
     789             :       // If these object are dead at scavenging time, their marking deque
     790             :       // entries will not point to forwarding addresses. Hence, we can discard
     791             :       // them.
     792       19774 :       if (map_word.IsForwardingAddress()) {
     793       16433 :         HeapObject* dest = map_word.ToForwardingAddress();
     794       16433 :         if (ObjectMarking::IsBlack(dest, MarkingState::Internal(dest)))
     795             :           continue;
     796       16422 :         array[new_top] = dest;
     797       16422 :         new_top = ((new_top + 1) & mask);
     798             :         DCHECK(new_top != marking_deque->bottom());
     799             :         DCHECK(ObjectMarking::IsGrey(obj, MarkingState::Internal(obj)) ||
     800             :                (obj->IsFiller() &&
     801             :                 ObjectMarking::IsWhite(obj, MarkingState::Internal(obj))));
     802             :       }
     803     1371857 :     } else if (obj->map() != filler_map) {
      804             :       // Skip one-word filler objects that appear on the
      805             :       // stack when we perform in-place array shifts.
     806     1371857 :       array[new_top] = obj;
     807     1371857 :       new_top = ((new_top + 1) & mask);
     808             :       DCHECK(new_top != marking_deque->bottom());
     809             :       DCHECK(ObjectMarking::IsGrey(obj, MarkingState::Internal(obj)) ||
     810             :              (obj->IsFiller() &&
     811             :               ObjectMarking::IsWhite(obj, MarkingState::Internal(obj))) ||
     812             :              (MemoryChunk::FromAddress(obj->address())
     813             :                   ->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) &&
     814             :               ObjectMarking::IsBlack(obj, MarkingState::Internal(obj))));
     815             :     }
     816             :   }
     817             :   marking_deque->set_top(new_top);
     818             : }
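
  [Sketch] The deque here is a power-of-two ring buffer, so the update is an
  in-place compaction: a read cursor sweeps from bottom to top, dropped
  entries (dead or filler) are skipped, forwarded entries are rewritten, and
  survivors are written back through a write cursor that can never overtake
  the read cursor. A generic sketch of that pattern (CompactRing is
  illustrative, not a V8 name):

      // keep(&entry) may rewrite the entry (e.g. to its forwarding address)
      // and returns false for entries to discard. Returns the new top index;
      // mask assumes a power-of-two capacity.
      template <typename T, typename KeepFn>
      int CompactRing(T* array, int bottom, int top, int mask, KeepFn keep) {
        int new_top = bottom;
        for (int current = bottom; current != top;
             current = (current + 1) & mask) {
          T entry = array[current];
          if (keep(&entry)) {
            array[new_top] = entry;
            new_top = (new_top + 1) & mask;
          }
        }
        return new_top;
      }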
     819             : 
     820             : 
     821             : void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) {
     822    46524501 :   MarkGrey(heap_, map);
     823             : 
     824             :   IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
     825             : 
     826             : #if ENABLE_SLOW_DCHECKS
     827             :   MarkBit mark_bit =
     828             :       ObjectMarking::MarkBitFrom(obj, MarkingState::Internal(obj));
     829             :   MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
     830             :   SLOW_DCHECK(Marking::IsGrey(mark_bit) ||
     831             :               (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) &&
     832             :                Marking::IsBlack(mark_bit)));
     833             : #endif
     834    46524501 :   MarkBlack(obj, size);
     835             : }
     836             : 
     837   409473619 : void IncrementalMarking::MarkGrey(Heap* heap, HeapObject* object) {
     838   365081849 :   if (ObjectMarking::IsWhite(object, MarkingState::Internal(object))) {
     839    44391770 :     heap->incremental_marking()->WhiteToGreyAndPush(object);
     840             :   }
     841   365081849 : }
     842             : 
     843    46524501 : void IncrementalMarking::MarkBlack(HeapObject* obj, int size) {
     844    93049002 :   if (ObjectMarking::IsBlack(obj, MarkingState::Internal(obj))) return;
     845             :   ObjectMarking::GreyToBlack(obj, MarkingState::Internal(obj));
     846             : }
     847             : 
     848             : intptr_t IncrementalMarking::ProcessMarkingDeque(
     849    46524501 :     intptr_t bytes_to_process, ForceCompletionAction completion) {
     850             :   intptr_t bytes_processed = 0;
     851    46552006 :   MarkingDeque* marking_deque =
     852       55010 :       heap_->mark_compact_collector()->marking_deque();
     853    46552006 :   while (!marking_deque->IsEmpty() && (bytes_processed < bytes_to_process ||
     854             :                                        completion == FORCE_COMPLETION)) {
     855             :     HeapObject* obj = marking_deque->Pop();
     856             : 
     857             :     // Left trimming may result in white, grey, or black filler objects on the
     858             :     // marking deque. Ignore these objects.
     859    46524501 :     if (obj->IsFiller()) {
     860             :       DCHECK(!ObjectMarking::IsImpossible(obj, MarkingState::Internal(obj)));
     861             :       continue;
     862             :     }
     863             : 
     864    46524501 :     Map* map = obj->map();
     865    46524501 :     int size = obj->SizeFromMap(map);
     866    46524501 :     unscanned_bytes_of_large_object_ = 0;
     867             :     VisitObject(map, obj, size);
     868    42576596 :     bytes_processed += size - unscanned_bytes_of_large_object_;
     869             :   }
     870             :   // Report all found wrappers to the embedder. This is necessary as the
     871             :   // embedder could potentially invalidate wrappers as soon as V8 is done
     872             :   // with its incremental marking processing. Any cached wrappers could
     873             :   // result in broken pointers at this point.
     874       55010 :   heap_->local_embedder_heap_tracer()->RegisterWrappersWithRemoteTracer();
     875             :   return bytes_processed;
     876             : }
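
  [Sketch] This drain loop is what bounds an individual marking step: grey
  objects are popped and visited until either the deque empties or the byte
  budget is spent, while FORCE_COMPLETION (used by Hurry() below) ignores the
  budget. A sketch with hypothetical GreyObj/visit types:

      #include <deque>

      struct GreyObj {
        int size = 0;
      };

      template <typename VisitFn>
      long DrainWithBudget(std::deque<GreyObj*>* deque, long bytes_to_process,
                           bool force_completion, VisitFn visit) {
        long bytes_processed = 0;
        while (!deque->empty() &&
               (bytes_processed < bytes_to_process || force_completion)) {
          GreyObj* obj = deque->front();
          deque->pop_front();
          visit(obj);  // grey -> black; greys white children onto the deque
          bytes_processed += obj->size;
        }
        return bytes_processed;
      }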
     877             : 
     878             : 
     879        1125 : void IncrementalMarking::Hurry() {
     880             :   // A scavenge may have pushed new objects on the marking deque (due to black
     881             :   // allocation) even in COMPLETE state. This may happen if scavenges are
      882             :   // forced, e.g. in tests. It should not happen when COMPLETE was set when
      883             :   // incremental marking finished and a regular GC was triggered afterwards,
      884             :   // because should_hurry_ will force a full GC.
     885        2250 :   if (!heap_->mark_compact_collector()->marking_deque()->IsEmpty()) {
     886             :     double start = 0.0;
     887         227 :     if (FLAG_trace_incremental_marking) {
     888           0 :       start = heap_->MonotonicallyIncreasingTimeInMs();
     889           0 :       if (FLAG_trace_incremental_marking) {
     890           0 :         heap()->isolate()->PrintWithTimestamp("[IncrementalMarking] Hurry\n");
     891             :       }
     892             :     }
      893             :     // TODO(gc) Hurry() can mark objects it encounters black, as the
      894             :     // mutator is stopped.
     895             :     ProcessMarkingDeque(0, FORCE_COMPLETION);
     896         227 :     state_ = COMPLETE;
     897         227 :     if (FLAG_trace_incremental_marking) {
     898           0 :       double end = heap_->MonotonicallyIncreasingTimeInMs();
     899           0 :       double delta = end - start;
     900           0 :       if (FLAG_trace_incremental_marking) {
     901             :         heap()->isolate()->PrintWithTimestamp(
     902             :             "[IncrementalMarking] Complete (hurry), spent %d ms.\n",
     903           0 :             static_cast<int>(delta));
     904             :       }
     905             :     }
     906             :   }
     907             : 
     908        1125 :   Object* context = heap_->native_contexts_list();
     909        7359 :   while (!context->IsUndefined(heap_->isolate())) {
     910             :     // GC can happen when the context is not fully initialized,
     911             :     // so the cache can be undefined.
     912             :     HeapObject* cache = HeapObject::cast(
     913             :         Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX));
     914        3984 :     if (!cache->IsUndefined(heap_->isolate())) {
     915        1992 :       if (ObjectMarking::IsGrey(cache, MarkingState::Internal(cache))) {
     916             :         ObjectMarking::GreyToBlack(cache, MarkingState::Internal(cache));
     917             :       }
     918             :     }
     919             :     context = Context::cast(context)->next_context_link();
     920             :   }
     921        1125 : }
     922             : 
     923             : 
     924        1254 : void IncrementalMarking::Stop() {
     925        1282 :   if (IsStopped()) return;
     926        1202 :   if (FLAG_trace_incremental_marking) {
     927             :     int old_generation_size_mb =
     928           6 :         static_cast<int>(heap()->PromotedSpaceSizeOfObjects() / MB);
     929             :     int old_generation_limit_mb =
     930           6 :         static_cast<int>(heap()->old_generation_allocation_limit() / MB);
     931             :     heap()->isolate()->PrintWithTimestamp(
     932             :         "[IncrementalMarking] Stopping: old generation %dMB, limit %dMB, "
     933             :         "overshoot %dMB\n",
     934             :         old_generation_size_mb, old_generation_limit_mb,
     935          12 :         Max(0, old_generation_size_mb - old_generation_limit_mb));
     936             :   }
     937             : 
     938        7212 :   SpaceIterator it(heap_);
     939        8414 :   while (it.has_next()) {
     940        6010 :     Space* space = it.next();
     941       12020 :     if (space == heap_->new_space()) {
     942        1202 :       space->RemoveAllocationObserver(&new_generation_observer_);
     943             :     } else {
     944        4808 :       space->RemoveAllocationObserver(&old_generation_observer_);
     945             :     }
     946             :   }
     947             : 
     948             :   IncrementalMarking::set_should_hurry(false);
     949        1202 :   if (IsMarking()) {
     950             :     PatchIncrementalMarkingRecordWriteStubs(heap_,
     951        1177 :                                             RecordWriteStub::STORE_BUFFER_ONLY);
     952        1177 :     DeactivateIncrementalWriteBarrier();
     953             :   }
     954        2404 :   heap_->isolate()->stack_guard()->ClearGC();
     955        1202 :   state_ = STOPPED;
     956        1202 :   is_compacting_ = false;
     957        1202 :   FinishBlackAllocation();
     958             : }
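
// Aside: Stop() detaches exactly one allocation observer per space (the
// new-generation observer from new space, the old-generation observer from
// all other spaces), so the registration done when marking starts must pair
// them the same way. A minimal standalone sketch of that pairing, using
// hypothetical types in place of the real Space/AllocationObserver API:

#include <algorithm>
#include <vector>

namespace sketch {

struct Observer {};

struct Space {
  bool is_new_space = false;
  std::vector<Observer*> observers;

  void RemoveAllocationObserver(Observer* o) {
    observers.erase(std::remove(observers.begin(), observers.end(), o),
                    observers.end());
  }
};

// Mirrors the loop in Stop(): pick the observer by space identity.
void DetachAll(std::vector<Space*>& spaces, Observer* new_gen,
               Observer* old_gen) {
  for (Space* space : spaces) {
    space->RemoveAllocationObserver(space->is_new_space ? new_gen : old_gen);
  }
}

}  // namespace sketch
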
     959             : 
     960             : 
     961        1125 : void IncrementalMarking::Finalize() {
     962        1125 :   Hurry();
     963        1125 :   Stop();
     964        1125 : }
     965             : 
     966             : 
     967        6663 : void IncrementalMarking::FinalizeMarking(CompletionAction action) {
     968             :   DCHECK(!finalize_marking_completed_);
     969        6657 :   if (FLAG_trace_incremental_marking) {
     970             :     heap()->isolate()->PrintWithTimestamp(
     971             :         "[IncrementalMarking] requesting finalization of incremental "
     972           6 :         "marking.\n");
     973             :   }
     974        6657 :   request_type_ = FINALIZATION;
     975        6657 :   if (action == GC_VIA_STACK_GUARD) {
     976       11686 :     heap_->isolate()->stack_guard()->RequestGC();
     977             :   }
     978        6657 : }
     979             : 
     980             : 
     981        1445 : void IncrementalMarking::MarkingComplete(CompletionAction action) {
     982        1439 :   state_ = COMPLETE;
     983             :   // We will set the stack guard to request a GC now.  This will mean the rest
     984             :   // of the GC gets performed as soon as possible (we can't do a GC here in a
     985             :   // record-write context).  If a few things get allocated between now and then,
     986             :   // that shouldn't make us do a scavenge and keep being incremental; we set the
     987             :   // should-hurry flag to indicate that there can't be much work left to do.
     988             :   set_should_hurry(true);
     989        1439 :   if (FLAG_trace_incremental_marking) {
     990             :     heap()->isolate()->PrintWithTimestamp(
     991           6 :         "[IncrementalMarking] Complete (normal).\n");
     992             :   }
     993        1439 :   request_type_ = COMPLETE_MARKING;
     994        1439 :   if (action == GC_VIA_STACK_GUARD) {
     995        1250 :     heap_->isolate()->stack_guard()->RequestGC();
     996             :   }
     997        1439 : }
     998             : 
     999             : 
    1000       53346 : void IncrementalMarking::Epilogue() {
    1001       53346 :   was_activated_ = false;
    1002       53346 :   finalize_marking_completed_ = false;
    1003       53346 :   incremental_marking_finalization_rounds_ = 0;
    1004       53346 : }
    1005             : 
    1006        6454 : double IncrementalMarking::AdvanceIncrementalMarking(
    1007             :     double deadline_in_ms, CompletionAction completion_action,
    1008       14266 :     ForceCompletionAction force_completion, StepOrigin step_origin) {
    1009             :   HistogramTimerScope incremental_marking_scope(
    1010       25792 :       heap_->isolate()->counters()->gc_incremental_marking());
    1011       19362 :   TRACE_EVENT0("v8", "V8.GCIncrementalMarking");
    1012       32270 :   TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL);
    1013             :   DCHECK(!IsStopped());
    1014             :   DCHECK_EQ(
    1015             :       0, heap_->local_embedder_heap_tracer()->NumberOfCachedWrappersToTrace());
    1016             : 
    1017             :   double remaining_time_in_ms = 0.0;
    1018             :   intptr_t step_size_in_bytes = GCIdleTimeHandler::EstimateMarkingStepSize(
    1019             :       kStepSizeInMs,
    1020        6454 :       heap()->tracer()->IncrementalMarkingSpeedInBytesPerMillisecond());
    1021             : 
    1022             :   const bool incremental_wrapper_tracing =
    1023       12884 :       state_ == MARKING && FLAG_incremental_marking_wrappers &&
    1024       12860 :       heap_->local_embedder_heap_tracer()->InUse();
    1025        7126 :   do {
    1026        7126 :     if (incremental_wrapper_tracing && trace_wrappers_toggle_) {
    1027           0 :       TRACE_GC(heap()->tracer(),
    1028             :                GCTracer::Scope::MC_INCREMENTAL_WRAPPER_TRACING);
    1029             :       const double wrapper_deadline =
    1030           0 :           heap_->MonotonicallyIncreasingTimeInMs() + kStepSizeInMs;
    1031           0 :       if (!heap_->local_embedder_heap_tracer()
    1032           0 :                ->ShouldFinalizeIncrementalMarking()) {
    1033             :         heap_->local_embedder_heap_tracer()->Trace(
    1034             :             wrapper_deadline, EmbedderHeapTracer::AdvanceTracingActions(
    1035             :                                   EmbedderHeapTracer::ForceCompletionAction::
    1036           0 :                                       DO_NOT_FORCE_COMPLETION));
    1037           0 :       }
    1038             :     } else {
    1039             :       Step(step_size_in_bytes, completion_action, force_completion,
    1040        7126 :            step_origin);
    1041             :     }
    1042        7126 :     trace_wrappers_toggle_ = !trace_wrappers_toggle_;
    1043             :     remaining_time_in_ms =
    1044        7126 :         deadline_in_ms - heap()->MonotonicallyIncreasingTimeInMs();
    1045        8511 :   } while (remaining_time_in_ms >= kStepSizeInMs && !IsComplete() &&
    1046         686 :            !heap()->mark_compact_collector()->marking_deque()->IsEmpty());
    1047        6454 :   return remaining_time_in_ms;
    1048             : }
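
// The loop above is a deadline-driven driver: take bounded steps until the
// time budget runs out, marking completes, or the deque is drained. A minimal
// standalone sketch of that shape, assuming a 1 ms step granularity (the
// concrete value of kStepSizeInMs is an assumption here, and now_ms/do_step
// stand in for MonotonicallyIncreasingTimeInMs() and Step()):

#include <functional>

double AdvanceUntilDeadline(double deadline_in_ms,
                            const std::function<double()>& now_ms,
                            const std::function<bool()>& do_step) {
  const double kStepSizeInMs = 1.0;  // assumed step granularity
  double remaining_time_in_ms = 0.0;
  do {
    const bool more_work = do_step();  // one bounded marking step
    remaining_time_in_ms = deadline_in_ms - now_ms();
    if (!more_work) break;  // deque drained or marking complete
  } while (remaining_time_in_ms >= kStepSizeInMs);
  return remaining_time_in_ms;  // leftover budget, as returned above
}
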
    1049             : 
    1050             : 
    1051          71 : void IncrementalMarking::FinalizeSweeping() {
    1052             :   DCHECK(state_ == SWEEPING);
    1053         422 :   if (heap_->mark_compact_collector()->sweeping_in_progress() &&
    1054         138 :       (!FLAG_concurrent_sweeping ||
    1055          69 :        !heap_->mark_compact_collector()->sweeper().AreSweeperTasksRunning())) {
    1056         138 :     heap_->mark_compact_collector()->EnsureSweepingCompleted();
    1057             :   }
    1058         142 :   if (!heap_->mark_compact_collector()->sweeping_in_progress()) {
    1059          71 :     StartMarking();
    1060             :   }
    1061          71 : }
    1062             : 
    1063       33368 : size_t IncrementalMarking::StepSizeToKeepUpWithAllocations() {
    1064             :   // Update bytes_allocated_ based on the allocation counter.
    1065       33368 :   size_t current_counter = heap_->OldGenerationAllocationCounter();
    1066       33368 :   bytes_allocated_ += current_counter - old_generation_allocation_counter_;
    1067       33368 :   old_generation_allocation_counter_ = current_counter;
    1068       33368 :   return bytes_allocated_;
    1069             : }
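
// StepSizeToKeepUpWithAllocations() is pure delta accounting: the heap
// exposes a monotonically increasing allocation counter, and the difference
// between two samples is credited to bytes_allocated_. A standalone model of
// that bookkeeping (hypothetical names, not the real V8 classes):

#include <algorithm>
#include <cstddef>

class AllocationDebt {
 public:
  // Sample the counter; mirrors StepSizeToKeepUpWithAllocations().
  size_t Update(size_t current_counter) {
    bytes_allocated_ += current_counter - last_counter_;
    last_counter_ = current_counter;
    return bytes_allocated_;
  }
  // Credit completed marking work; mirrors the subtraction at the end of
  // AdvanceIncrementalMarkingOnAllocation() below.
  void Repay(size_t bytes_processed) {
    bytes_allocated_ -= std::min(bytes_allocated_, bytes_processed);
  }

 private:
  size_t last_counter_ = 0;
  size_t bytes_allocated_ = 0;
};
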
    1070             : 
    1071       34649 : size_t IncrementalMarking::StepSizeToMakeProgress() {
    1072             :   // We increase step size gradually based on the time passed in order to
    1073             :   // leave marking work to standalone tasks. The ramp-up duration and the
    1074             :   // target step count are chosen based on benchmarks.
    1075             :   const int kRampUpIntervalMs = 300;
    1076             :   const size_t kTargetStepCount = 128;
    1077             :   const size_t kTargetStepCountAtOOM = 16;
    1078       66736 :   size_t oom_slack = heap()->new_space()->Capacity() + 64 * MB;
    1079             : 
    1080       33368 :   if (heap()->IsCloseToOutOfMemory(oom_slack)) {
    1081        1281 :     return heap()->PromotedSpaceSizeOfObjects() / kTargetStepCountAtOOM;
    1082             :   }
    1083             : 
    1084             :   size_t step_size = Max(initial_old_generation_size_ / kTargetStepCount,
    1085       32087 :                          IncrementalMarking::kAllocatedThreshold);
    1086             :   double time_passed_ms =
    1087       32087 :       heap_->MonotonicallyIncreasingTimeInMs() - start_time_ms_;
    1088       32087 :   double factor = Min(time_passed_ms / kRampUpIntervalMs, 1.0);
    1089       32087 :   return static_cast<size_t>(factor * step_size);
    1090             : }
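
// The ramp-up above is easy to check in isolation. A standalone sketch with
// made-up inputs; the real code reads initial_old_generation_size_ and the
// start time from the heap, and the 64 KB floor below is only an assumed
// stand-in for kAllocatedThreshold:

#include <algorithm>
#include <cstddef>
#include <cstdio>

size_t ModelStepSize(size_t initial_old_gen_size, double time_passed_ms) {
  const int kRampUpIntervalMs = 300;    // as in the function above
  const size_t kTargetStepCount = 128;  // as in the function above
  const size_t kAssumedAllocatedThreshold = 64 * 1024;
  size_t step_size = std::max(initial_old_gen_size / kTargetStepCount,
                              kAssumedAllocatedThreshold);
  double factor = std::min(time_passed_ms / kRampUpIntervalMs, 1.0);
  return static_cast<size_t>(factor * step_size);
}

int main() {
  // With a 64 MB old generation the full step is 512 KB; 150 ms into the
  // 300 ms ramp-up the factor is 0.5, so the step is 256 KB.
  std::printf("%zu\n", ModelStepSize(64u * 1024 * 1024, 150.0));  // 262144
  std::printf("%zu\n", ModelStepSize(64u * 1024 * 1024, 600.0));  // 524288
  return 0;
}
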
    1091             : 
    1092       48066 : void IncrementalMarking::AdvanceIncrementalMarkingOnAllocation() {
    1093       81576 :   if (heap_->gc_state() != Heap::NOT_IN_GC || !FLAG_incremental_marking ||
    1094       33510 :       (state_ != SWEEPING && state_ != MARKING)) {
    1095       33510 :     return;
    1096             :   }
    1097             : 
    1098             :   size_t bytes_to_process =
    1099       33368 :       StepSizeToKeepUpWithAllocations() + StepSizeToMakeProgress();
    1100             : 
    1101       33368 :   if (bytes_to_process >= IncrementalMarking::kAllocatedThreshold) {
    1102             :     // The first step after Scavenge will see many allocated bytes.
    1103             :     // Cap the step size to distribute the marking work more uniformly.
    1104             :     size_t max_step_size = GCIdleTimeHandler::EstimateMarkingStepSize(
    1105             :         kMaxStepSizeInMs,
    1106       14556 :         heap()->tracer()->IncrementalMarkingSpeedInBytesPerMillisecond());
    1107             :     bytes_to_process = Min(bytes_to_process, max_step_size);
    1108             : 
    1109             :     size_t bytes_processed = 0;
    1110       14556 :     if (bytes_marked_ahead_of_schedule_ >= bytes_to_process) {
    1111             :       // Steps performed in tasks have put us ahead of schedule.
    1112             :       // We skip processing of the marking deque here and thus
    1113             :       // shift marking time from inside V8 to standalone tasks.
    1114           0 :       bytes_marked_ahead_of_schedule_ -= bytes_to_process;
    1115             :       bytes_processed = bytes_to_process;
    1116             :     } else {
    1117             :       HistogramTimerScope incremental_marking_scope(
    1118       29112 :           heap_->isolate()->counters()->gc_incremental_marking());
    1119       43668 :       TRACE_EVENT0("v8", "V8.GCIncrementalMarking");
    1120       72780 :       TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL);
    1121             :       bytes_processed = Step(bytes_to_process, GC_VIA_STACK_GUARD,
    1122       14556 :                              FORCE_COMPLETION, StepOrigin::kV8);
    1123             :     }
    1124       29112 :     bytes_allocated_ -= Min(bytes_allocated_, bytes_processed);
    1125             :   }
    1126             : }
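
// The scheduling decision above in one sentence: marking done by background
// tasks is banked in bytes_marked_ahead_of_schedule_, and an
// allocation-triggered step first pays for itself out of that bank before
// marking inline. A standalone model of the decision (hypothetical names):

#include <algorithm>
#include <cstddef>

struct MarkingBank {
  size_t banked_bytes = 0;  // marked ahead of schedule by tasks

  // Returns how many bytes still have to be marked inline; zero means the
  // banked task work fully covers this step.
  size_t Charge(size_t bytes_to_process, size_t max_step_size) {
    bytes_to_process = std::min(bytes_to_process, max_step_size);
    if (banked_bytes >= bytes_to_process) {
      banked_bytes -= bytes_to_process;
      return 0;
    }
    return bytes_to_process;
  }
};
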
    1127             : 
    1128       27290 : size_t IncrementalMarking::Step(size_t bytes_to_process,
    1129             :                                 CompletionAction action,
    1130             :                                 ForceCompletionAction completion,
    1131             :                                 StepOrigin step_origin) {
    1132       90037 :   double start = heap_->MonotonicallyIncreasingTimeInMs();
    1133             : 
    1134       27290 :   if (state_ == SWEEPING) {
    1135         355 :     TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL_SWEEPING);
    1136         142 :     FinalizeSweeping();
    1137             :   }
    1138             : 
    1139             :   size_t bytes_processed = 0;
    1140       27290 :   if (state_ == MARKING) {
    1141       54556 :     bytes_processed = ProcessMarkingDeque(bytes_to_process);
    1142       27278 :     if (step_origin == StepOrigin::kTask) {
    1143        5846 :       bytes_marked_ahead_of_schedule_ += bytes_processed;
    1144             :     }
    1145             : 
    1146       54556 :     if (heap_->mark_compact_collector()->marking_deque()->IsEmpty()) {
    1147        8108 :       if (heap_->local_embedder_heap_tracer()
    1148             :               ->ShouldFinalizeIncrementalMarking()) {
    1149        8120 :         if (completion == FORCE_COMPLETION ||
    1150             :             IsIdleMarkingDelayCounterLimitReached()) {
    1151        8096 :           if (!finalize_marking_completed_) {
    1152        6657 :             FinalizeMarking(action);
    1153             :           } else {
    1154        1439 :             MarkingComplete(action);
    1155             :           }
    1156             :         } else {
    1157             :           IncrementIdleMarkingDelayCounter();
    1158             :         }
    1159             :       } else {
    1160           0 :         heap_->local_embedder_heap_tracer()->NotifyV8MarkingDequeWasEmpty();
    1161             :       }
    1162             :     }
    1163             :   }
    1164             : 
    1165       27290 :   double end = heap_->MonotonicallyIncreasingTimeInMs();
    1166       27290 :   double duration = (end - start);
    1167             :   // Note that we report zero bytes here when sweeping was in progress or
    1168             :   // when we just started incremental marking. In these cases we did not
    1169             :   // process the marking deque.
    1170       54580 :   heap_->tracer()->AddIncrementalMarkingStep(duration, bytes_processed);
    1171       27290 :   if (FLAG_trace_incremental_marking) {
    1172             :     heap_->isolate()->PrintWithTimestamp(
    1173             :         "[IncrementalMarking] Step %s %zu bytes (%zu) in %.1f\n",
    1174             :         step_origin == StepOrigin::kV8 ? "in v8" : "in task", bytes_processed,
    1175          42 :         bytes_to_process, duration);
    1176             :   }
    1177       27290 :   return bytes_processed;
    1178             : }
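
// The tail of Step() encodes a two-phase completion handshake: the first
// time the marking deque runs dry, FinalizeMarking() requests a finalization
// round, and only once finalize_marking_completed_ is set does an empty
// deque escalate to MarkingComplete(). A compressed standalone model of that
// branch (hypothetical enum and struct, not the V8 types):

enum class Request { NONE, FINALIZATION, COMPLETE_MARKING };

struct CompletionState {
  bool finalize_completed = false;

  // Mirrors the deque-empty branch of Step().
  Request OnDequeEmpty() {
    if (!finalize_completed) {
      return Request::FINALIZATION;  // corresponds to FinalizeMarking(action)
    }
    return Request::COMPLETE_MARKING;  // corresponds to MarkingComplete(action)
  }
};
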
    1179             : 
    1180             : 
    1181          18 : bool IncrementalMarking::IsIdleMarkingDelayCounterLimitReached() {
    1182          30 :   return idle_marking_delay_counter_ > kMaxIdleMarkingDelayCounter;
    1183             : }
    1184             : 
    1185             : 
    1186           0 : void IncrementalMarking::IncrementIdleMarkingDelayCounter() {
    1187          12 :   idle_marking_delay_counter_++;
    1188           0 : }
    1189             : 
    1190             : 
    1191       53346 : void IncrementalMarking::ClearIdleMarkingDelayCounter() {
    1192       53346 :   idle_marking_delay_counter_ = 0;
    1193       53346 : }
    1194             : 
    1195             : }  // namespace internal
    1196             : }  // namespace v8

Generated by: LCOV version 1.10