LCOV - code coverage report
Current view: top level - src/heap - incremental-marking.h (source / functions) Hit Total Coverage
Test: app.info Lines: 38 39 97.4 %
Date: 2017-10-20 Functions: 14 17 82.4 %

          Line data    Source code
       1             : // Copyright 2012 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #ifndef V8_HEAP_INCREMENTAL_MARKING_H_
       6             : #define V8_HEAP_INCREMENTAL_MARKING_H_
       7             : 
       8             : #include "src/cancelable-task.h"
       9             : #include "src/heap/heap.h"
      10             : #include "src/heap/incremental-marking-job.h"
      11             : #include "src/heap/mark-compact.h"
      12             : 
      13             : namespace v8 {
      14             : namespace internal {
      15             : 
      16             : class HeapObject;
      17             : class MarkBit;
      18             : class Map;
      19             : class Object;
      20             : class PagedSpace;
      21             : 
// Identifies what initiated an incremental marking step: V8's own
// heuristics (kV8) or a posted incremental-marking task (kTask).
enum class StepOrigin { kV8, kTask };
      23             : 
// This marking state is used when concurrent marking is running: mark-bit
// accesses inherited from MarkingStateBase use AccessMode::ATOMIC.
class IncrementalMarkingState final
    : public MarkingStateBase<IncrementalMarkingState, AccessMode::ATOMIC> {
 public:
  // Returns the marking bitmap of |chunk|, which is laid out directly after
  // the MemoryChunk header.
  Bitmap* bitmap(const MemoryChunk* chunk) const {
    return Bitmap::FromAddress(chunk->address() + MemoryChunk::kHeaderSize);
  }

  // Concurrent marking uses local live bytes.
  // NOTE(review): this is a plain, non-atomic += on live_byte_count_;
  // presumably updates to a given chunk are serialized (or the counter type
  // is atomic) — confirm against the concurrent marker before assuming
  // cross-thread safety.
  void IncrementLiveBytes(MemoryChunk* chunk, intptr_t by) {
    chunk->live_byte_count_ += by;
  }

  // Current live-byte tally for |chunk| (non-atomic read).
  intptr_t live_bytes(MemoryChunk* chunk) const {
    return chunk->live_byte_count_;
  }

  // Overwrites the live-byte tally, e.g. when resetting a chunk.
  void SetLiveBytes(MemoryChunk* chunk, intptr_t value) {
    chunk->live_byte_count_ = value;
  }
};
      45             : 
// Drives old-generation marking in small steps interleaved with the mutator,
// so most marking work happens outside the mark-compact pause. Steps are
// triggered by allocation observers (see Observer below), tasks
// (IncrementalMarkingJob) and explicit calls (Step/AdvanceIncrementalMarking).
class V8_EXPORT_PRIVATE IncrementalMarking {
 public:
  // Lifecycle of one marking cycle. Only change via SetState(), which keeps
  // the heap's is-marking flag in sync; IsMarking() relies on the ordering
  // MARKING < COMPLETE.
  enum State { STOPPED, SWEEPING, MARKING, COMPLETE };

  // Whether reaching completion may request a GC through the stack guard.
  enum CompletionAction { GC_VIA_STACK_GUARD, NO_GC_VIA_STACK_GUARD };

  // Whether ProcessMarkingWorklist must drain the worklist completely.
  enum ForceCompletionAction { FORCE_COMPLETION, DO_NOT_FORCE_COMPLETION };

  // Kind of GC work that incremental marking is requesting from the heap.
  enum GCRequestType { NONE, COMPLETE_MARKING, FINALIZATION };

  // With concurrent marking compiled in, the atomic IncrementalMarkingState
  // is required; otherwise the cheaper non-atomic state suffices.
#ifdef V8_CONCURRENT_MARKING
  using MarkingState = IncrementalMarkingState;
#else
  using MarkingState = MajorNonAtomicMarkingState;
#endif
  using AtomicMarkingState = MajorAtomicMarkingState;
  using NonAtomicMarkingState = MajorNonAtomicMarkingState;

  // RAII scope that pauses black allocation (allocating new objects directly
  // black) if it is active on entry, and restarts it on scope exit.
  class PauseBlackAllocationScope {
   public:
    explicit PauseBlackAllocationScope(IncrementalMarking* marking)
        : marking_(marking), paused_(false) {
      if (marking_->black_allocation()) {
        paused_ = true;
        marking_->PauseBlackAllocation();
      }
    }

    ~PauseBlackAllocationScope() {
      if (paused_) {
        marking_->StartBlackAllocation();
      }
    }

   private:
    IncrementalMarking* marking_;
    bool paused_;
  };

  // It's hard to know how much work the incremental marker should do to make
  // progress in the face of the mutator creating new work for it.  We start
  // off at a moderate rate of work and gradually increase the speed of the
  // incremental marker until it completes.
  // Do some marking every time this much memory has been allocated or that many
  // heavy (color-checking) write barriers have been invoked.
  static const size_t kYoungGenerationAllocatedThreshold = 64 * KB;
  static const size_t kOldGenerationAllocatedThreshold = 256 * KB;
  static const size_t kMinStepSizeInBytes = 64 * KB;

  // Wall-clock budget for a single step, and the hard cap per step.
  static const int kStepSizeInMs = 1;
  static const int kMaxStepSizeInMs = 5;

  // Debug builds activate immediately (threshold 0) so incremental marking
  // is exercised by small tests; release builds wait for real growth.
#ifndef DEBUG
  static const intptr_t kActivationThreshold = 8 * MB;
#else
  static const intptr_t kActivationThreshold = 0;
#endif

#ifdef V8_CONCURRENT_MARKING
  static const AccessMode kAtomicity = AccessMode::ATOMIC;
#else
  static const AccessMode kAtomicity = AccessMode::NON_ATOMIC;
#endif

  IncrementalMarking(Heap* heap,
                     MarkCompactCollector::MarkingWorklist* marking_worklist);

  MarkingState* marking_state() { return &marking_state_; }

  AtomicMarkingState* atomic_marking_state() { return &atomic_marking_state_; }

  NonAtomicMarkingState* non_atomic_marking_state() {
    return &non_atomic_marking_state_;
  }

  void NotifyLeftTrimming(HeapObject* from, HeapObject* to);

  // Propagates the marking color of |from| to |to|. |to| is expected to be
  // white on entry unless black allocation already colored it black, in
  // which case nothing is done. A white |from| leaves |to| white.
  V8_INLINE void TransferColor(HeapObject* from, HeapObject* to) {
    if (atomic_marking_state()->IsBlack(to)) {
      DCHECK(black_allocation());
      return;
    }

    DCHECK(atomic_marking_state()->IsWhite(to));
    if (atomic_marking_state()->IsGrey(from)) {
      bool success = atomic_marking_state()->WhiteToGrey(to);
      DCHECK(success);
      USE(success);
    } else if (atomic_marking_state()->IsBlack(from)) {
      bool success = atomic_marking_state()->WhiteToBlack(to);
      DCHECK(success);
      USE(success);
    }
  }

  // Any state other than STOPPED implies the incremental-marking flag is on.
  State state() const {
    DCHECK(state_ == STOPPED || FLAG_incremental_marking);
    return state_;
  }

  bool should_hurry() const { return should_hurry_; }
  void set_should_hurry(bool val) { should_hurry_ = val; }

  bool finalize_marking_completed() const {
    return finalize_marking_completed_;
  }

  void SetWeakClosureWasOverApproximatedForTesting(bool val) {
    finalize_marking_completed_ = val;
  }

  inline bool IsStopped() const { return state() == STOPPED; }

  inline bool IsSweeping() const { return state() == SWEEPING; }

  // True in both MARKING and COMPLETE states (relies on enum ordering).
  inline bool IsMarking() const { return state() >= MARKING; }

  inline bool IsMarkingIncomplete() const { return state() == MARKING; }

  inline bool IsComplete() const { return state() == COMPLETE; }

  inline bool IsReadyToOverApproximateWeakClosure() const {
    return request_type_ == FINALIZATION && !finalize_marking_completed_;
  }

  inline bool NeedsFinalization() {
    return IsMarking() &&
           (request_type_ == FINALIZATION || request_type_ == COMPLETE_MARKING);
  }

  GCRequestType request_type() const { return request_type_; }

  void reset_request_type() { request_type_ = NONE; }

  bool CanBeActivated();

  bool WasActivated();

  void Start(GarbageCollectionReason gc_reason);

  void FinalizeIncrementally();

  void UpdateMarkingWorklistAfterScavenge();

  void Hurry();

  void Finalize();

  void Stop();

  void FinalizeMarking(CompletionAction action);

  void MarkingComplete(CompletionAction action);

  void Epilogue();

  // Performs incremental marking steps until deadline_in_ms is reached. It
  // returns the remaining time that cannot be used for incremental marking
  // anymore because a single step would exceed the deadline.
  double AdvanceIncrementalMarking(double deadline_in_ms,
                                   CompletionAction completion_action,
                                   StepOrigin step_origin);

  void FinalizeSweeping();

  // Performs one bounded chunk of marking work; returns bytes processed.
  size_t Step(size_t bytes_to_process, CompletionAction action,
              StepOrigin step_origin);

  inline void RestartIfNotMarking();

  // Write-barrier entry point invoked from generated code.
  static int RecordWriteFromCode(HeapObject* obj, Object** slot,
                                 Isolate* isolate);

  // Record a slot for compaction.  Returns false for objects that are
  // guaranteed to be rescanned or not guaranteed to survive.
  //
  // No slots in white objects should be recorded, as some slots are typed and
  // cannot be interpreted correctly if the underlying object does not survive
  // the incremental cycle (stays white).
  V8_INLINE bool BaseRecordWrite(HeapObject* obj, Object* value);
  V8_INLINE void RecordWrite(HeapObject* obj, Object** slot, Object* value);
  V8_INLINE void RecordWriteIntoCode(Code* host, RelocInfo* rinfo,
                                     Object* value);
  V8_INLINE void RecordWrites(HeapObject* obj);

  // Out-of-line slow paths for the inline barriers above.
  void RecordWriteSlow(HeapObject* obj, Object** slot, Object* value);
  void RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo, Object* value);

  // Returns true if the function succeeds in transitioning the object
  // from white to grey.
  bool WhiteToGreyAndPush(HeapObject* obj);

  // This function is used to color the object black before it undergoes an
  // unsafe layout change. This is a part of synchronization protocol with
  // the concurrent marker.
  void MarkBlackAndPush(HeapObject* obj);

  inline void SetOldSpacePageFlags(MemoryChunk* chunk) {
    SetOldSpacePageFlags(chunk, IsMarking(), IsCompacting());
  }

  inline void SetNewSpacePageFlags(Page* chunk) {
    SetNewSpacePageFlags(chunk, IsMarking());
  }

  bool IsCompacting() { return IsMarking() && is_compacting_; }

  void ActivateGeneratedStub(Code* stub);

  // Remembers how many bytes of a large object remain unscanned so the next
  // step can resume scanning where this one left off.
  void NotifyIncompleteScanOfObject(int unscanned_bytes) {
    unscanned_bytes_of_large_object_ = unscanned_bytes;
  }

  void ProcessBlackAllocatedObject(HeapObject* obj);

  Heap* heap() const { return heap_; }

  IncrementalMarkingJob* incremental_marking_job() {
    return &incremental_marking_job_;
  }

  bool black_allocation() { return black_allocation_; }

  void StartBlackAllocationForTesting() {
    if (!black_allocation_) {
      StartBlackAllocation();
    }
  }

  void AbortBlackAllocation();

  MarkCompactCollector::MarkingWorklist* marking_worklist() const {
    return marking_worklist_;
  }

  void Deactivate();

 private:
  // Allocation observer that triggers a marking step after every
  // |step_size| bytes allocated by the mutator.
  class Observer : public AllocationObserver {
   public:
    Observer(IncrementalMarking& incremental_marking, intptr_t step_size)
        : AllocationObserver(step_size),
          incremental_marking_(incremental_marking) {}

    void Step(int bytes_allocated, Address, size_t) override;

   private:
    IncrementalMarking& incremental_marking_;
  };

  static void SetOldSpacePageFlags(MemoryChunk* chunk, bool is_marking,
                                   bool is_compacting);

  static void SetNewSpacePageFlags(MemoryChunk* chunk, bool is_marking);

  void StartMarking();

  // Black-allocation lifecycle; see PauseBlackAllocationScope above.
  void StartBlackAllocation();
  void PauseBlackAllocation();
  void FinishBlackAllocation();

  void MarkRoots();
  // Retain dying maps for <FLAG_retain_maps_for_n_gc> garbage collections to
  // increase chances of reusing of map transition tree in future.
  void RetainMaps();

  void ActivateIncrementalWriteBarrier(PagedSpace* space);
  void ActivateIncrementalWriteBarrier(NewSpace* space);
  void ActivateIncrementalWriteBarrier();

  void DeactivateIncrementalWriteBarrierForSpace(PagedSpace* space);
  void DeactivateIncrementalWriteBarrierForSpace(NewSpace* space);
  void DeactivateIncrementalWriteBarrier();

  // Drains up to |bytes_to_process| bytes from the marking worklist;
  // FORCE_COMPLETION drains it fully regardless of the budget.
  V8_INLINE intptr_t ProcessMarkingWorklist(
      intptr_t bytes_to_process,
      ForceCompletionAction completion = DO_NOT_FORCE_COMPLETION);

  V8_INLINE bool IsFixedArrayWithProgressBar(HeapObject* object);

  // Visits the object and returns its size.
  V8_INLINE int VisitObject(Map* map, HeapObject* obj);

  void RevisitObject(HeapObject* obj);

  void IncrementIdleMarkingDelayCounter();

  void AdvanceIncrementalMarkingOnAllocation();

  size_t StepSizeToKeepUpWithAllocations();
  size_t StepSizeToMakeProgress();

  // Sole writer of state_: also keeps the heap's is-marking flag in sync.
  void SetState(State s) {
    state_ = s;
    heap_->SetIsMarkingFlag(s >= MARKING);
  }

  Heap* const heap_;
  MarkCompactCollector::MarkingWorklist* const marking_worklist_;

  // Bookkeeping for step-size scheduling across a marking cycle.
  double start_time_ms_;
  size_t initial_old_generation_size_;
  size_t old_generation_allocation_counter_;
  size_t bytes_allocated_;
  size_t bytes_marked_ahead_of_schedule_;
  size_t unscanned_bytes_of_large_object_;

  // Must use SetState() above to update state_
  State state_;

  bool is_compacting_;
  bool should_hurry_;
  bool was_activated_;
  bool black_allocation_;
  bool finalize_marking_completed_;
  bool trace_wrappers_toggle_;
  IncrementalMarkingJob incremental_marking_job_;

  GCRequestType request_type_;

  // Allocation observers for the new and old generations (see Observer).
  Observer new_generation_observer_;
  Observer old_generation_observer_;

  MarkingState marking_state_;
  AtomicMarkingState atomic_marking_state_;
  NonAtomicMarkingState non_atomic_marking_state_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(IncrementalMarking);
};
     375             : }  // namespace internal
     376             : }  // namespace v8
     377             : 
     378             : #endif  // V8_HEAP_INCREMENTAL_MARKING_H_

Generated by: LCOV version 1.10