LCOV - code coverage report
Current view: top level - src/heap - incremental-marking.h (source / functions) Hit Total Coverage
Test: app.info Lines: 20 21 95.2 %
Date: 2019-04-17 Functions: 0 2 0.0 %

          Line data    Source code
       1             : // Copyright 2012 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #ifndef V8_HEAP_INCREMENTAL_MARKING_H_
       6             : #define V8_HEAP_INCREMENTAL_MARKING_H_
       7             : 
       8             : #include "src/cancelable-task.h"
       9             : #include "src/heap/heap.h"
      10             : #include "src/heap/incremental-marking-job.h"
      11             : #include "src/heap/mark-compact.h"
      12             : 
      13             : namespace v8 {
      14             : namespace internal {
      15             : 
      16             : class HeapObject;
      17             : class MarkBit;
      18             : class Map;
      19             : class Object;
      20             : class PagedSpace;
      21             : 
      22             : enum class StepOrigin { kV8, kTask };
      23             : enum class StepResult {
      24             :   kNoImmediateWork,
      25             :   kMoreWorkRemaining,
      26             :   kWaitingForFinalization
      27             : };
      28             : 
      29       62405 : class V8_EXPORT_PRIVATE IncrementalMarking {
      30             :  public:
      31             :   enum State { STOPPED, SWEEPING, MARKING, COMPLETE };
      32             : 
      33             :   enum CompletionAction { GC_VIA_STACK_GUARD, NO_GC_VIA_STACK_GUARD };
      34             : 
      35             :   enum ForceCompletionAction { FORCE_COMPLETION, DO_NOT_FORCE_COMPLETION };
      36             : 
      37             :   enum GCRequestType { NONE, COMPLETE_MARKING, FINALIZATION };
      38             : 
      39             : #ifdef V8_CONCURRENT_MARKING
      40             :   using MarkingState = IncrementalMarkingState;
      41             : #else
      42             :   using MarkingState = MajorNonAtomicMarkingState;
      43             : #endif  // V8_CONCURRENT_MARKING
      44             :   using AtomicMarkingState = MajorAtomicMarkingState;
      45             :   using NonAtomicMarkingState = MajorNonAtomicMarkingState;
      46             : 
      47             :   class PauseBlackAllocationScope {
      48             :    public:
      49             :     explicit PauseBlackAllocationScope(IncrementalMarking* marking)
      50             :         : marking_(marking), paused_(false) {
      51       27814 :       if (marking_->black_allocation()) {
      52             :         paused_ = true;
      53        1064 :         marking_->PauseBlackAllocation();
      54             :       }
      55             :     }
      56             : 
      57             :     ~PauseBlackAllocationScope() {
      58       27814 :       if (paused_) {
      59        1064 :         marking_->StartBlackAllocation();
      60             :       }
      61             :     }
      62             : 
      63             :    private:
      64             :     IncrementalMarking* marking_;
      65             :     bool paused_;
      66             :   };
      67             : 
      68             :   // It's hard to know how much work the incremental marker should do to make
       69             :   // progress in the face of the mutator creating new work for it.  We start
       70             :   // off at a moderate rate of work and gradually increase the speed of the
      71             :   // incremental marker until it completes.
      72             :   // Do some marking every time this much memory has been allocated or that many
      73             :   // heavy (color-checking) write barriers have been invoked.
      74             :   static const size_t kYoungGenerationAllocatedThreshold = 64 * KB;
      75             :   static const size_t kOldGenerationAllocatedThreshold = 256 * KB;
      76             :   static const size_t kMinStepSizeInBytes = 64 * KB;
      77             : 
      78             :   static constexpr double kStepSizeInMs = 1;
      79             :   static constexpr double kMaxStepSizeInMs = 5;
      80             : 
      81             : #ifndef DEBUG
      82             :   static const intptr_t kActivationThreshold = 8 * MB;
      83             : #else
      84             :   static const intptr_t kActivationThreshold = 0;
      85             : #endif
      86             : 
      87             : #ifdef V8_CONCURRENT_MARKING
      88             :   static const AccessMode kAtomicity = AccessMode::ATOMIC;
      89             : #else
      90             :   static const AccessMode kAtomicity = AccessMode::NON_ATOMIC;
      91             : #endif
      92             : 
      93             :   IncrementalMarking(Heap* heap,
      94             :                      MarkCompactCollector::MarkingWorklist* marking_worklist,
      95             :                      WeakObjects* weak_objects);
      96             : 
      97    64541689 :   MarkingState* marking_state() { return &marking_state_; }
      98             : 
      99             :   AtomicMarkingState* atomic_marking_state() { return &atomic_marking_state_; }
     100             : 
     101             :   NonAtomicMarkingState* non_atomic_marking_state() {
     102             :     return &non_atomic_marking_state_;
     103             :   }
     104             : 
     105             :   void NotifyLeftTrimming(HeapObject from, HeapObject to);
     106             : 
     107             :   V8_INLINE void TransferColor(HeapObject from, HeapObject to);
     108             : 
     109             :   State state() const {
     110             :     DCHECK(state_ == STOPPED || FLAG_incremental_marking);
     111             :     return state_;
     112             :   }
     113             : 
     114             :   bool should_hurry() const { return should_hurry_; }
     115       49877 :   void set_should_hurry(bool val) { should_hurry_ = val; }
     116             : 
     117             :   bool finalize_marking_completed() const {
     118             :     return finalize_marking_completed_;
     119             :   }
     120             : 
     121             :   void SetWeakClosureWasOverApproximatedForTesting(bool val) {
     122           5 :     finalize_marking_completed_ = val;
     123             :   }
     124             : 
     125         524 :   inline bool IsStopped() const { return state() == STOPPED; }
     126             : 
     127             :   inline bool IsSweeping() const { return state() == SWEEPING; }
     128             : 
     129    77622889 :   inline bool IsMarking() const { return state() >= MARKING; }
     130             : 
     131             :   inline bool IsMarkingIncomplete() const { return state() == MARKING; }
     132             : 
     133             :   inline bool IsComplete() const { return state() == COMPLETE; }
     134             : 
     135             :   inline bool IsReadyToOverApproximateWeakClosure() const {
     136       38699 :     return request_type_ == FINALIZATION && !finalize_marking_completed_;
     137             :   }
     138             : 
     139             :   inline bool NeedsFinalization() {
     140       30550 :     return IsMarking() &&
     141        1832 :            (request_type_ == FINALIZATION || request_type_ == COMPLETE_MARKING);
     142             :   }
     143             : 
     144             :   GCRequestType request_type() const { return request_type_; }
     145             : 
     146       18805 :   void reset_request_type() { request_type_ = NONE; }
     147             : 
     148             :   bool CanBeActivated();
     149             : 
     150             :   bool WasActivated();
     151             : 
     152             :   void Start(GarbageCollectionReason gc_reason);
     153             : 
     154             :   void FinalizeIncrementally();
     155             : 
     156             :   void UpdateMarkingWorklistAfterScavenge();
     157             :   void UpdateWeakReferencesAfterScavenge();
     158             :   void UpdateMarkedBytesAfterScavenge(size_t dead_bytes_in_new_space);
     159             : 
     160             :   void Hurry();
     161             : 
     162             :   void Finalize();
     163             : 
     164             :   void Stop();
     165             : 
     166             :   void FinalizeMarking(CompletionAction action);
     167             : 
     168             :   void MarkingComplete(CompletionAction action);
     169             : 
     170             :   void Epilogue();
     171             : 
      172             :   // Performs incremental marking steps and returns before the deadline_in_ms is
      173             :   // reached. It may return earlier if the marker is already ahead of the
      174             :   // marking schedule, which is indicated with StepResult::kNoImmediateWork.
     175             :   StepResult AdvanceWithDeadline(double deadline_in_ms,
     176             :                                  CompletionAction completion_action,
     177             :                                  StepOrigin step_origin);
     178             : 
     179             :   void FinalizeSweeping();
     180             : 
     181             :   StepResult V8Step(double max_step_size_in_ms, CompletionAction action,
     182             :                     StepOrigin step_origin);
     183             : 
     184             :   bool ShouldDoEmbedderStep();
     185             :   StepResult EmbedderStep(double duration);
     186             : 
     187             :   inline void RestartIfNotMarking();
     188             : 
     189             :   // {raw_obj} and {slot_address} are raw Address values instead of a
     190             :   // HeapObject and a MaybeObjectSlot because this is called from
     191             :   // generated code via ExternalReference.
     192             :   static int RecordWriteFromCode(Address raw_obj, Address slot_address,
     193             :                                  Isolate* isolate);
     194             : 
     195             :   // Record a slot for compaction.  Returns false for objects that are
     196             :   // guaranteed to be rescanned or not guaranteed to survive.
     197             :   //
     198             :   // No slots in white objects should be recorded, as some slots are typed and
     199             :   // cannot be interpreted correctly if the underlying object does not survive
     200             :   // the incremental cycle (stays white).
     201             :   V8_INLINE bool BaseRecordWrite(HeapObject obj, Object value);
     202             :   V8_INLINE void RecordWrite(HeapObject obj, ObjectSlot slot, Object value);
     203             :   V8_INLINE void RecordMaybeWeakWrite(HeapObject obj, MaybeObjectSlot slot,
     204             :                                       MaybeObject value);
     205             :   void RevisitObject(HeapObject obj);
      206             :   // Ensures that all descriptors in range [0, number_of_own_descriptors)
      207             :   // are visited.
     208             :   void VisitDescriptors(HeapObject host, DescriptorArray array,
     209             :                         int number_of_own_descriptors);
     210             : 
     211             :   void RecordWriteSlow(HeapObject obj, HeapObjectSlot slot, Object value);
     212             :   void RecordWriteIntoCode(Code host, RelocInfo* rinfo, HeapObject value);
     213             : 
     214             :   // Returns true if the function succeeds in transitioning the object
     215             :   // from white to grey.
     216             :   bool WhiteToGreyAndPush(HeapObject obj);
     217             : 
     218             :   // This function is used to color the object black before it undergoes an
     219             :   // unsafe layout change. This is a part of synchronization protocol with
     220             :   // the concurrent marker.
     221             :   void MarkBlackAndVisitObjectDueToLayoutChange(HeapObject obj);
     222             : 
     223     4812079 :   bool IsCompacting() { return IsMarking() && is_compacting_; }
     224             : 
     225             :   void ProcessBlackAllocatedObject(HeapObject obj);
     226             : 
     227             :   Heap* heap() const { return heap_; }
     228             : 
     229             :   IncrementalMarkingJob* incremental_marking_job() {
     230     1065668 :     return &incremental_marking_job_;
     231             :   }
     232             : 
     233             :   bool black_allocation() { return black_allocation_; }
     234             : 
     235             :   void StartBlackAllocationForTesting() {
     236          35 :     if (!black_allocation_) {
     237           0 :       StartBlackAllocation();
     238             :     }
     239             :   }
     240             : 
     241             :   MarkCompactCollector::MarkingWorklist* marking_worklist() const {
     242             :     return marking_worklist_;
     243             :   }
     244             : 
     245             :   void Deactivate();
     246             : 
     247             :   // Ensures that the given region is black allocated if it is in the old
     248             :   // generation.
     249             :   void EnsureBlackAllocated(Address allocated, size_t size);
     250             : 
     251             :  private:
     252      187215 :   class Observer : public AllocationObserver {
     253             :    public:
     254             :     Observer(IncrementalMarking& incremental_marking, intptr_t step_size)
     255             :         : AllocationObserver(step_size),
     256      124844 :           incremental_marking_(incremental_marking) {}
     257             : 
     258             :     void Step(int bytes_allocated, Address, size_t) override;
     259             : 
     260             :    private:
     261             :     IncrementalMarking& incremental_marking_;
     262             :   };
     263             : 
     264             :   void StartMarking();
     265             : 
     266             :   void StartBlackAllocation();
     267             :   void PauseBlackAllocation();
     268             :   void FinishBlackAllocation();
     269             : 
     270             :   void MarkRoots();
     271             :   bool ShouldRetainMap(Map map, int age);
     272             :   // Retain dying maps for <FLAG_retain_maps_for_n_gc> garbage collections to
     273             :   // increase chances of reusing of map transition tree in future.
     274             :   void RetainMaps();
     275             : 
     276             :   void ActivateIncrementalWriteBarrier(PagedSpace* space);
     277             :   void ActivateIncrementalWriteBarrier(NewSpace* space);
     278             :   void ActivateIncrementalWriteBarrier();
     279             : 
     280             :   void DeactivateIncrementalWriteBarrierForSpace(PagedSpace* space);
     281             :   void DeactivateIncrementalWriteBarrierForSpace(NewSpace* space);
     282             :   void DeactivateIncrementalWriteBarrier();
     283             : 
     284             :   V8_INLINE intptr_t ProcessMarkingWorklist(
     285             :       intptr_t bytes_to_process,
     286             :       ForceCompletionAction completion = DO_NOT_FORCE_COMPLETION);
     287             : 
     288             :   V8_INLINE bool IsFixedArrayWithProgressBar(HeapObject object);
     289             : 
     290             :   // Visits the object and returns its size.
     291             :   V8_INLINE int VisitObject(Map map, HeapObject obj);
     292             : 
     293             :   // Updates scheduled_bytes_to_mark_ to ensure marking progress based on
     294             :   // time.
     295             :   void ScheduleBytesToMarkBasedOnTime(double time_ms);
     296             :   // Updates scheduled_bytes_to_mark_ to ensure marking progress based on
     297             :   // allocations.
     298             :   void ScheduleBytesToMarkBasedOnAllocation();
     299             :   // Helper functions for ScheduleBytesToMarkBasedOnAllocation.
     300             :   size_t StepSizeToKeepUpWithAllocations();
     301             :   size_t StepSizeToMakeProgress();
     302             :   void AddScheduledBytesToMark(size_t bytes_to_mark);
     303             : 
     304             :   // Schedules more bytes to mark so that the marker is no longer ahead
     305             :   // of schedule.
     306             :   void FastForwardSchedule();
     307             :   void FastForwardScheduleIfCloseToFinalization();
     308             : 
     309             :   // Fetches marked byte counters from the concurrent marker.
     310             :   void FetchBytesMarkedConcurrently();
     311             : 
     312             :   // Returns the bytes to mark in the current step based on the scheduled
     313             :   // bytes and already marked bytes.
     314             :   size_t ComputeStepSizeInBytes(StepOrigin step_origin);
     315             : 
     316             :   void AdvanceOnAllocation();
     317             : 
     318             :   void SetState(State s) {
     319      164888 :     state_ = s;
     320             :     heap_->SetIsMarkingFlag(s >= MARKING);
     321             :   }
     322             : 
     323             :   Heap* const heap_;
     324             :   MarkCompactCollector::MarkingWorklist* const marking_worklist_;
     325             :   WeakObjects* weak_objects_;
     326             : 
     327             :   double start_time_ms_;
     328             :   size_t initial_old_generation_size_;
     329             :   size_t old_generation_allocation_counter_;
     330             :   size_t bytes_marked_;
     331             :   size_t scheduled_bytes_to_mark_;
     332             :   double schedule_update_time_ms_;
      333             :   // A sample of concurrent_marking()->TotalMarkedBytes() at the last
      334             :   // incremental marking step. It is used for updating
      335             :   // bytes_marked_ with the contribution of concurrent marking.
     336             :   size_t bytes_marked_concurrently_;
     337             : 
     338             :   // Must use SetState() above to update state_
     339             :   State state_;
     340             : 
     341             :   bool is_compacting_;
     342             :   bool should_hurry_;
     343             :   bool was_activated_;
     344             :   bool black_allocation_;
     345             :   bool finalize_marking_completed_;
     346             :   IncrementalMarkingJob incremental_marking_job_;
     347             : 
     348             :   GCRequestType request_type_;
     349             : 
     350             :   Observer new_generation_observer_;
     351             :   Observer old_generation_observer_;
     352             : 
     353             :   MarkingState marking_state_;
     354             :   AtomicMarkingState atomic_marking_state_;
     355             :   NonAtomicMarkingState non_atomic_marking_state_;
     356             : 
     357             :   DISALLOW_IMPLICIT_CONSTRUCTORS(IncrementalMarking);
     358             : };
     359             : }  // namespace internal
     360             : }  // namespace v8
     361             : 
     362             : #endif  // V8_HEAP_INCREMENTAL_MARKING_H_

Generated by: LCOV version 1.10