Line data Source code
1 : // Copyright 2012 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #ifndef V8_HEAP_INCREMENTAL_MARKING_H_
6 : #define V8_HEAP_INCREMENTAL_MARKING_H_
7 :
8 : #include "src/cancelable-task.h"
9 : #include "src/heap/heap.h"
10 : #include "src/heap/incremental-marking-job.h"
11 : #include "src/heap/mark-compact.h"
12 :
13 : namespace v8 {
14 : namespace internal {
15 :
16 : class HeapObject;
17 : class MarkBit;
18 : class Map;
19 : class Object;
20 : class PagedSpace;
21 :
22 : enum class StepOrigin { kV8, kTask };  // Whether a marking step was initiated from V8 execution or from a task.
23 : enum class StepResult { kDone, kMoreWorkRemaining };  // kDone: marker finished or is ahead of schedule (see AdvanceWithDeadline).
24 :
25 : class V8_EXPORT_PRIVATE IncrementalMarking {
26 : public:
27 : enum State { STOPPED, SWEEPING, MARKING, COMPLETE };  // Cycle phases; change only via SetState() so heap_'s marking flag stays in sync.
28 :
29 : enum CompletionAction { GC_VIA_STACK_GUARD, NO_GC_VIA_STACK_GUARD };
30 :
31 : enum ForceCompletionAction { FORCE_COMPLETION, DO_NOT_FORCE_COMPLETION };
32 :
33 : enum GCRequestType { NONE, COMPLETE_MARKING, FINALIZATION };
34 :
35 : #ifdef V8_CONCURRENT_MARKING
36 : using MarkingState = IncrementalMarkingState;
37 : #else
38 : using MarkingState = MajorNonAtomicMarkingState;
39 : #endif // V8_CONCURRENT_MARKING
40 : using AtomicMarkingState = MajorAtomicMarkingState;
41 : using NonAtomicMarkingState = MajorNonAtomicMarkingState;
42 :
43 : class PauseBlackAllocationScope {  // RAII: pauses black allocation while alive if it was active, and restarts it on destruction.
44 : public:
45 : explicit PauseBlackAllocationScope(IncrementalMarking* marking)
46 : : marking_(marking), paused_(false) {
47 23490 : if (marking_->black_allocation()) {
48 : paused_ = true;
49 789 : marking_->PauseBlackAllocation();
50 : }
51 : }
52 :
53 : ~PauseBlackAllocationScope() {
54 23490 : if (paused_) {
55 789 : marking_->StartBlackAllocation();
56 : }
57 : }
58 :
59 : private:
60 : IncrementalMarking* marking_;
61 : bool paused_;
62 : };
63 :
64 : // It's hard to know how much work the incremental marker should do to make
65 : // progress in the face of the mutator creating new work for it. We start
66 : // off at a moderate rate of work and gradually increase the speed of the
67 : // incremental marker until it completes.
68 : // Do some marking every time this much memory has been allocated or that many
69 : // heavy (color-checking) write barriers have been invoked.
70 : static const size_t kYoungGenerationAllocatedThreshold = 64 * KB;
71 : static const size_t kOldGenerationAllocatedThreshold = 256 * KB;
72 : static const size_t kMinStepSizeInBytes = 64 * KB;
73 :
74 : static constexpr double kStepSizeInMs = 1;
75 : static constexpr double kMaxStepSizeInMs = 5;
76 :
77 : #ifndef DEBUG
78 : static const intptr_t kActivationThreshold = 8 * MB;
79 : #else
80 : static const intptr_t kActivationThreshold = 0;  // Debug builds activate immediately to maximize coverage of marking code.
81 : #endif
82 :
83 : #ifdef V8_CONCURRENT_MARKING
84 : static const AccessMode kAtomicity = AccessMode::ATOMIC;
85 : #else
86 : static const AccessMode kAtomicity = AccessMode::NON_ATOMIC;
87 : #endif
88 :
89 : IncrementalMarking(Heap* heap,
90 : MarkCompactCollector::MarkingWorklist* marking_worklist,
91 : WeakObjects* weak_objects);
92 :
93 51747139 : MarkingState* marking_state() { return &marking_state_; }
94 :
95 : AtomicMarkingState* atomic_marking_state() { return &atomic_marking_state_; }
96 :
97 : NonAtomicMarkingState* non_atomic_marking_state() {
98 : return &non_atomic_marking_state_;
99 : }
100 :
101 : void NotifyLeftTrimming(HeapObject from, HeapObject to);
102 :
103 : V8_INLINE void TransferColor(HeapObject from, HeapObject to);
104 :
105 : State state() const {
106 : DCHECK(state_ == STOPPED || FLAG_incremental_marking);
107 : return state_;
108 : }
109 :
110 : bool should_hurry() const { return should_hurry_; }
111 47017 : void set_should_hurry(bool val) { should_hurry_ = val; }
112 :
113 : bool finalize_marking_completed() const {
114 : return finalize_marking_completed_;
115 : }
116 :
117 : void SetWeakClosureWasOverApproximatedForTesting(bool val) {
118 5 : finalize_marking_completed_ = val;
119 : }
120 :
121 2939211 : inline bool IsStopped() const { return state() == STOPPED; }
122 :
123 74817 : inline bool IsSweeping() const { return state() == SWEEPING; }
124 :
125 62362778 : inline bool IsMarking() const { return state() >= MARKING; }
126 :
127 : inline bool IsMarkingIncomplete() const { return state() == MARKING; }
128 :
129 1002051 : inline bool IsComplete() const { return state() == COMPLETE; }
130 :
131 : inline bool IsReadyToOverApproximateWeakClosure() const {
132 40278 : return request_type_ == FINALIZATION && !finalize_marking_completed_;
133 : }
134 :
135 : inline bool NeedsFinalization() {
136 28115 : return IsMarking() &&
137 2674 : (request_type_ == FINALIZATION || request_type_ == COMPLETE_MARKING);
138 : }
139 :
140 : GCRequestType request_type() const { return request_type_; }
141 :
142 17083 : void reset_request_type() { request_type_ = NONE; }
143 :
144 : bool CanBeActivated();
145 :
146 : bool WasActivated();
147 :
148 : void Start(GarbageCollectionReason gc_reason);
149 :
150 : void FinalizeIncrementally();
151 :
152 : void UpdateMarkingWorklistAfterScavenge();
153 : void UpdateWeakReferencesAfterScavenge();
154 : void UpdateMarkedBytesAfterScavenge(size_t dead_bytes_in_new_space);
155 :
156 : void Hurry();
157 :
158 : void Finalize();
159 :
160 : void Stop();
161 :
162 : void FinalizeMarking(CompletionAction action);
163 :
164 : void MarkingComplete(CompletionAction action);
165 :
166 : void Epilogue();
167 :
168 : // Performs incremental marking steps and returns before the deadline_in_ms is
169 : // reached. It may return earlier if the marker is already ahead of the
170 : // marking schedule, which is indicated with StepResult::kDone.
171 : StepResult AdvanceWithDeadline(double deadline_in_ms,
172 : CompletionAction completion_action,
173 : StepOrigin step_origin);
174 :
175 : void FinalizeSweeping();
176 :
177 : StepResult V8Step(double max_step_size_in_ms, CompletionAction action,
178 : StepOrigin step_origin);
179 :
180 : bool ShouldDoEmbedderStep();
181 : StepResult EmbedderStep(double duration);
182 :
183 : inline void RestartIfNotMarking();
184 :
185 : // {raw_obj} and {slot_address} are raw Address values instead of a
186 : // HeapObject and a MaybeObjectSlot because this is called from
187 : // generated code via ExternalReference.
188 : static int RecordWriteFromCode(Address raw_obj, Address slot_address,
189 : Isolate* isolate);
190 :
191 : // Record a slot for compaction. Returns false for objects that are
192 : // guaranteed to be rescanned or not guaranteed to survive.
193 : //
194 : // No slots in white objects should be recorded, as some slots are typed and
195 : // cannot be interpreted correctly if the underlying object does not survive
196 : // the incremental cycle (stays white).
197 : V8_INLINE bool BaseRecordWrite(HeapObject obj, Object value);
198 : V8_INLINE void RecordWrite(HeapObject obj, ObjectSlot slot, Object value);
199 : V8_INLINE void RecordMaybeWeakWrite(HeapObject obj, MaybeObjectSlot slot,
200 : MaybeObject value);
201 : void RevisitObject(HeapObject obj);
202 : // Ensures that all descriptors in range [0, number_of_own_descriptors)
203 : // are visited.
204 : void VisitDescriptors(HeapObject host, DescriptorArray array,
205 : int number_of_own_descriptors);
206 :
207 : void RecordWriteSlow(HeapObject obj, HeapObjectSlot slot, Object value);
208 : void RecordWriteIntoCode(Code host, RelocInfo* rinfo, HeapObject value);
209 :
210 : // Returns true if the function succeeds in transitioning the object
211 : // from white to grey.
212 : bool WhiteToGreyAndPush(HeapObject obj);
213 :
214 : // This function is used to color the object black before it undergoes an
215 : // unsafe layout change. This is a part of synchronization protocol with
216 : // the concurrent marker.
217 : void MarkBlackAndVisitObjectDueToLayoutChange(HeapObject obj);
218 :
219 4928567 : bool IsCompacting() { return IsMarking() && is_compacting_; }
220 :
221 239498 : void NotifyIncompleteScanOfObject(int unscanned_bytes) {
222 239498 : unscanned_bytes_of_large_object_ = unscanned_bytes;
223 239498 : }
224 :
225 : void ProcessBlackAllocatedObject(HeapObject obj);
226 :
227 51747139 : Heap* heap() const { return heap_; }
228 :
229 : IncrementalMarkingJob* incremental_marking_job() {
230 : return &incremental_marking_job_;
231 : }
232 :
233 : bool black_allocation() { return black_allocation_; }
234 :
235 : void StartBlackAllocationForTesting() {
236 35 : if (!black_allocation_) {
237 0 : StartBlackAllocation();
238 : }
239 : }
240 :
241 52776439 : MarkCompactCollector::MarkingWorklist* marking_worklist() const {
242 52776439 : return marking_worklist_;
243 : }
244 :
245 : void Deactivate();
246 :
247 : // Ensures that the given region is black allocated if it is in the old
248 : // generation.
249 : void EnsureBlackAllocated(Address allocated, size_t size);
250 :
251 : private:
252 122068 : class Observer : public AllocationObserver {  // Triggers marking steps from the allocation path (see AdvanceOnAllocation).
253 : public:
254 : Observer(IncrementalMarking& incremental_marking, intptr_t step_size)
255 : : AllocationObserver(step_size),
256 122098 : incremental_marking_(incremental_marking) {}
257 :
258 : void Step(int bytes_allocated, Address, size_t) override;
259 :
260 : private:
261 : IncrementalMarking& incremental_marking_;
262 : };
263 :
264 : void StartMarking();
265 :
266 : void StartBlackAllocation();
267 : void PauseBlackAllocation();
268 : void FinishBlackAllocation();
269 :
270 : void MarkRoots();
271 : bool ShouldRetainMap(Map map, int age);
272 : // Retain dying maps for <FLAG_retain_maps_for_n_gc> garbage collections to
273 : // increase the chances of reusing the map transition tree in the future.
274 : void RetainMaps();
275 :
276 : void ActivateIncrementalWriteBarrier(PagedSpace* space);
277 : void ActivateIncrementalWriteBarrier(NewSpace* space);
278 : void ActivateIncrementalWriteBarrier();
279 :
280 : void DeactivateIncrementalWriteBarrierForSpace(PagedSpace* space);
281 : void DeactivateIncrementalWriteBarrierForSpace(NewSpace* space);
282 : void DeactivateIncrementalWriteBarrier();
283 :
284 : V8_INLINE intptr_t ProcessMarkingWorklist(
285 : intptr_t bytes_to_process,
286 : ForceCompletionAction completion = DO_NOT_FORCE_COMPLETION);
287 :
288 : V8_INLINE bool IsFixedArrayWithProgressBar(HeapObject object);
289 :
290 : // Visits the object and returns its size.
291 : V8_INLINE int VisitObject(Map map, HeapObject obj);
292 :
293 : // Updates scheduled_bytes_to_mark_ to ensure marking progress based on
294 : // time.
295 : void ScheduleBytesToMarkBasedOnTime(double time_ms);
296 : // Updates scheduled_bytes_to_mark_ to ensure marking progress based on
297 : // allocations.
298 : void ScheduleBytesToMarkBasedOnAllocation();
299 : // Helper functions for ScheduleBytesToMarkBasedOnAllocation.
300 : size_t StepSizeToKeepUpWithAllocations();
301 : size_t StepSizeToMakeProgress();
302 : void AddScheduledBytesToMark(size_t bytes_to_mark);
303 :
304 : // Schedules more bytes to mark so that the marker is no longer ahead
305 : // of schedule.
306 : void FastForwardSchedule();
307 : void FastForwardScheduleIfCloseToFinalization();
308 :
309 : // Fetches marked byte counters from the concurrent marker.
310 : void FetchBytesMarkedConcurrently();
311 :
312 : // Returns the bytes to mark in the current step based on the scheduled
313 : // bytes and already marked bytes.
314 : size_t ComputeStepSizeInBytes(StepOrigin step_origin);
315 :
316 : void AdvanceOnAllocation();
317 :
318 : void SetState(State s) {
319 159037 : state_ = s;
320 : heap_->SetIsMarkingFlag(s >= MARKING);
321 : }
322 :
323 : Heap* const heap_;
324 : MarkCompactCollector::MarkingWorklist* const marking_worklist_;
325 : WeakObjects* weak_objects_;
326 :
327 : double start_time_ms_;
328 : size_t initial_old_generation_size_;
329 : size_t old_generation_allocation_counter_;
330 : size_t bytes_marked_;
331 : size_t scheduled_bytes_to_mark_;
332 : double schedule_update_time_ms_;
333 : // A sample of concurrent_marking()->TotalMarkedBytes() at the last
334 : // incremental marking step. It is used for updating
335 : // bytes_marked_ with the contribution of concurrent marking.
336 : size_t bytes_marked_concurrently_;
337 : size_t unscanned_bytes_of_large_object_;  // Set via NotifyIncompleteScanOfObject().
338 :
339 : // Must use SetState() above to update state_
340 : State state_;
341 :
342 : bool is_compacting_;
343 : bool should_hurry_;
344 : bool was_activated_;
345 : bool black_allocation_;
346 : bool finalize_marking_completed_;
347 : IncrementalMarkingJob incremental_marking_job_;
348 :
349 : GCRequestType request_type_;
350 :
351 : Observer new_generation_observer_;
352 : Observer old_generation_observer_;
353 :
354 : MarkingState marking_state_;
355 : AtomicMarkingState atomic_marking_state_;
356 : NonAtomicMarkingState non_atomic_marking_state_;
357 :
358 : DISALLOW_IMPLICIT_CONSTRUCTORS(IncrementalMarking);
359 : };
360 : } // namespace internal
361 : } // namespace v8
362 :
363 : #endif // V8_HEAP_INCREMENTAL_MARKING_H_
|