Line data Source code
1 : // Copyright 2012 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #ifndef V8_HEAP_MARK_COMPACT_INL_H_
6 : #define V8_HEAP_MARK_COMPACT_INL_H_
7 :
8 : #include "src/heap/mark-compact.h"
9 :
10 : #include "src/assembler-inl.h"
11 : #include "src/base/bits.h"
12 : #include "src/heap/heap-inl.h"
13 : #include "src/heap/incremental-marking.h"
14 : #include "src/heap/objects-visiting-inl.h"
15 : #include "src/heap/remembered-set.h"
16 : #include "src/objects/js-collection-inl.h"
17 : #include "src/objects/js-weak-refs-inl.h"
18 : #include "src/objects/slots-inl.h"
19 : #include "src/transitions.h"
20 :
21 : namespace v8 {
22 : namespace internal {
23 :
// Attempts the grey -> black mark-bit transition on |obj|. Returns false if
// the transition fails in Marking::GreyToBlack (e.g. another marker already
// blackened the object under ATOMIC access mode). On success the object's
// size is accounted as live bytes on its MemoryChunk.
template <typename ConcreteState, AccessMode access_mode>
bool MarkingStateBase<ConcreteState, access_mode>::GreyToBlack(HeapObject obj) {
  MemoryChunk* p = MemoryChunk::FromHeapObject(obj);
  MarkBit markbit = MarkBitFrom(p, obj->address());
  if (!Marking::GreyToBlack<access_mode>(markbit)) return false;
  // CRTP dispatch: the concrete state implements live-byte bookkeeping.
  static_cast<ConcreteState*>(this)->IncrementLiveBytes(p, obj->Size());
  return true;
}
32 :
// Attempts the white -> grey mark-bit transition on |obj| and forwards the
// result of Marking::WhiteToGrey (false presumably means the object was
// already marked; exact semantics live in src/heap/marking.h).
template <typename ConcreteState, AccessMode access_mode>
bool MarkingStateBase<ConcreteState, access_mode>::WhiteToGrey(HeapObject obj) {
  return Marking::WhiteToGrey<access_mode>(MarkBitFrom(obj));
}
37 :
// White -> black in two steps (white->grey, then grey->black). Returns true
// only if both transitions succeed; live bytes are updated by GreyToBlack.
template <typename ConcreteState, AccessMode access_mode>
bool MarkingStateBase<ConcreteState, access_mode>::WhiteToBlack(
    HeapObject obj) {
  return WhiteToGrey(obj) && GreyToBlack(obj);
}
43 :
// Caches the heap, collector and marking state, plus the collector's current
// epoch. The epoch is used by VisitDescriptors to detect stale
// marked-descriptor counts from a previous GC cycle.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
MarkingVisitor<fixed_array_mode, retaining_path_mode,
               MarkingState>::MarkingVisitor(MarkCompactCollector* collector,
                                             MarkingState* marking_state)
    : heap_(collector->heap()),
      collector_(collector),
      marking_state_(marking_state),
      mark_compact_epoch_(collector->epoch()) {}
53 :
// Visits a BytecodeArray's body and, unless this GC was forced, ages the
// array via MakeOlder (presumably advancing it towards bytecode flushing;
// confirm in BytecodeArray). Returns the object's size in bytes.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitBytecodeArray(Map map,
                                                     BytecodeArray array) {
  int size = BytecodeArray::BodyDescriptor::SizeOf(map, array);
  BytecodeArray::BodyDescriptor::IterateBody(map, array, size, this);

  // Forced GCs do not age bytecode, so they cannot trigger flushing.
  if (!heap_->is_current_gc_forced()) {
    array->MakeOlder();
  }
  return size;
}
67 :
// Visits a DescriptorArray reached directly from the worklist: the header
// pointer fields (up to the first descriptor slot) are visited strongly,
// then VisitDescriptors handles the descriptor entries themselves.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitDescriptorArray(Map map,
                                                       DescriptorArray array) {
  int size = DescriptorArray::BodyDescriptor::SizeOf(map, array);
  VisitPointers(array, array->GetFirstPointerSlot(),
                array->GetDescriptorSlot(0));
  VisitDescriptors(array, array->number_of_descriptors());
  return size;
}
79 :
// Visits a SharedFunctionInfo. The function-data field is treated specially:
// when the SFI's bytecode is old enough to be flushed, the SFI is queued as a
// flushing candidate (leaving the data field unvisited here); otherwise the
// data field is visited strongly like any other pointer.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
    VisitSharedFunctionInfo(Map map, SharedFunctionInfo shared_info) {
  int size = SharedFunctionInfo::BodyDescriptor::SizeOf(map, shared_info);
  SharedFunctionInfo::BodyDescriptor::IterateBody(map, shared_info, size, this);

  // If the SharedFunctionInfo has old bytecode, mark it as flushable,
  // otherwise visit the function data field strongly.
  if (shared_info->ShouldFlushBytecode()) {
    collector_->AddBytecodeFlushingCandidate(shared_info);
  } else {
    VisitPointer(shared_info,
                 shared_info.RawField(SharedFunctionInfo::kFunctionDataOffset));
  }
  return size;
}
97 :
// Visits a JSFunction via the parent visitor, then queues the function for a
// post-marking reset if its bytecode has been flushed (only relevant when
// the --flush-bytecode flag is on).
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitJSFunction(Map map, JSFunction object) {
  int size = Parent::VisitJSFunction(map, object);

  // Check if the JSFunction needs reset due to bytecode being flushed.
  if (FLAG_flush_bytecode && object->NeedsResetDueToFlushedBytecode()) {
    collector_->AddFlushedJSFunction(object);
  }

  return size;
}
111 : template <FixedArrayVisitationMode fixed_array_mode,
112 : TraceRetainingPathMode retaining_path_mode, typename MarkingState>
113 : int MarkingVisitor<fixed_array_mode, retaining_path_mode,
114 : MarkingState>::VisitFixedArray(Map map, FixedArray object) {
115 : return (fixed_array_mode == FixedArrayVisitationMode::kRegular)
116 : ? Parent::VisitFixedArray(map, object)
117 : : VisitFixedArrayIncremental(map, object);
118 : }
119 :
// Common visitation path for objects that may carry embedder (wrapper)
// fields: when an embedder heap tracer is active, the object is also pushed
// onto the embedder worklist before the regular body visit.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
template <typename T>
V8_INLINE int
MarkingVisitor<fixed_array_mode, retaining_path_mode,
               MarkingState>::VisitEmbedderTracingSubclass(Map map, T object) {
  if (heap_->local_embedder_heap_tracer()->InUse()) {
    marking_worklist()->embedder()->Push(MarkCompactCollectorBase::kMainThread,
                                         object);
  }
  int size = T::BodyDescriptor::SizeOf(map, object);
  T::BodyDescriptor::IterateBody(map, object, size, this);
  return size;
}
134 :
// JSApiObjects may hold embedder fields; route through the embedder-aware
// visitation path.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitJSApiObject(Map map, JSObject object) {
  return VisitEmbedderTracingSubclass(map, object);
}
141 :
// JSArrayBuffers may hold embedder fields; route through the embedder-aware
// visitation path.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitJSArrayBuffer(Map map,
                                                     JSArrayBuffer object) {
  return VisitEmbedderTracingSubclass(map, object);
}
149 :
// JSDataViews may hold embedder fields; route through the embedder-aware
// visitation path.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitJSDataView(Map map, JSDataView object) {
  return VisitEmbedderTracingSubclass(map, object);
}
156 :
// JSTypedArrays may hold embedder fields; route through the embedder-aware
// visitation path.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitJSTypedArray(Map map,
                                                    JSTypedArray object) {
  return VisitEmbedderTracingSubclass(map, object);
}
164 :
// Visits an EphemeronHashTable, which has ephemeron semantics: an entry's
// value should be treated as live only if its key is live. Keys are never
// visited strongly here; values are visited strongly only when their key is
// already marked. The table itself is queued so the collector can re-examine
// unresolved entries later.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
    VisitEphemeronHashTable(Map map, EphemeronHashTable table) {
  collector_->AddEphemeronHashTable(table);

  for (int i = 0; i < table->Capacity(); i++) {
    ObjectSlot key_slot =
        table->RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(i));
    HeapObject key = HeapObject::cast(table->KeyAt(i));
    // Record (but do not mark through) the key slot for evacuation fixup.
    collector_->RecordSlot(table, key_slot, key);

    ObjectSlot value_slot =
        table->RawFieldOfElementAt(EphemeronHashTable::EntryToValueIndex(i));

    if (marking_state()->IsBlackOrGrey(key)) {
      // Key is known live: the value is reachable and visited strongly.
      VisitPointer(table, value_slot);

    } else {
      Object value_obj = *value_slot;

      if (value_obj->IsHeapObject()) {
        HeapObject value = HeapObject::cast(value_obj);
        collector_->RecordSlot(table, value_slot, value);

        // Revisit ephemerons with both key and value unreachable at end
        // of concurrent marking cycle.
        if (marking_state()->IsWhite(value)) {
          collector_->AddEphemeron(key, value);
        }
      }
    }
  }

  return table->SizeFromMap(map);
}
201 :
// Visits a Map. Transitionable maps share their descriptor arrays, so only
// the descriptors owned by this map are marked here (via VisitDescriptors);
// marking the whole shared array from one map could keep dead descriptors
// alive.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitMap(Map meta_map, Map map) {
  int size = Map::BodyDescriptor::SizeOf(meta_map, map);
  if (map->CanTransition()) {
    // Maps that can transition share their descriptor arrays and require
    // special visiting logic to avoid memory leaks.
    // Since descriptor arrays are potentially shared, ensure that only the
    // descriptors that belong to this map are marked. The first time a
    // non-empty descriptor array is marked, its header is also visited. The
    // slot holding the descriptor array will be implicitly recorded when the
    // pointer fields of this map are visited.
    DescriptorArray descriptors = map->instance_descriptors();
    MarkDescriptorArrayBlack(map, descriptors);
    int number_of_own_descriptors = map->NumberOfOwnDescriptors();
    if (number_of_own_descriptors) {
      DCHECK_LE(number_of_own_descriptors,
                descriptors->number_of_descriptors());
      VisitDescriptors(descriptors, number_of_own_descriptors);
    }
    // Mark the pointer fields of the Map. Since the transitions array has
    // been marked already, it is fine that one of these fields contains a
    // pointer to it.
  }
  Map::BodyDescriptor::IterateBody(meta_map, map, size, this);
  return size;
}
230 :
// Visits a TransitionArray's body and queues it for weak processing by the
// collector (transitions to dead maps are cleared after marking).
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitTransitionArray(Map map,
                                                       TransitionArray array) {
  int size = TransitionArray::BodyDescriptor::SizeOf(map, array);
  TransitionArray::BodyDescriptor::IterateBody(map, array, size, this);
  collector_->AddTransitionArray(array);
  return size;
}
241 :
// Visits a JSWeakRef. The target is weakly held: if it is already marked we
// only record the slot for evacuation fixup; otherwise the weak ref is
// queued for processing once full liveness is known. Structure mirrors
// VisitWeakCell below.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitJSWeakRef(Map map, JSWeakRef weak_ref) {
  if (weak_ref->target()->IsHeapObject()) {
    HeapObject target = HeapObject::cast(weak_ref->target());
    if (marking_state()->IsBlackOrGrey(target)) {
      // Record the slot inside the JSWeakRef, since the IterateBody below
      // won't visit it.
      ObjectSlot slot = weak_ref.RawField(JSWeakRef::kTargetOffset);
      collector_->RecordSlot(weak_ref, slot, target);
    } else {
      // JSWeakRef points to a potentially dead object. We have to process
      // them when we know the liveness of the whole transitive closure.
      collector_->AddWeakRef(weak_ref);
    }
  }
  int size = JSWeakRef::BodyDescriptor::SizeOf(map, weak_ref);
  JSWeakRef::BodyDescriptor::IterateBody(map, weak_ref, size, this);
  return size;
}
263 :
// Visits a WeakCell. Same protocol as VisitJSWeakRef above: record the slot
// if the weakly-held target is already marked, otherwise defer the cell
// until the transitive closure's liveness is known.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitWeakCell(Map map, WeakCell weak_cell) {
  if (weak_cell->target()->IsHeapObject()) {
    HeapObject target = HeapObject::cast(weak_cell->target());
    if (marking_state()->IsBlackOrGrey(target)) {
      // Record the slot inside the WeakCell, since the IterateBody below
      // won't visit it.
      ObjectSlot slot = weak_cell.RawField(WeakCell::kTargetOffset);
      collector_->RecordSlot(weak_cell, slot, target);
    } else {
      // WeakCell points to a potentially dead object. We have to process
      // them when we know the liveness of the whole transitive closure.
      collector_->AddWeakCell(weak_cell);
    }
  }
  int size = WeakCell::BodyDescriptor::SizeOf(map, weak_cell);
  WeakCell::BodyDescriptor::IterateBody(map, weak_cell, size, this);
  return size;
}
285 :
// Core per-slot visitation. Strong references are recorded and marked
// immediately; weak references are either recorded (target already marked)
// or deferred to the collector's weak-reference worklist. Smis and cleared
// weak slots fall through both branches and are ignored.
// class template arguments
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
// method template arguments
template <typename TSlot>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
                    MarkingState>::VisitPointerImpl(HeapObject host,
                                                    TSlot slot) {
  static_assert(std::is_same<TSlot, ObjectSlot>::value ||
                    std::is_same<TSlot, MaybeObjectSlot>::value,
                "Only ObjectSlot and MaybeObjectSlot are expected here");
  typename TSlot::TObject object = *slot;
  HeapObject target_object;
  if (object.GetHeapObjectIfStrong(&target_object)) {
    collector_->RecordSlot(host, HeapObjectSlot(slot), target_object);
    MarkObject(host, target_object);
  } else if (TSlot::kCanBeWeak && object.GetHeapObjectIfWeak(&target_object)) {
    if (marking_state()->IsBlackOrGrey(target_object)) {
      // Weak references with live values are directly processed here to reduce
      // the processing time of weak cells during the main GC pause.
      collector_->RecordSlot(host, HeapObjectSlot(slot), target_object);
    } else {
      // If we do not know about liveness of values of weak cells, we have to
      // process them when we know the liveness of the whole transitive
      // closure.
      collector_->AddWeakReference(host, HeapObjectSlot(slot));
    }
  }
}
315 :
// Visits every slot in [start, end) via VisitPointer.
// class template arguments
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
// method template arguments
template <typename TSlot>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
                    MarkingState>::VisitPointersImpl(HeapObject host,
                                                     TSlot start, TSlot end) {
  for (TSlot p = start; p < end; ++p) {
    VisitPointer(host, p);
  }
}
328 :
// Visits an object embedded in Code via relocation info. The reloc slot is
// always recorded; an unmarked target is then either deferred (if the code
// holds it weakly) or marked strongly.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
                    MarkingState>::VisitEmbeddedPointer(Code host,
                                                        RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  HeapObject object = HeapObject::cast(rinfo->target_object());
  collector_->RecordRelocSlot(host, rinfo, object);
  if (!marking_state()->IsBlackOrGrey(object)) {
    if (host->IsWeakObject(object)) {
      collector_->AddWeakObjectInCode(object, host);
    } else {
      MarkObject(host, object);
    }
  }
}
345 :
// Visits a code-target relocation entry: records the reloc slot and marks
// the target Code object strongly.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
                    MarkingState>::VisitCodeTarget(Code host,
                                                   RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeTargetMode(rinfo->rmode()));
  Code target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  collector_->RecordRelocSlot(host, rinfo, target);
  MarkObject(host, target);
}
356 :
// Marks a descriptor array black without going through the worklist. The
// header pointer fields are visited by whichever caller wins the
// grey->black transition, so they are visited exactly once.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
void MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
    MarkDescriptorArrayBlack(HeapObject host, DescriptorArray descriptors) {
  // Note that WhiteToBlack is not sufficient here because it fails if the
  // descriptor array is grey. So we need to do two steps: WhiteToGrey and
  // GreyToBlack. Alternatively, we could check WhiteToGrey || WhiteToBlack.
  if (marking_state()->WhiteToGrey(descriptors)) {
    if (retaining_path_mode == TraceRetainingPathMode::kEnabled &&
        V8_UNLIKELY(FLAG_track_retaining_path)) {
      heap_->AddRetainer(host, descriptors);
    }
  }
  if (marking_state()->GreyToBlack(descriptors)) {
    // Winner of the grey->black race visits the header fields once.
    VisitPointers(descriptors, descriptors->GetFirstPointerSlot(),
                  descriptors->GetDescriptorSlot(0));
  }
  DCHECK(marking_state()->IsBlack(descriptors));
}
376 :
// Marks |object| grey and pushes it onto the marking worklist if it was
// white; already-marked objects are left alone. Optionally records the
// retaining path (host -> object) for debugging.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
                    MarkingState>::MarkObject(HeapObject host,
                                              HeapObject object) {
  if (marking_state()->WhiteToGrey(object)) {
    marking_worklist()->Push(object);
    if (retaining_path_mode == TraceRetainingPathMode::kEnabled &&
        V8_UNLIKELY(FLAG_track_retaining_path)) {
      heap_->AddRetainer(host, object);
    }
  }
}
390 :
// Incremental-marking variant of FixedArray visitation. Large arrays on
// pages with a progress bar are scanned in kProgressBarScanningChunk-sized
// slices; the partially-scanned array is re-pushed onto the worklist so the
// remainder is processed later. Other arrays are visited in one pass.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
    VisitFixedArrayIncremental(Map map, FixedArray object) {
  MemoryChunk* chunk = MemoryChunk::FromHeapObject(object);
  int size = FixedArray::BodyDescriptor::SizeOf(map, object);
  if (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR)) {
    DCHECK(FLAG_use_marking_progress_bar);
    DCHECK(heap_->IsLargeObject(object));
    size_t current_progress_bar = chunk->ProgressBar();
    if (current_progress_bar == 0) {
      // Try to move the progress bar forward to start offset. This solves the
      // problem of not being able to observe a progress bar reset when
      // processing the first kProgressBarScanningChunk.
      if (!chunk->TrySetProgressBar(0,
                                    FixedArray::BodyDescriptor::kStartOffset))
        return 0;
      current_progress_bar = FixedArray::BodyDescriptor::kStartOffset;
    }
    int start = static_cast<int>(current_progress_bar);
    int end = Min(size, start + kProgressBarScanningChunk);
    if (start < end) {
      VisitPointers(object, object.RawField(start), object.RawField(end));
      // Setting the progress bar can fail if the object that is currently
      // scanned is also revisited. In this case, there may be two tasks racing
      // on the progress counter. The loser can bail out because the progress
      // bar is reset before the tasks race on the object.
      if (chunk->TrySetProgressBar(current_progress_bar, end) && (end < size)) {
        DCHECK(marking_state()->IsBlack(object));
        // The object can be pushed back onto the marking worklist only after
        // progress bar was updated.
        marking_worklist()->Push(object);
      }
    }
    // Only the bytes scanned in this slice count towards this visit.
    return end - start;
  }

  // Non-batched processing.
  FixedArray::BodyDescriptor::IterateBody(map, object, size, this);
  return size;
}
432 :
// Visits descriptor entries [old_marked, new_marked) of a (shared) black
// descriptor array. UpdateNumberOfMarkedDescriptors is keyed by the GC epoch
// so counts from previous cycles are not mistaken for progress in this one;
// only the not-yet-marked suffix is visited.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
void MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
    VisitDescriptors(DescriptorArray descriptors,
                     int number_of_own_descriptors) {
  // Updating the number of marked descriptor is supported only for black
  // descriptor arrays.
  DCHECK(marking_state()->IsBlack(descriptors));
  int16_t new_marked = static_cast<int16_t>(number_of_own_descriptors);
  int16_t old_marked = descriptors->UpdateNumberOfMarkedDescriptors(
      mark_compact_epoch_, new_marked);
  if (old_marked < new_marked) {
    VisitPointers(descriptors,
                  MaybeObjectSlot(descriptors->GetDescriptorSlot(old_marked)),
                  MaybeObjectSlot(descriptors->GetDescriptorSlot(new_marked)));
  }
}
450 :
// Greys |obj| and pushes it onto the marking worklist if it was white;
// optionally records |host| as the retainer for debugging.
void MarkCompactCollector::MarkObject(HeapObject host, HeapObject obj) {
  if (marking_state()->WhiteToGrey(obj)) {
    marking_worklist()->Push(obj);
    if (V8_UNLIKELY(FLAG_track_retaining_path)) {
      heap_->AddRetainer(host, obj);
    }
  }
}
459 :
// Like MarkObject, but the retainer is a GC root rather than a heap object.
void MarkCompactCollector::MarkRootObject(Root root, HeapObject obj) {
  if (marking_state()->WhiteToGrey(obj)) {
    marking_worklist()->Push(obj);
    if (V8_UNLIKELY(FLAG_track_retaining_path)) {
      heap_->AddRetainingRoot(root, obj);
    }
  }
}
468 :
469 : #ifdef ENABLE_MINOR_MC
470 :
// Minor MC only collects the young generation, so old-generation roots are
// ignored. Uses the non-atomic marking state (single-threaded root marking).
void MinorMarkCompactCollector::MarkRootObject(HeapObject obj) {
  if (Heap::InYoungGeneration(obj) &&
      non_atomic_marking_state_.WhiteToGrey(obj)) {
    worklist_->Push(kMainThread, obj);
  }
}
477 :
478 : #endif
479 :
// Marks an object held alive by the embedder (wrapper tracing); the
// retaining root, when tracked, is recorded as kWrapperTracing.
void MarkCompactCollector::MarkExternallyReferencedObject(HeapObject obj) {
  if (marking_state()->WhiteToGrey(obj)) {
    marking_worklist()->Push(obj);
    if (V8_UNLIKELY(FLAG_track_retaining_path)) {
      heap_->AddRetainingRoot(Root::kWrapperTracing, obj);
    }
  }
}
488 :
// Convenience overload: forwards an ObjectSlot to the HeapObjectSlot variant.
void MarkCompactCollector::RecordSlot(HeapObject object, ObjectSlot slot,
                                      HeapObject target) {
  RecordSlot(object, HeapObjectSlot(slot), target);
}
493 :
// Records |slot| in the OLD_TO_OLD remembered set when the target lives on
// an evacuation candidate page (so the slot can be updated after the target
// moves), unless the source page opts out of slot recording.
void MarkCompactCollector::RecordSlot(HeapObject object, HeapObjectSlot slot,
                                      HeapObject target) {
  Page* target_page = Page::FromHeapObject(target);
  Page* source_page = Page::FromHeapObject(object);
  if (target_page->IsEvacuationCandidate<AccessMode::ATOMIC>() &&
      !source_page->ShouldSkipEvacuationSlotRecording<AccessMode::ATOMIC>()) {
    RememberedSet<OLD_TO_OLD>::Insert(source_page, slot.address());
  }
}
503 :
// Queues a transition array for weak-object processing after marking.
void MarkCompactCollector::AddTransitionArray(TransitionArray array) {
  weak_objects_.transition_arrays.Push(kMainThread, array);
}
507 :
// Queues a SharedFunctionInfo whose bytecode may be flushed after marking.
void MarkCompactCollector::AddBytecodeFlushingCandidate(
    SharedFunctionInfo flush_candidate) {
  weak_objects_.bytecode_flushing_candidates.Push(kMainThread, flush_candidate);
}
512 :
// Queues a JSFunction whose bytecode was flushed so it can be reset later.
void MarkCompactCollector::AddFlushedJSFunction(JSFunction flushed_function) {
  weak_objects_.flushed_js_functions.Push(kMainThread, flushed_function);
}
516 :
// Positions the mark-bitmap iterator at the cell containing |start| and, if
// the chunk is not exhausted, advances to the first valid (non-filler) live
// object. Filler maps are cached up front so AdvanceToNextValidObject can
// filter fillers without re-reading roots.
template <LiveObjectIterationMode mode>
LiveObjectRange<mode>::iterator::iterator(MemoryChunk* chunk, Bitmap* bitmap,
                                          Address start)
    : chunk_(chunk),
      one_word_filler_map_(
          ReadOnlyRoots(chunk->heap()).one_pointer_filler_map()),
      two_word_filler_map_(
          ReadOnlyRoots(chunk->heap()).two_pointer_filler_map()),
      free_space_map_(ReadOnlyRoots(chunk->heap()).free_space_map()),
      it_(chunk, bitmap) {
  it_.Advance(Bitmap::IndexToCell(
      Bitmap::CellAlignIndex(chunk_->AddressToMarkbitIndex(start))));
  if (!it_.Done()) {
    cell_base_ = it_.CurrentCellBase();
    current_cell_ = *it_.CurrentCell();
    AdvanceToNextValidObject();
  }
}
535 :
// Pre-increment: advances to the next live object on the chunk.
template <LiveObjectIterationMode mode>
typename LiveObjectRange<mode>::iterator& LiveObjectRange<mode>::iterator::
operator++() {
  AdvanceToNextValidObject();
  return *this;
}
542 :
543 : template <LiveObjectIterationMode mode>
544 : typename LiveObjectRange<mode>::iterator LiveObjectRange<mode>::iterator::
545 : operator++(int) {
546 : iterator retval = *this;
547 : ++(*this);
548 : return retval;
549 : }
550 :
// Scans the mark bitmap cell-by-cell for the next live object that matches
// the iteration mode, skipping filler objects. Black objects are encoded as
// two consecutive set bits (the second bit marks "black", and black areas
// have all interior bits set); grey objects set only the first bit. Sets
// current_object_/current_size_ on success, or a null current_object_ when
// the chunk is exhausted.
template <LiveObjectIterationMode mode>
void LiveObjectRange<mode>::iterator::AdvanceToNextValidObject() {
  while (!it_.Done()) {
    HeapObject object;
    int size = 0;
    while (current_cell_ != 0) {
      // Lowest set bit in the cell is the next candidate object start.
      uint32_t trailing_zeros = base::bits::CountTrailingZeros(current_cell_);
      Address addr = cell_base_ + trailing_zeros * kTaggedSize;

      // Clear the first bit of the found object..
      current_cell_ &= ~(1u << trailing_zeros);

      uint32_t second_bit_index = 0;
      if (trailing_zeros >= Bitmap::kBitIndexMask) {
        second_bit_index = 0x1;
        // The overlapping case; there has to exist a cell after the current
        // cell.
        // However, if there is a black area at the end of the page, and the
        // last word is a one word filler, we are not allowed to advance. In
        // that case we can return immediately.
        if (!it_.Advance()) {
          DCHECK(HeapObject::FromAddress(addr)->map() == one_word_filler_map_);
          current_object_ = HeapObject();
          return;
        }
        cell_base_ = it_.CurrentCellBase();
        current_cell_ = *it_.CurrentCell();
      } else {
        second_bit_index = 1u << (trailing_zeros + 1);
      }

      Map map;
      if (current_cell_ & second_bit_index) {
        // We found a black object. If the black object is within a black area,
        // make sure that we skip all set bits in the black area until the
        // object ends.
        HeapObject black_object = HeapObject::FromAddress(addr);
        // Acquire load pairs with a concurrent map installation elsewhere.
        map = Map::cast(ObjectSlot(addr).Acquire_Load());
        size = black_object->SizeFromMap(map);
        Address end = addr + size - kTaggedSize;
        // One word filler objects do not borrow the second mark bit. We have
        // to jump over the advancing and clearing part.
        // Note that we know that we are at a one word filler when
        // object_start + object_size - kTaggedSize == object_start.
        if (addr != end) {
          DCHECK_EQ(chunk_, MemoryChunk::FromAddress(end));
          uint32_t end_mark_bit_index = chunk_->AddressToMarkbitIndex(end);
          unsigned int end_cell_index =
              end_mark_bit_index >> Bitmap::kBitsPerCellLog2;
          MarkBit::CellType end_index_mask =
              1u << Bitmap::IndexInCell(end_mark_bit_index);
          if (it_.Advance(end_cell_index)) {
            cell_base_ = it_.CurrentCellBase();
            current_cell_ = *it_.CurrentCell();
          }

          // Clear all bits in current_cell, including the end index.
          current_cell_ &= ~(end_index_mask + end_index_mask - 1);
        }

        if (mode == kBlackObjects || mode == kAllLiveObjects) {
          object = black_object;
        }
      } else if ((mode == kGreyObjects || mode == kAllLiveObjects)) {
        map = Map::cast(ObjectSlot(addr).Acquire_Load());
        object = HeapObject::FromAddress(addr);
        size = object->SizeFromMap(map);
      }

      // We found a live object.
      if (!object.is_null()) {
        // Do not use IsFiller() here. This may cause a data race for reading
        // out the instance type when a new map concurrently is written into
        // this object while iterating over the object.
        if (map == one_word_filler_map_ || map == two_word_filler_map_ ||
            map == free_space_map_) {
          // There are two reasons why we can get black or grey fillers:
          // 1) Black areas together with slack tracking may result in black one
          // word filler objects.
          // 2) Left trimming may leave black or grey fillers behind because we
          // do not clear the old location of the object start.
          // We filter these objects out in the iterator.
          object = HeapObject();
        } else {
          break;
        }
      }
    }

    // Current cell exhausted: move to the next bitmap cell, if any.
    if (current_cell_ == 0) {
      if (it_.Advance()) {
        cell_base_ = it_.CurrentCellBase();
        current_cell_ = *it_.CurrentCell();
      }
    }
    if (!object.is_null()) {
      current_object_ = object;
      current_size_ = size;
      return;
    }
  }
  // No more live objects on this chunk.
  current_object_ = HeapObject();
}
654 :
// Iterator positioned at the first live object at or after start_.
template <LiveObjectIterationMode mode>
typename LiveObjectRange<mode>::iterator LiveObjectRange<mode>::begin() {
  return iterator(chunk_, bitmap_, start_);
}
659 :
// Past-the-end iterator, constructed from the range's end address.
template <LiveObjectIterationMode mode>
typename LiveObjectRange<mode>::iterator LiveObjectRange<mode>::end() {
  return iterator(chunk_, bitmap_, end_);
}
664 :
665 : Isolate* MarkCompactCollectorBase::isolate() { return heap()->isolate(); }
666 :
667 : } // namespace internal
668 : } // namespace v8
669 :
670 : #endif // V8_HEAP_MARK_COMPACT_INL_H_
|