// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_MARK_COMPACT_INL_H_
#define V8_HEAP_MARK_COMPACT_INL_H_

#include "src/heap/mark-compact.h"

#include "src/assembler-inl.h"
#include "src/base/bits.h"
#include "src/heap/heap-inl.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/objects-visiting-inl.h"
#include "src/heap/remembered-set.h"
#include "src/objects/js-collection-inl.h"
#include "src/objects/js-weak-refs-inl.h"
#include "src/objects/slots-inl.h"
#include "src/transitions.h"

namespace v8 {
namespace internal {

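// Helpers for the tri-color marking state: objects progress from white
// (unmarked) through grey (marked, fields not yet visited) to black (marked
// and fully visited). GreyToBlack additionally accounts the object's size as
// live bytes on its memory chunk.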
template <typename ConcreteState, AccessMode access_mode>
bool MarkingStateBase<ConcreteState, access_mode>::GreyToBlack(HeapObject obj) {
  MemoryChunk* p = MemoryChunk::FromHeapObject(obj);
  MarkBit markbit = MarkBitFrom(p, obj->address());
  if (!Marking::GreyToBlack<access_mode>(markbit)) return false;
  static_cast<ConcreteState*>(this)->IncrementLiveBytes(p, obj->Size());
  return true;
}

template <typename ConcreteState, AccessMode access_mode>
bool MarkingStateBase<ConcreteState, access_mode>::WhiteToGrey(HeapObject obj) {
  return Marking::WhiteToGrey<access_mode>(MarkBitFrom(obj));
}

template <typename ConcreteState, AccessMode access_mode>
bool MarkingStateBase<ConcreteState, access_mode>::WhiteToBlack(
    HeapObject obj) {
  return WhiteToGrey(obj) && GreyToBlack(obj);
}

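// Main-thread marking visitor. It is parameterized over the fixed-array
// visitation mode (regular vs. incremental, progress-bar based) and over
// whether retaining paths are traced for --track-retaining-path.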
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
MarkingVisitor<fixed_array_mode, retaining_path_mode,
               MarkingState>::MarkingVisitor(MarkCompactCollector* collector,
                                             MarkingState* marking_state)
    : heap_(collector->heap()),
      collector_(collector),
      marking_state_(marking_state),
      mark_compact_epoch_(collector->epoch()) {}

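// Bytecode arrays are aged on every non-forced GC so that bytecode that has
// not been executed for several collections becomes eligible for flushing.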
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitBytecodeArray(Map map,
                                                     BytecodeArray array) {
  int size = BytecodeArray::BodyDescriptor::SizeOf(map, array);
  BytecodeArray::BodyDescriptor::IterateBody(map, array, size, this);

  if (!heap_->is_current_gc_forced()) {
    array->MakeOlder();
  }
  return size;
}

template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitDescriptorArray(Map map,
                                                       DescriptorArray array) {
  int size = DescriptorArray::BodyDescriptor::SizeOf(map, array);
  VisitPointers(array, array->GetFirstPointerSlot(),
                array->GetDescriptorSlot(0));
  VisitDescriptors(array, array->number_of_descriptors());
  return size;
}

template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
    VisitSharedFunctionInfo(Map map, SharedFunctionInfo shared_info) {
  int size = SharedFunctionInfo::BodyDescriptor::SizeOf(map, shared_info);
  SharedFunctionInfo::BodyDescriptor::IterateBody(map, shared_info, size, this);

  // If the SharedFunctionInfo has old bytecode, mark it as flushable,
  // otherwise visit the function data field strongly.
  if (shared_info->ShouldFlushBytecode()) {
    collector_->AddBytecodeFlushingCandidate(shared_info);
  } else {
    VisitPointer(shared_info,
                 HeapObject::RawField(shared_info,
                                      SharedFunctionInfo::kFunctionDataOffset));
  }
  return size;
}

template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitJSFunction(Map map, JSFunction object) {
  int size = Parent::VisitJSFunction(map, object);

  // Check if the JSFunction needs to be reset due to bytecode being flushed.
  if (FLAG_flush_bytecode && object->NeedsResetDueToFlushedBytecode()) {
    collector_->AddFlushedJSFunction(object);
  }

  return size;
}

template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitFixedArray(Map map, FixedArray object) {
  return (fixed_array_mode == FixedArrayVisitationMode::kRegular)
             ? Parent::VisitFixedArray(map, object)
             : VisitFixedArrayIncremental(map, object);
}

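// Objects with embedder fields (API objects, array buffers, data views and
// typed arrays) are additionally handed to the embedder heap tracer, if one
// is in use, so the embedder can trace the references it holds on its side.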
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
template <typename T>
V8_INLINE int
MarkingVisitor<fixed_array_mode, retaining_path_mode,
               MarkingState>::VisitEmbedderTracingSubclass(Map map, T object) {
  if (heap_->local_embedder_heap_tracer()->InUse()) {
    marking_worklist()->embedder()->Push(MarkCompactCollectorBase::kMainThread,
                                         object);
  }
  int size = T::BodyDescriptor::SizeOf(map, object);
  T::BodyDescriptor::IterateBody(map, object, size, this);
  return size;
}

template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitJSApiObject(Map map, JSObject object) {
  return VisitEmbedderTracingSubclass(map, object);
}

template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitJSArrayBuffer(Map map,
                                                     JSArrayBuffer object) {
  return VisitEmbedderTracingSubclass(map, object);
}

template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitJSDataView(Map map, JSDataView object) {
  return VisitEmbedderTracingSubclass(map, object);
}

template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitJSTypedArray(Map map,
                                                    JSTypedArray object) {
  return VisitEmbedderTracingSubclass(map, object);
}

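// Ephemeron semantics: the value of an EphemeronHashTable entry is kept alive
// only if its key is alive. Values whose keys are already marked are visited
// directly; entries whose keys are still white are deferred and revisited once
// the liveness of the transitive closure is known.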
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
    VisitEphemeronHashTable(Map map, EphemeronHashTable table) {
  collector_->AddEphemeronHashTable(table);

  for (int i = 0; i < table->Capacity(); i++) {
    ObjectSlot key_slot =
        table->RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(i));
    HeapObject key = HeapObject::cast(table->KeyAt(i));
    collector_->RecordSlot(table, key_slot, key);

    ObjectSlot value_slot =
        table->RawFieldOfElementAt(EphemeronHashTable::EntryToValueIndex(i));

    if (marking_state()->IsBlackOrGrey(key)) {
      VisitPointer(table, value_slot);
    } else {
      Object value_obj = *value_slot;

      if (value_obj->IsHeapObject()) {
        HeapObject value = HeapObject::cast(value_obj);
        collector_->RecordSlot(table, value_slot, value);

        // Revisit ephemerons with both key and value unreachable at the end
        // of the concurrent marking cycle.
        if (marking_state()->IsWhite(value)) {
          collector_->AddEphemeron(key, value);
        }
      }
    }
  }

  return table->SizeFromMap(map);
}

template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitMap(Map meta_map, Map map) {
  int size = Map::BodyDescriptor::SizeOf(meta_map, map);
  if (map->CanTransition()) {
    // Maps that can transition share their descriptor arrays and require
    // special visiting logic to avoid memory leaks.
    // Since descriptor arrays are potentially shared, ensure that only the
    // descriptors that belong to this map are marked. The first time a
    // non-empty descriptor array is marked, its header is also visited. The
    // slot holding the descriptor array will be implicitly recorded when the
    // pointer fields of this map are visited.
    DescriptorArray descriptors = map->instance_descriptors();
    MarkDescriptorArrayBlack(map, descriptors);
    int number_of_own_descriptors = map->NumberOfOwnDescriptors();
    if (number_of_own_descriptors) {
      DCHECK_LE(number_of_own_descriptors,
                descriptors->number_of_descriptors());
      VisitDescriptors(descriptors, number_of_own_descriptors);
    }
    // Mark the pointer fields of the Map. Since the transitions array has
    // been marked already, it is fine that one of these fields contains a
    // pointer to it.
  }
  Map::BodyDescriptor::IterateBody(meta_map, map, size, this);
  return size;
}

template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitTransitionArray(Map map,
                                                       TransitionArray array) {
  int size = TransitionArray::BodyDescriptor::SizeOf(map, array);
  TransitionArray::BodyDescriptor::IterateBody(map, array, size, this);
  collector_->AddTransitionArray(array);
  return size;
}

template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitJSWeakRef(Map map, JSWeakRef weak_ref) {
  if (weak_ref->target()->IsHeapObject()) {
    HeapObject target = HeapObject::cast(weak_ref->target());
    if (marking_state()->IsBlackOrGrey(target)) {
      // Record the slot inside the JSWeakRef, since the IterateBody below
      // won't visit it.
      ObjectSlot slot =
          HeapObject::RawField(weak_ref, JSWeakRef::kTargetOffset);
      collector_->RecordSlot(weak_ref, slot, target);
    } else {
      // The JSWeakRef points to a potentially dead object. We have to process
      // it once we know the liveness of the whole transitive closure.
      collector_->AddWeakRef(weak_ref);
    }
  }
  int size = JSWeakRef::BodyDescriptor::SizeOf(map, weak_ref);
  JSWeakRef::BodyDescriptor::IterateBody(map, weak_ref, size, this);
  return size;
}

template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitWeakCell(Map map, WeakCell weak_cell) {
  if (weak_cell->target()->IsHeapObject()) {
    HeapObject target = HeapObject::cast(weak_cell->target());
    if (marking_state()->IsBlackOrGrey(target)) {
      // Record the slot inside the WeakCell, since the IterateBody below
      // won't visit it.
      ObjectSlot slot =
          HeapObject::RawField(weak_cell, WeakCell::kTargetOffset);
      collector_->RecordSlot(weak_cell, slot, target);
    } else {
      // The WeakCell points to a potentially dead object. We have to process
      // it once we know the liveness of the whole transitive closure.
      collector_->AddWeakCell(weak_cell);
    }
  }
  int size = WeakCell::BodyDescriptor::SizeOf(map, weak_cell);
  WeakCell::BodyDescriptor::IterateBody(map, weak_cell, size, this);
  return size;
}

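// Common slot visitation for both ObjectSlot and MaybeObjectSlot. Strong
// references are recorded and marked immediately; weak references are only
// recorded if their target is already marked, otherwise they are deferred to
// weak-reference processing at the end of marking.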
// class template arguments
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
// method template arguments
template <typename TSlot>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
                    MarkingState>::VisitPointerImpl(HeapObject host,
                                                    TSlot slot) {
  static_assert(std::is_same<TSlot, ObjectSlot>::value ||
                    std::is_same<TSlot, MaybeObjectSlot>::value,
                "Only ObjectSlot and MaybeObjectSlot are expected here");
  typename TSlot::TObject object = *slot;
  HeapObject target_object;
  if (object.GetHeapObjectIfStrong(&target_object)) {
    collector_->RecordSlot(host, HeapObjectSlot(slot), target_object);
    MarkObject(host, target_object);
  } else if (TSlot::kCanBeWeak && object.GetHeapObjectIfWeak(&target_object)) {
    if (marking_state()->IsBlackOrGrey(target_object)) {
      // Weak references with live values are directly processed here to reduce
      // the processing time of weak cells during the main GC pause.
      collector_->RecordSlot(host, HeapObjectSlot(slot), target_object);
    } else {
      // If we do not know about liveness of values of weak cells, we have to
      // process them when we know the liveness of the whole transitive
      // closure.
      collector_->AddWeakReference(host, HeapObjectSlot(slot));
    }
  }
}

// class template arguments
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
// method template arguments
template <typename TSlot>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
                    MarkingState>::VisitPointersImpl(HeapObject host,
                                                     TSlot start, TSlot end) {
  for (TSlot p = start; p < end; ++p) {
    VisitPointer(host, p);
  }
}

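// Pointers embedded in Code objects are recorded as relocation slots. Targets
// that the code holds only weakly are deferred instead of being marked, so
// that dead embedded objects can later be cleared from the code.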
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
                    MarkingState>::VisitEmbeddedPointer(Code host,
                                                        RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  HeapObject object = HeapObject::cast(rinfo->target_object());
  collector_->RecordRelocSlot(host, rinfo, object);
  if (!marking_state()->IsBlackOrGrey(object)) {
    if (host->IsWeakObject(object)) {
      collector_->AddWeakObjectInCode(object, host);
    } else {
      MarkObject(host, object);
    }
  }
}

template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
                    MarkingState>::VisitCodeTarget(Code host,
                                                   RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeTargetMode(rinfo->rmode()));
  Code target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  collector_->RecordRelocSlot(host, rinfo, target);
  MarkObject(host, target);
}

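// Marks a descriptor array black without pushing it onto the marking
// worklist. Only its header fields are visited here; the descriptor slots
// themselves are visited per map via VisitDescriptors, so shared descriptor
// arrays are only scanned for the descriptors each map actually owns.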
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
void MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
    MarkDescriptorArrayBlack(HeapObject host, DescriptorArray descriptors) {
  // Note that WhiteToBlack is not sufficient here because it fails if the
  // descriptor array is grey. So we need to do two steps: WhiteToGrey and
  // GreyToBlack. Alternatively, we could check WhiteToGrey || WhiteToBlack.
  if (marking_state()->WhiteToGrey(descriptors)) {
    if (retaining_path_mode == TraceRetainingPathMode::kEnabled &&
        V8_UNLIKELY(FLAG_track_retaining_path)) {
      heap_->AddRetainer(host, descriptors);
    }
  }
  if (marking_state()->GreyToBlack(descriptors)) {
    VisitPointers(descriptors, descriptors->GetFirstPointerSlot(),
                  descriptors->GetDescriptorSlot(0));
  }
  DCHECK(marking_state()->IsBlack(descriptors));
}

template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
                    MarkingState>::MarkObject(HeapObject host,
                                              HeapObject object) {
  if (marking_state()->WhiteToGrey(object)) {
    marking_worklist()->Push(object);
    if (retaining_path_mode == TraceRetainingPathMode::kEnabled &&
        V8_UNLIKELY(FLAG_track_retaining_path)) {
      heap_->AddRetainer(host, object);
    }
  }
}

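// In incremental mode, large FixedArrays on pages with a progress bar are
// scanned in chunks of kProgressBarScanningChunk. A partially scanned array
// is pushed back onto the worklist and the remaining work is reported to the
// incremental marker, so a single huge array does not block a marking step.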
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
    VisitFixedArrayIncremental(Map map, FixedArray object) {
  MemoryChunk* chunk = MemoryChunk::FromHeapObject(object);
  int size = FixedArray::BodyDescriptor::SizeOf(map, object);
  if (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR)) {
    DCHECK(FLAG_use_marking_progress_bar);
    DCHECK(heap_->IsLargeObject(object));
    int start =
        Max(FixedArray::BodyDescriptor::kStartOffset, chunk->progress_bar());
    int end = Min(size, start + kProgressBarScanningChunk);
    if (start < end) {
      VisitPointers(object, HeapObject::RawField(object, start),
                    HeapObject::RawField(object, end));
      chunk->set_progress_bar(end);
      if (end < size) {
        DCHECK(marking_state()->IsBlack(object));
        // The object can be pushed back onto the marking worklist only after
        // the progress bar was updated.
        marking_worklist()->Push(object);
        heap_->incremental_marking()->NotifyIncompleteScanOfObject(
            size - (end - start));
      }
    }
  } else {
    FixedArray::BodyDescriptor::IterateBody(map, object, size, this);
  }
  return size;
}

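// Visits only the descriptors that have not yet been marked in the current
// mark-compact epoch, i.e. the range [old_marked, new_marked). This keeps
// shared descriptor arrays from being rescanned for every map that uses them.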
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
void MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
    VisitDescriptors(DescriptorArray descriptors,
                     int number_of_own_descriptors) {
  // Updating the number of marked descriptors is supported only for black
  // descriptor arrays.
  DCHECK(marking_state()->IsBlack(descriptors));
  int16_t new_marked = static_cast<int16_t>(number_of_own_descriptors);
  int16_t old_marked = descriptors->UpdateNumberOfMarkedDescriptors(
      mark_compact_epoch_, new_marked);
  if (old_marked < new_marked) {
    VisitPointers(descriptors,
                  MaybeObjectSlot(descriptors->GetDescriptorSlot(old_marked)),
                  MaybeObjectSlot(descriptors->GetDescriptorSlot(new_marked)));
  }
}

void MarkCompactCollector::MarkObject(HeapObject host, HeapObject obj) {
  if (marking_state()->WhiteToGrey(obj)) {
    marking_worklist()->Push(obj);
    if (V8_UNLIKELY(FLAG_track_retaining_path)) {
      heap_->AddRetainer(host, obj);
    }
  }
}

void MarkCompactCollector::MarkRootObject(Root root, HeapObject obj) {
  if (marking_state()->WhiteToGrey(obj)) {
    marking_worklist()->Push(obj);
    if (V8_UNLIKELY(FLAG_track_retaining_path)) {
      heap_->AddRetainingRoot(root, obj);
    }
  }
}

#ifdef ENABLE_MINOR_MC

void MinorMarkCompactCollector::MarkRootObject(HeapObject obj) {
  if (Heap::InYoungGeneration(obj) &&
      non_atomic_marking_state_.WhiteToGrey(obj)) {
    worklist_->Push(kMainThread, obj);
  }
}

#endif

void MarkCompactCollector::MarkExternallyReferencedObject(HeapObject obj) {
  if (marking_state()->WhiteToGrey(obj)) {
    marking_worklist()->Push(obj);
    if (V8_UNLIKELY(FLAG_track_retaining_path)) {
      heap_->AddRetainingRoot(Root::kWrapperTracing, obj);
    }
  }
}

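// Records a slot in the OLD_TO_OLD remembered set if it points into an
// evacuation candidate page, so the slot can be updated after the target has
// moved. Slots on pages that skip evacuation slot recording are ignored.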
void MarkCompactCollector::RecordSlot(HeapObject object, ObjectSlot slot,
                                      HeapObject target) {
  RecordSlot(object, HeapObjectSlot(slot), target);
}

void MarkCompactCollector::RecordSlot(HeapObject object, HeapObjectSlot slot,
                                      HeapObject target) {
  Page* target_page = Page::FromHeapObject(target);
  Page* source_page = Page::FromHeapObject(object);
  if (target_page->IsEvacuationCandidate<AccessMode::ATOMIC>() &&
      !source_page->ShouldSkipEvacuationSlotRecording<AccessMode::ATOMIC>()) {
    RememberedSet<OLD_TO_OLD>::Insert(source_page, slot.address());
  }
}

void MarkCompactCollector::AddTransitionArray(TransitionArray array) {
  weak_objects_.transition_arrays.Push(kMainThread, array);
}

void MarkCompactCollector::AddBytecodeFlushingCandidate(
    SharedFunctionInfo flush_candidate) {
  weak_objects_.bytecode_flushing_candidates.Push(kMainThread, flush_candidate);
}

void MarkCompactCollector::AddFlushedJSFunction(JSFunction flushed_function) {
  weak_objects_.flushed_js_functions.Push(kMainThread, flushed_function);
}

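// LiveObjectRange iterates the live objects of a memory chunk by walking its
// mark bitmap. Depending on the iteration mode it yields black objects, grey
// objects, or both; filler objects (e.g. left by slack tracking or left
// trimming) are filtered out.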
template <LiveObjectIterationMode mode>
LiveObjectRange<mode>::iterator::iterator(MemoryChunk* chunk, Bitmap* bitmap,
                                          Address start)
    : chunk_(chunk),
      one_word_filler_map_(
          ReadOnlyRoots(chunk->heap()).one_pointer_filler_map()),
      two_word_filler_map_(
          ReadOnlyRoots(chunk->heap()).two_pointer_filler_map()),
      free_space_map_(ReadOnlyRoots(chunk->heap()).free_space_map()),
      it_(chunk, bitmap) {
  it_.Advance(Bitmap::IndexToCell(
      Bitmap::CellAlignIndex(chunk_->AddressToMarkbitIndex(start))));
  if (!it_.Done()) {
    cell_base_ = it_.CurrentCellBase();
    current_cell_ = *it_.CurrentCell();
    AdvanceToNextValidObject();
  }
}

template <LiveObjectIterationMode mode>
typename LiveObjectRange<mode>::iterator& LiveObjectRange<mode>::iterator::
operator++() {
  AdvanceToNextValidObject();
  return *this;
}

template <LiveObjectIterationMode mode>
typename LiveObjectRange<mode>::iterator LiveObjectRange<mode>::iterator::
operator++(int) {
  iterator retval = *this;
  ++(*this);
  return retval;
}

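// Scans the mark bitmap cell by cell for the next live object. An object is
// black when the two consecutive mark bits at its start are both set and grey
// when only the first bit is set; for black objects, the mark bits up to the
// object's end are cleared in the local cell copy so that a black area is
// skipped as a whole.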
template <LiveObjectIterationMode mode>
void LiveObjectRange<mode>::iterator::AdvanceToNextValidObject() {
  while (!it_.Done()) {
    HeapObject object;
    int size = 0;
    while (current_cell_ != 0) {
      uint32_t trailing_zeros = base::bits::CountTrailingZeros(current_cell_);
      Address addr = cell_base_ + trailing_zeros * kTaggedSize;

      // Clear the first bit of the found object.
      current_cell_ &= ~(1u << trailing_zeros);

      uint32_t second_bit_index = 0;
      if (trailing_zeros >= Bitmap::kBitIndexMask) {
        second_bit_index = 0x1;
        // The overlapping case; there has to exist a cell after the current
        // cell.
        // However, if there is a black area at the end of the page, and the
        // last word is a one word filler, we are not allowed to advance. In
        // that case we can return immediately.
        if (!it_.Advance()) {
          DCHECK(HeapObject::FromAddress(addr)->map() == one_word_filler_map_);
          current_object_ = HeapObject();
          return;
        }
        cell_base_ = it_.CurrentCellBase();
        current_cell_ = *it_.CurrentCell();
      } else {
        second_bit_index = 1u << (trailing_zeros + 1);
      }

      Map map;
      if (current_cell_ & second_bit_index) {
        // We found a black object. If the black object is within a black area,
        // make sure that we skip all set bits in the black area until the
        // object ends.
        HeapObject black_object = HeapObject::FromAddress(addr);
        map = Map::cast(ObjectSlot(addr).Acquire_Load());
        size = black_object->SizeFromMap(map);
        Address end = addr + size - kTaggedSize;
        // One word filler objects do not borrow the second mark bit. We have
        // to jump over the advancing and clearing part.
        // Note that we know that we are at a one word filler when
        // object_start + object_size - kTaggedSize == object_start.
        if (addr != end) {
          DCHECK_EQ(chunk_, MemoryChunk::FromAddress(end));
          uint32_t end_mark_bit_index = chunk_->AddressToMarkbitIndex(end);
          unsigned int end_cell_index =
              end_mark_bit_index >> Bitmap::kBitsPerCellLog2;
          MarkBit::CellType end_index_mask =
              1u << Bitmap::IndexInCell(end_mark_bit_index);
          if (it_.Advance(end_cell_index)) {
            cell_base_ = it_.CurrentCellBase();
            current_cell_ = *it_.CurrentCell();
          }

          // Clear all bits in current_cell, including the end index.
          current_cell_ &= ~(end_index_mask + end_index_mask - 1);
        }

        if (mode == kBlackObjects || mode == kAllLiveObjects) {
          object = black_object;
        }
      } else if ((mode == kGreyObjects || mode == kAllLiveObjects)) {
        map = Map::cast(ObjectSlot(addr).Acquire_Load());
        object = HeapObject::FromAddress(addr);
        size = object->SizeFromMap(map);
      }

      // We found a live object.
      if (!object.is_null()) {
        // Do not use IsFiller() here. This may cause a data race for reading
        // out the instance type when a new map is concurrently written into
        // this object while iterating over the object.
        if (map == one_word_filler_map_ || map == two_word_filler_map_ ||
            map == free_space_map_) {
          // There are two reasons why we can get black or grey fillers:
          // 1) Black areas together with slack tracking may result in black
          // one word filler objects.
          // 2) Left trimming may leave black or grey fillers behind because we
          // do not clear the old location of the object start.
          // We filter these objects out in the iterator.
          object = HeapObject();
        } else {
          break;
        }
      }
    }

    if (current_cell_ == 0) {
      if (it_.Advance()) {
        cell_base_ = it_.CurrentCellBase();
        current_cell_ = *it_.CurrentCell();
      }
    }
    if (!object.is_null()) {
      current_object_ = object;
      current_size_ = size;
      return;
    }
  }
  current_object_ = HeapObject();
}

template <LiveObjectIterationMode mode>
typename LiveObjectRange<mode>::iterator LiveObjectRange<mode>::begin() {
  return iterator(chunk_, bitmap_, start_);
}

template <LiveObjectIterationMode mode>
typename LiveObjectRange<mode>::iterator LiveObjectRange<mode>::end() {
  return iterator(chunk_, bitmap_, end_);
}

Isolate* MarkCompactCollectorBase::isolate() { return heap()->isolate(); }

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_MARK_COMPACT_INL_H_