Line data Source code
1 : // Copyright 2015 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #ifndef V8_HEAP_SCAVENGER_INL_H_
6 : #define V8_HEAP_SCAVENGER_INL_H_
7 :
8 : #include "src/heap/scavenger.h"
9 :
10 : #include "src/heap/incremental-marking-inl.h"
11 : #include "src/heap/local-allocator-inl.h"
12 : #include "src/objects-inl.h"
13 : #include "src/objects/map.h"
14 : #include "src/objects/slots-inl.h"
15 :
16 : namespace v8 {
17 : namespace internal {
18 :
// Forwards to the shared promotion list, tagged with this view's task id.
void Scavenger::PromotionList::View::PushRegularObject(HeapObject object,
                                                       int size) {
  promotion_list_->PushRegularObject(task_id_, object, size);
}
23 :
// Forwards to the shared promotion list, tagged with this view's task id.
void Scavenger::PromotionList::View::PushLargeObject(HeapObject object, Map map,
                                                     int size) {
  promotion_list_->PushLargeObject(task_id_, object, map, size);
}
28 :
// Note: emptiness is a property of the whole list, not just this task's view.
bool Scavenger::PromotionList::View::IsEmpty() {
  return promotion_list_->IsEmpty();
}
32 :
// Size of this task's local push segment on the shared promotion list.
size_t Scavenger::PromotionList::View::LocalPushSegmentSize() {
  return promotion_list_->LocalPushSegmentSize(task_id_);
}
36 :
// Pops the next promotion work item for this task; returns false when none.
bool Scavenger::PromotionList::View::Pop(struct PromotionListEntry* entry) {
  return promotion_list_->Pop(task_id_, entry);
}
40 :
// Note: queries the shared global pool, independent of this task's view.
bool Scavenger::PromotionList::View::IsGlobalPoolEmpty() {
  return promotion_list_->IsGlobalPoolEmpty();
}
44 :
// True when this task should prioritize draining the promotion list.
bool Scavenger::PromotionList::View::ShouldEagerlyProcessPromotionList() {
  return promotion_list_->ShouldEagerlyProcessPromotionList(task_id_);
}
48 :
// Records a promoted regular-sized object for later processing by |task_id|.
void Scavenger::PromotionList::PushRegularObject(int task_id, HeapObject object,
                                                 int size) {
  regular_object_promotion_list_.Push(task_id, ObjectAndSize(object, size));
}
53 :
// Records a surviving large object. The map is carried explicitly because the
// object's own map slot holds a forwarding address by the time it is pushed
// (see HandleLargeObject).
void Scavenger::PromotionList::PushLargeObject(int task_id, HeapObject object,
                                               Map map, int size) {
  large_object_promotion_list_.Push(task_id, {object, map, size});
}
58 :
59 : bool Scavenger::PromotionList::IsEmpty() {
60 : return regular_object_promotion_list_.IsEmpty() &&
61 : large_object_promotion_list_.IsEmpty();
62 : }
63 :
64 : size_t Scavenger::PromotionList::LocalPushSegmentSize(int task_id) {
65 : return regular_object_promotion_list_.LocalPushSegmentSize(task_id) +
66 63770452 : large_object_promotion_list_.LocalPushSegmentSize(task_id);
67 : }
68 :
69 45629811 : bool Scavenger::PromotionList::Pop(int task_id,
70 : struct PromotionListEntry* entry) {
71 45629811 : ObjectAndSize regular_object;
72 45629811 : if (regular_object_promotion_list_.Pop(task_id, ®ular_object)) {
73 45542579 : entry->heap_object = regular_object.first;
74 45542579 : entry->size = regular_object.second;
75 45542579 : entry->map = entry->heap_object->map();
76 45542579 : return true;
77 : }
78 127210 : return large_object_promotion_list_.Pop(task_id, entry);
79 : }
80 :
81 350850 : bool Scavenger::PromotionList::IsGlobalPoolEmpty() {
82 578232 : return regular_object_promotion_list_.IsGlobalPoolEmpty() &&
83 350852 : large_object_promotion_list_.IsGlobalPoolEmpty();
84 : }
85 :
86 : bool Scavenger::PromotionList::ShouldEagerlyProcessPromotionList(int task_id) {
87 : // Threshold when to prioritize processing of the promotion list. Right
88 : // now we only look into the regular object list.
89 : const int kProcessPromotionListThreshold =
90 : kRegularObjectPromotionListSegmentSize / 2;
91 : return LocalPushSegmentSize(task_id) < kProcessPromotionListThreshold;
92 : }
93 :
94 : // White list for objects that for sure only contain data.
95 52054884 : bool Scavenger::ContainsOnlyData(VisitorId visitor_id) {
96 52054884 : switch (visitor_id) {
97 : case kVisitSeqOneByteString:
98 : return true;
99 : case kVisitSeqTwoByteString:
100 : return true;
101 : case kVisitByteArray:
102 : return true;
103 : case kVisitFixedDoubleArray:
104 : return true;
105 : case kVisitDataObject:
106 : return true;
107 : default:
108 : break;
109 : }
110 45649354 : return false;
111 : }
112 :
// Under ThreadSanitizer, performs a dummy acquire load on the referenced
// object's page so the race detector sees the synchronization with page
// initialization. Compiles to nothing otherwise.
void Scavenger::PageMemoryFence(MaybeObject object) {
#ifdef THREAD_SANITIZER
  // Perform a dummy acquire load to tell TSAN that there is no data race
  // with page initialization.
  HeapObject heap_object;
  if (object->GetHeapObject(&heap_object)) {
    MemoryChunk* chunk = MemoryChunk::FromAddress(heap_object->address());
    CHECK_NOT_NULL(chunk->synchronized_heap());
  }
#endif
}
124 :
// Copies |source| (|size| bytes, with map |map|) into the already allocated
// |target|, then publishes a forwarding address in |source|'s map slot via a
// release CAS. Returns false if another task won the race and migrated the
// object first; in that case |target| holds an unused copy the caller is
// expected to free.
bool Scavenger::MigrateObject(Map map, HeapObject source, HeapObject target,
                              int size) {
  // Copy the content of source to target. The map word is written separately
  // first, so only the payload after it is block-copied.
  target->set_map_word(MapWord::FromMap(map));
  heap()->CopyBlock(target->address() + kTaggedSize,
                    source->address() + kTaggedSize, size - kTaggedSize);

  // Publish the forwarding address. The release CAS orders the payload copy
  // above before any task that observes the forwarding pointer (consumed by
  // synchronized_map_word loads elsewhere).
  Object old = source->map_slot().Release_CompareAndSwap(
      map, MapWord::FromForwardingAddress(target).ToMap());
  if (old != map) {
    // Other task migrated the object.
    return false;
  }

  if (V8_UNLIKELY(is_logging_)) {
    heap()->OnMoveEvent(target, source, size);
  }

  if (is_incremental_marking_) {
    // Keep the incremental marker's color for this object across the move.
    heap()->incremental_marking()->TransferColor(source, target);
  }
  heap()->UpdateAllocationSite(map, source, &local_pretenuring_feedback_);
  return true;
}
149 :
// Tries to evacuate |object| by copying it into to-space (NEW_SPACE).
// Returns FAILURE if the allocation fails so the caller can fall back to
// promotion. If another task migrated the object concurrently, the local
// allocation is undone and |slot| is updated from the winner's forwarding
// address instead.
template <typename THeapObjectSlot>
CopyAndForwardResult Scavenger::SemiSpaceCopyObject(Map map,
                                                    THeapObjectSlot slot,
                                                    HeapObject object,
                                                    int object_size) {
  static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
                    std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
                "Only FullHeapObjectSlot and HeapObjectSlot are expected here");
  DCHECK(heap()->AllowedToBeMigrated(object, NEW_SPACE));
  AllocationAlignment alignment = HeapObject::RequiredAlignment(map);
  AllocationResult allocation =
      allocator_.Allocate(NEW_SPACE, object_size, alignment);

  HeapObject target;
  if (allocation.To(&target)) {
    DCHECK(heap()->incremental_marking()->non_atomic_marking_state()->IsWhite(
        target));
    const bool self_success = MigrateObject(map, object, target, object_size);
    if (!self_success) {
      // Lost the migration race: free our copy and forward the slot to the
      // winning task's copy, reporting which generation it landed in.
      allocator_.FreeLast(NEW_SPACE, target, object_size);
      MapWord map_word = object->synchronized_map_word();
      HeapObjectReference::Update(slot, map_word.ToForwardingAddress());
      DCHECK(!Heap::InFromSpace(*slot));
      return Heap::InToSpace(*slot)
                 ? CopyAndForwardResult::SUCCESS_YOUNG_GENERATION
                 : CopyAndForwardResult::SUCCESS_OLD_GENERATION;
    }
    HeapObjectReference::Update(slot, target);

    // The copied object still needs its own slots visited later.
    copied_list_.Push(ObjectAndSize(target, object_size));
    copied_size_ += object_size;
    return CopyAndForwardResult::SUCCESS_YOUNG_GENERATION;
  }
  return CopyAndForwardResult::FAILURE;
}
185 :
// Tries to evacuate |object| by promoting it into OLD_SPACE. Mirrors
// SemiSpaceCopyObject: returns FAILURE when the allocation fails, and on a
// lost migration race undoes the local allocation and updates |slot| from the
// winner's forwarding address.
template <typename THeapObjectSlot>
CopyAndForwardResult Scavenger::PromoteObject(Map map, THeapObjectSlot slot,
                                              HeapObject object,
                                              int object_size) {
  static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
                    std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
                "Only FullHeapObjectSlot and HeapObjectSlot are expected here");
  AllocationAlignment alignment = HeapObject::RequiredAlignment(map);
  AllocationResult allocation =
      allocator_.Allocate(OLD_SPACE, object_size, alignment);

  HeapObject target;
  if (allocation.To(&target)) {
    DCHECK(heap()->incremental_marking()->non_atomic_marking_state()->IsWhite(
        target));
    const bool self_success = MigrateObject(map, object, target, object_size);
    if (!self_success) {
      // Lost the migration race: free our copy and forward the slot to the
      // winning task's copy, reporting which generation it landed in.
      allocator_.FreeLast(OLD_SPACE, target, object_size);
      MapWord map_word = object->synchronized_map_word();
      HeapObjectReference::Update(slot, map_word.ToForwardingAddress());
      DCHECK(!Heap::InFromSpace(*slot));
      return Heap::InToSpace(*slot)
                 ? CopyAndForwardResult::SUCCESS_YOUNG_GENERATION
                 : CopyAndForwardResult::SUCCESS_OLD_GENERATION;
    }
    HeapObjectReference::Update(slot, target);
    // Promoted objects that may contain pointers still need to be visited.
    if (!ContainsOnlyData(map->visitor_id())) {
      promotion_list_.PushRegularObject(target, object_size);
    }
    promoted_size_ += object_size;
    return CopyAndForwardResult::SUCCESS_OLD_GENERATION;
  }
  return CopyAndForwardResult::FAILURE;
}
220 :
221 : SlotCallbackResult Scavenger::RememberedSetEntryNeeded(
222 : CopyAndForwardResult result) {
223 : DCHECK_NE(CopyAndForwardResult::FAILURE, result);
224 : return result == CopyAndForwardResult::SUCCESS_YOUNG_GENERATION ? KEEP_SLOT
225 116429163 : : REMOVE_SLOT;
226 : }
227 :
// Young-generation large objects are not copied; they survive in place. The
// object's map slot is CAS'ed to a self-forwarding address so concurrent
// tasks treat it as already processed; only the CAS winner records it and
// queues it for visiting. Returns true iff |object| was handled here.
bool Scavenger::HandleLargeObject(Map map, HeapObject object, int object_size) {
  // TODO(hpayer): Make this check size based, i.e.
  // object_size > kMaxRegularHeapObjectSize
  if (V8_UNLIKELY(
          FLAG_young_generation_large_objects &&
          MemoryChunk::FromHeapObject(object)->IsInNewLargeObjectSpace())) {
    DCHECK_EQ(NEW_LO_SPACE,
              MemoryChunk::FromHeapObject(object)->owner()->identity());
    if (object->map_slot().Release_CompareAndSwap(
            map, MapWord::FromForwardingAddress(object).ToMap()) == map) {
      surviving_new_large_objects_.insert({object, map});

      if (!ContainsOnlyData(map->visitor_id())) {
        promotion_list_.PushLargeObject(object, map, object_size);
      }
    }
    return true;
  }
  return false;
}
248 :
// Default evacuation path: large objects are handled in place; otherwise try
// a semi-space copy (unless the object should be promoted), then promotion,
// then a last-resort semi-space copy. Failing all three is a fatal OOM.
template <typename THeapObjectSlot>
SlotCallbackResult Scavenger::EvacuateObjectDefault(Map map,
                                                    THeapObjectSlot slot,
                                                    HeapObject object,
                                                    int object_size) {
  static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
                    std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
                "Only FullHeapObjectSlot and HeapObjectSlot are expected here");
  SLOW_DCHECK(object->SizeFromMap(map) == object_size);
  CopyAndForwardResult result;

  if (HandleLargeObject(map, object, object_size)) {
    // Large objects survive in place, so no remembered-set entry is needed.
    return REMOVE_SLOT;
  }

  SLOW_DCHECK(static_cast<size_t>(object_size) <=
              MemoryChunkLayout::AllocatableMemoryInDataPage());

  if (!heap()->ShouldBePromoted(object->address())) {
    // A semi-space copy may fail due to fragmentation. In that case, we
    // try to promote the object.
    result = SemiSpaceCopyObject(map, slot, object, object_size);
    if (result != CopyAndForwardResult::FAILURE) {
      return RememberedSetEntryNeeded(result);
    }
  }

  // We may want to promote this object if the object was already semi-space
  // copied in a previous young generation GC or if the semi-space copy above
  // failed.
  result = PromoteObject(map, slot, object, object_size);
  if (result != CopyAndForwardResult::FAILURE) {
    return RememberedSetEntryNeeded(result);
  }

  // If promotion failed, we try to copy the object to the other semi-space.
  result = SemiSpaceCopyObject(map, slot, object, object_size);
  if (result != CopyAndForwardResult::FAILURE) {
    return RememberedSetEntryNeeded(result);
  }

  heap()->FatalProcessOutOfMemory("Scavenger: semi-space copy");
  UNREACHABLE();
}
293 :
// Evacuates a ThinString by short-circuiting the slot straight to the
// internalized string it wraps, letting the ThinString itself die. During
// incremental marking the default evacuation path is taken instead.
template <typename THeapObjectSlot>
SlotCallbackResult Scavenger::EvacuateThinString(Map map, THeapObjectSlot slot,
                                                 ThinString object,
                                                 int object_size) {
  static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
                    std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
                "Only FullHeapObjectSlot and HeapObjectSlot are expected here");
  if (!is_incremental_marking_) {
    // The ThinString should die after Scavenge, so avoid writing the proper
    // forwarding pointer and instead just signal the actual object as forwarded
    // reference.
    String actual = object->actual();
    // ThinStrings always refer to internalized strings, which are always in old
    // space.
    DCHECK(!Heap::InNewSpace(actual));
    slot.StoreHeapObject(actual);
    return REMOVE_SLOT;
  }

  return EvacuateObjectDefault(map, slot, object, object_size);
}
315 :
// Evacuates a shortcut-candidate ConsString: when its second part is the
// empty string (and we are not incrementally marking), the slot is pointed
// directly at the first part and the ConsString is forwarded to it, skipping
// the cons wrapper entirely. Otherwise falls back to the default path.
template <typename THeapObjectSlot>
SlotCallbackResult Scavenger::EvacuateShortcutCandidate(Map map,
                                                        THeapObjectSlot slot,
                                                        ConsString object,
                                                        int object_size) {
  static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
                    std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
                "Only FullHeapObjectSlot and HeapObjectSlot are expected here");
  DCHECK(IsShortcutCandidate(map->instance_type()));
  if (!is_incremental_marking_ &&
      object->unchecked_second() == ReadOnlyRoots(heap()).empty_string()) {
    HeapObject first = HeapObject::cast(object->unchecked_first());

    slot.StoreHeapObject(first);

    if (!Heap::InNewSpace(first)) {
      // |first| lives outside new space; forward the cons string to it and
      // drop the slot.
      object->map_slot().Release_Store(
          MapWord::FromForwardingAddress(first).ToMap());
      return REMOVE_SLOT;
    }

    MapWord first_word = first->synchronized_map_word();
    if (first_word.IsForwardingAddress()) {
      // |first| was already evacuated; reuse its forwarding address.
      HeapObject target = first_word.ToForwardingAddress();

      slot.StoreHeapObject(target);
      object->map_slot().Release_Store(
          MapWord::FromForwardingAddress(target).ToMap());
      return Heap::InToSpace(target) ? KEEP_SLOT : REMOVE_SLOT;
    }
    // Evacuate |first| itself, then forward the cons string to wherever it
    // ended up. NOTE: this |map| intentionally shadows the parameter.
    Map map = first_word.ToMap();
    SlotCallbackResult result =
        EvacuateObjectDefault(map, slot, first, first->SizeFromMap(map));
    object->map_slot().Release_Store(
        MapWord::FromForwardingAddress(slot.ToHeapObject()).ToMap());
    return result;
  }

  return EvacuateObjectDefault(map, slot, object, object_size);
}
356 :
// Dispatches evacuation of |source| by visitor id: thin strings and
// shortcut-candidate cons strings get specialized handling; everything else
// takes the default path.
template <typename THeapObjectSlot>
SlotCallbackResult Scavenger::EvacuateObject(THeapObjectSlot slot, Map map,
                                             HeapObject source) {
  static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
                    std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
                "Only FullHeapObjectSlot and HeapObjectSlot are expected here");
  SLOW_DCHECK(Heap::InFromSpace(source));
  SLOW_DCHECK(!MapWord::FromMap(map).IsForwardingAddress());
  int size = source->SizeFromMap(map);
  // Cannot use ::cast() below because that would add checks in debug mode
  // that require re-reading the map.
  switch (map->visitor_id()) {
    case kVisitThinString:
      // At the moment we don't allow weak pointers to thin strings.
      DCHECK(!(*slot)->IsWeak());
      return EvacuateThinString(map, slot, ThinString::unchecked_cast(source),
                                size);
    case kVisitShortcutCandidate:
      DCHECK(!(*slot)->IsWeak());
      // At the moment we don't allow weak pointers to cons strings.
      return EvacuateShortcutCandidate(
          map, slot, ConsString::unchecked_cast(source), size);
    default:
      return EvacuateObjectDefault(map, slot, source, size);
  }
}
383 :
// Scavenges the from-space |object| referenced by slot |p|: if it was already
// evacuated, updates the slot from the forwarding address (preserving the
// reference's weakness); otherwise evacuates it now. Returns KEEP_SLOT iff
// the updated referent is in to-space.
template <typename THeapObjectSlot>
SlotCallbackResult Scavenger::ScavengeObject(THeapObjectSlot p,
                                             HeapObject object) {
  static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
                    std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
                "Only FullHeapObjectSlot and HeapObjectSlot are expected here");
  DCHECK(Heap::InFromSpace(object));

  // Synchronized load that consumes the publishing CAS of MigrateObject.
  MapWord first_word = object->synchronized_map_word();

  // If the first word is a forwarding address, the object has already been
  // copied.
  if (first_word.IsForwardingAddress()) {
    HeapObject dest = first_word.ToForwardingAddress();
    DCHECK(Heap::InFromSpace(*p));
    // Preserve the weak/strong nature of the original reference.
    if ((*p)->IsWeak()) {
      p.store(HeapObjectReference::Weak(dest));
    } else {
      DCHECK((*p)->IsStrong());
      p.store(HeapObjectReference::Strong(dest));
    }
    DCHECK_IMPLIES(Heap::InNewSpace(dest),
                   (Heap::InToSpace(dest) ||
                    MemoryChunk::FromHeapObject(dest)->owner()->identity() ==
                        NEW_LO_SPACE));

    return Heap::InToSpace(dest) ? KEEP_SLOT : REMOVE_SLOT;
  }

  Map map = first_word.ToMap();
  // AllocationMementos are unrooted and shouldn't survive a scavenge.
  DCHECK_NE(ReadOnlyRoots(heap()).allocation_memento_map(), map);
  // Call the slow part of scavenge object.
  return EvacuateObject(p, map, object);
}
420 :
// Processes a remembered-set slot: scavenges the referent if it is in
// from-space, keeps the slot if it already points to to-space, and removes
// the (now redundant) slot otherwise.
template <typename TSlot>
SlotCallbackResult Scavenger::CheckAndScavengeObject(Heap* heap, TSlot slot) {
  static_assert(
      std::is_same<TSlot, FullMaybeObjectSlot>::value ||
          std::is_same<TSlot, MaybeObjectSlot>::value,
      "Only FullMaybeObjectSlot and MaybeObjectSlot are expected here");
  using THeapObjectSlot = typename TSlot::THeapObjectSlot;
  MaybeObject object = *slot;
  if (Heap::InFromSpace(object)) {
    HeapObject heap_object = object->GetHeapObject();

    SlotCallbackResult result =
        ScavengeObject(THeapObjectSlot(slot), heap_object);
    DCHECK_IMPLIES(result == REMOVE_SLOT,
                   !heap->IsInYoungGeneration((*slot)->GetHeapObject()));
    return result;
  } else if (Heap::InToSpace(object)) {
    // Already updated slot. This can happen when processing of the work list
    // is interleaved with processing roots.
    return KEEP_SLOT;
  }
  // Slots can point to "to" space if the slot has been recorded multiple
  // times in the remembered set. We remove the redundant slot now.
  return REMOVE_SLOT;
}
446 :
// Visits a range of strong object slots in |host|.
void ScavengeVisitor::VisitPointers(HeapObject host, ObjectSlot start,
                                    ObjectSlot end) {
  return VisitPointersImpl(host, start, end);
}
451 :
// Visits a range of slots in |host| that may hold weak references.
void ScavengeVisitor::VisitPointers(HeapObject host, MaybeObjectSlot start,
                                    MaybeObjectSlot end) {
  return VisitPointersImpl(host, start, end);
}
456 :
// Visits the code target of a relocation entry through a stack-local slot.
// Since code objects are never in new-space the visit must be a no-op, which
// debug builds verify.
void ScavengeVisitor::VisitCodeTarget(Code host, RelocInfo* rinfo) {
  Code target = Code::GetCodeFromTargetAddress(rinfo->target_address());
#ifdef DEBUG
  Code old_target = target;
#endif
  FullObjectSlot slot(&target);
  VisitHeapObjectImpl(slot, target);
  // Code objects are never in new-space, so the slot contents must not change.
  DCHECK_EQ(old_target, target);
}
467 :
// Visits an object embedded in code through a stack-local slot. Since
// new-space objects are never embedded into code the visit must be a no-op,
// which debug builds verify.
void ScavengeVisitor::VisitEmbeddedPointer(Code host, RelocInfo* rinfo) {
  HeapObject heap_object = rinfo->target_object();
#ifdef DEBUG
  HeapObject old_heap_object = heap_object;
#endif
  FullObjectSlot slot(&heap_object);
  VisitHeapObjectImpl(slot, heap_object);
  // We don't embed new-space objects into code, so the slot contents must not
  // change.
  DCHECK_EQ(old_heap_object, heap_object);
}
479 :
480 : template <typename TSlot>
481 : void ScavengeVisitor::VisitHeapObjectImpl(TSlot slot, HeapObject heap_object) {
482 356125780 : if (Heap::InNewSpace(heap_object)) {
483 58272436 : scavenger_->ScavengeObject(HeapObjectSlot(slot), heap_object);
484 : }
485 : }
486 :
// Visits every slot in [start, end), scavenging any referent via
// VisitHeapObjectImpl. Weak references are followed just like strong ones.
template <typename TSlot>
void ScavengeVisitor::VisitPointersImpl(HeapObject host, TSlot start,
                                        TSlot end) {
  for (TSlot slot = start; slot < end; ++slot) {
    typename TSlot::TObject object = *slot;
    HeapObject heap_object;
    // Treat weak references as strong.
    if (object.GetHeapObject(&heap_object)) {
      VisitHeapObjectImpl(slot, heap_object);
    }
  }
}
499 :
500 : } // namespace internal
501 : } // namespace v8
502 :
503 : #endif // V8_HEAP_SCAVENGER_INL_H_
|