Line data Source code
1 : // Copyright 2015 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/heap/scavenger.h"
6 :
7 : #include "src/contexts.h"
8 : #include "src/heap/heap-inl.h"
9 : #include "src/heap/incremental-marking.h"
10 : #include "src/heap/objects-visiting-inl.h"
11 : #include "src/heap/scavenger-inl.h"
12 : #include "src/isolate.h"
13 : #include "src/log.h"
14 : #include "src/profiler/heap-profiler.h"
15 :
16 : namespace v8 {
17 : namespace internal {
18 :
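 : // Logging and profiling support is selected via a template parameter so
 : // that the common no-logging case pays no per-object runtime check for
 : // move events.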
19 : enum LoggingAndProfiling {
20 : LOGGING_AND_PROFILING_ENABLED,
21 : LOGGING_AND_PROFILING_DISABLED
22 : };
23 :
24 :
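 : // TRANSFER_MARKS is used while incremental marking is active so that mark
 : // bits follow objects to their new locations; IGNORE_MARKS is the fast
 : // path taken when no marking is in progress.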
25 : enum MarksHandling { TRANSFER_MARKS, IGNORE_MARKS };
26 :
27 : template <MarksHandling marks_handling,
28 : LoggingAndProfiling logging_and_profiling_mode>
29 : class ScavengingVisitor : public StaticVisitorBase {
30 : public:
31 232072 : static void Initialize() {
32 : table_.Register(kVisitSeqOneByteString, &EvacuateSeqOneByteString);
33 : table_.Register(kVisitSeqTwoByteString, &EvacuateSeqTwoByteString);
34 : table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate);
35 : table_.Register(kVisitThinString, &EvacuateThinString);
36 : table_.Register(kVisitByteArray, &EvacuateByteArray);
37 : table_.Register(kVisitFixedArray, &EvacuateFixedArray);
38 : table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray);
39 : table_.Register(kVisitFixedTypedArray, &EvacuateFixedTypedArray);
40 : table_.Register(kVisitFixedFloat64Array, &EvacuateFixedFloat64Array);
41 : table_.Register(kVisitJSArrayBuffer,
42 : &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);
43 :
44 : table_.Register(
45 : kVisitNativeContext,
46 : &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
47 : Context::kSize>);
48 :
49 : table_.Register(
50 : kVisitConsString,
51 : &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
52 : ConsString::kSize>);
53 :
54 : table_.Register(
55 : kVisitSlicedString,
56 : &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
57 : SlicedString::kSize>);
58 :
59 : table_.Register(
60 : kVisitSymbol,
61 : &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
62 : Symbol::kSize>);
63 :
64 : table_.Register(
65 : kVisitSharedFunctionInfo,
66 : &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
67 : SharedFunctionInfo::kSize>);
68 :
69 : table_.Register(kVisitJSWeakCollection,
70 : &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);
71 :
72 : table_.Register(kVisitJSRegExp,
73 : &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);
74 :
75 : table_.Register(kVisitJSFunction, &EvacuateJSFunction);
76 :
77 : table_.Register(kVisitDataObject,
78 : &ObjectEvacuationStrategy<DATA_OBJECT>::Visit);
79 :
80 : table_.Register(kVisitJSObjectFast,
81 : &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);
82 : table_.Register(kVisitJSObject,
83 : &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);
84 :
85 : table_.Register(kVisitJSApiObject,
86 : &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);
87 :
88 : table_.Register(kVisitStruct,
89 : &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);
90 232072 : }
91 :
92 : static VisitorDispatchTable<ScavengingCallback>* GetTable() {
93 : return &table_;
94 : }
95 :
96 199 : static void EvacuateThinStringNoShortcut(Map* map, HeapObject** slot,
97 : HeapObject* object) {
98 199 : EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object,
99 : ThinString::kSize);
100 199 : }
101 :
102 : private:
103 : enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };
104 :
105 5844 : static void RecordCopiedObject(Heap* heap, HeapObject* obj) {
106 : bool should_record = false;
107 : #ifdef DEBUG
108 : should_record = FLAG_heap_stats;
109 : #endif
110 5844 : should_record = should_record || FLAG_log_gc;
111 5844 : if (should_record) {
112 0 : if (heap->new_space()->Contains(obj)) {
113 0 : heap->new_space()->RecordAllocation(obj);
114 : } else {
115 0 : heap->new_space()->RecordPromotion(obj);
116 : }
117 : }
118 5844 : }
119 :
120 : // Helper function used by SemiSpaceCopyObject and PromoteObject to copy a
121 : // source object to an allocated target object and update the forwarding
122 : // pointer in the source object.
123 : INLINE(static void MigrateObject(Heap* heap, HeapObject* source,
124 : HeapObject* target, int size)) {
125 : // If we migrate into to-space, then the to-space top pointer should be
126 : // right after the target object. Incorporate double alignment
127 : // over-allocation.
128 : DCHECK(!heap->InToSpace(target) ||
129 : target->address() + size == heap->new_space()->top() ||
130 : target->address() + size + kPointerSize == heap->new_space()->top());
131 :
132 : // Make sure that we do not overwrite the promotion queue which is at
133 : // the end of to-space.
134 : DCHECK(!heap->InToSpace(target) ||
135 : heap->promotion_queue()->IsBelowPromotionQueue(
136 : heap->new_space()->top()));
137 :
138 : // Copy the content of source to target.
139 116355462 : heap->CopyBlock(target->address(), source->address(), size);
140 :
141 : // Set the forwarding address.
142 116355462 : source->set_map_word(MapWord::FromForwardingAddress(target));
143 :
144 : if (logging_and_profiling_mode == LOGGING_AND_PROFILING_ENABLED) {
145 : // Update NewSpace stats if necessary.
146 5844 : RecordCopiedObject(heap, target);
147 5844 : heap->OnMoveEvent(target, source, size);
148 : }
149 :
150 : if (marks_handling == TRANSFER_MARKS) {
151 : IncrementalMarking::TransferColor(source, target);
152 : }
153 : }
154 :
155 : template <AllocationAlignment alignment>
156 70507794 : static inline bool SemiSpaceCopyObject(Map* map, HeapObject** slot,
157 : HeapObject* object, int object_size) {
158 141011729 : Heap* heap = map->GetHeap();
159 :
160 : DCHECK(heap->AllowedToBeMigrated(object, NEW_SPACE));
161 : AllocationResult allocation =
162 : heap->new_space()->AllocateRaw(object_size, alignment);
163 :
164 : HeapObject* target = NULL; // Initialization to please compiler.
165 70507794 : if (allocation.To(&target)) {
166 : // Order is important here: Set the promotion limit before storing a
167 : // filler for double alignment or migrating the object. Otherwise we
168 : // may end up overwriting promotion queue entries when we migrate the
169 : // object.
170 70503935 : heap->promotion_queue()->SetNewLimit(heap->new_space()->top());
171 :
172 : MigrateObject(heap, object, target, object_size);
173 :
174 : // Update slot to new target.
175 70503935 : *slot = target;
176 :
177 : heap->IncrementSemiSpaceCopiedObjectSize(object_size);
178 70503935 : return true;
179 : }
180 : return false;
181 : }
182 :
183 :
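 :   // Promotes an object into the old generation. Objects that may contain
 :   // pointers are additionally pushed onto the promotion queue so that
 :   // their fields can later be re-scanned for references into new space.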
184 : template <ObjectContents object_contents, AllocationAlignment alignment>
185 45851527 : static inline bool PromoteObject(Map* map, HeapObject** slot,
186 : HeapObject* object, int object_size) {
187 45851527 : Heap* heap = map->GetHeap();
188 :
189 : AllocationResult allocation =
190 45851527 : heap->old_space()->AllocateRaw(object_size, alignment);
191 :
192 : HeapObject* target = NULL; // Initialization to please compiler.
193 45851527 : if (allocation.To(&target)) {
194 : MigrateObject(heap, object, target, object_size);
195 :
196 : // Update slot to new target using CAS. A concurrent sweeper thread may
197 : // filter the slot concurrently.
198 45851527 : HeapObject* old = *slot;
199 : base::Release_CompareAndSwap(reinterpret_cast<base::AtomicWord*>(slot),
200 : reinterpret_cast<base::AtomicWord>(old),
201 45851527 : reinterpret_cast<base::AtomicWord>(target));
202 :
203 : if (object_contents == POINTER_OBJECT) {
204 : // TODO(mlippautz): Query collector for marking state.
205 36206312 : heap->promotion_queue()->insert(
206 : target, object_size,
207 36206312 : ObjectMarking::IsBlack(object, MarkingState::Internal(object)));
208 : }
209 45851527 : heap->IncrementPromotedObjectsSize(object_size);
210 45851527 : return true;
211 : }
212 : return false;
213 : }
214 :
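 :   // Central evacuation routine: if the heap does not want the object
 :   // promoted yet, try a semi-space copy first; otherwise (or if the copy
 :   // fails due to fragmentation) try promotion into old space; as a last
 :   // resort retry the semi-space copy before failing with a fatal
 :   // out-of-memory error.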
215 : template <ObjectContents object_contents, AllocationAlignment alignment>
216 116355462 : static inline void EvacuateObject(Map* map, HeapObject** slot,
217 : HeapObject* object, int object_size) {
218 : SLOW_DCHECK(object_size <= Page::kAllocatableMemory);
219 : SLOW_DCHECK(object->Size() == object_size);
220 116355462 : Heap* heap = map->GetHeap();
221 :
222 232710924 : if (!heap->ShouldBePromoted(object->address(), object_size)) {
223 : // A semi-space copy may fail due to fragmentation. In that case, we
224 : // try to promote the object.
225 70507794 : if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) {
226 : return;
227 : }
228 : }
229 :
230 45851527 : if (PromoteObject<object_contents, alignment>(map, slot, object,
231 : object_size)) {
232 : return;
233 : }
234 :
235 : // If promotion failed, we try to copy the object to the other semi-space.
236 0 : if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) return;
237 :
238 0 : FatalProcessOutOfMemory("Scavenger: semi-space copy\n");
239 : }
240 :
241 2538480 : static inline void EvacuateJSFunction(Map* map, HeapObject** slot,
242 : HeapObject* object) {
243 2538480 : ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object);
244 :
245 2538480 : if (marks_handling == IGNORE_MARKS) return;
246 :
247 : MapWord map_word = object->map_word();
248 : DCHECK(map_word.IsForwardingAddress());
249 50989 : HeapObject* target = map_word.ToForwardingAddress();
250 :
251 : // TODO(mlippautz): Notify collector of this object so we don't have to
252 : // retrieve the state out of thin air.
253 50989 : if (ObjectMarking::IsBlack(target, MarkingState::Internal(target))) {
254 : // This object is black and it might not be rescanned by the marker.
255 : // We should explicitly record the code entry slot for compaction
256 : // because promotion queue processing (IteratePromotedObjectPointers)
257 : // will miss it, as it is not HeapObject-tagged.
258 : Address code_entry_slot =
259 4679 : target->address() + JSFunction::kCodeEntryOffset;
260 4679 : Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot));
261 4679 : map->GetHeap()->mark_compact_collector()->RecordCodeEntrySlot(
262 4679 : target, code_entry_slot, code);
263 : }
264 : }
265 :
266 12673986 : static inline void EvacuateFixedArray(Map* map, HeapObject** slot,
267 : HeapObject* object) {
268 : int length = reinterpret_cast<FixedArray*>(object)->synchronized_length();
269 : int object_size = FixedArray::SizeFor(length);
270 12673986 : EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object,
271 : object_size);
272 12673986 : }
273 :
274 111351 : static inline void EvacuateFixedDoubleArray(Map* map, HeapObject** slot,
275 : HeapObject* object) {
276 : int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
277 : int object_size = FixedDoubleArray::SizeFor(length);
278 111351 : EvacuateObject<DATA_OBJECT, kDoubleAligned>(map, slot, object, object_size);
279 111351 : }
280 :
281 6595 : static inline void EvacuateFixedTypedArray(Map* map, HeapObject** slot,
282 : HeapObject* object) {
283 : int object_size = reinterpret_cast<FixedTypedArrayBase*>(object)->size();
284 6595 : EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object,
285 : object_size);
286 6595 : }
287 :
288 240293 : static inline void EvacuateFixedFloat64Array(Map* map, HeapObject** slot,
289 : HeapObject* object) {
290 : int object_size = reinterpret_cast<FixedFloat64Array*>(object)->size();
291 240293 : EvacuateObject<POINTER_OBJECT, kDoubleAligned>(map, slot, object,
292 : object_size);
293 240293 : }
294 :
295 25407 : static inline void EvacuateByteArray(Map* map, HeapObject** slot,
296 : HeapObject* object) {
297 : int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
298 25407 : EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size);
299 25407 : }
300 :
301 13846402 : static inline void EvacuateSeqOneByteString(Map* map, HeapObject** slot,
302 : HeapObject* object) {
303 : int object_size = SeqOneByteString::cast(object)
304 : ->SeqOneByteStringSize(map->instance_type());
305 13846402 : EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size);
306 13846402 : }
307 :
308 9437858 : static inline void EvacuateSeqTwoByteString(Map* map, HeapObject** slot,
309 : HeapObject* object) {
310 : int object_size = SeqTwoByteString::cast(object)
311 : ->SeqTwoByteStringSize(map->instance_type());
312 9437858 : EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size);
313 9437858 : }
314 :
315 56418237 : static inline void EvacuateShortcutCandidate(Map* map, HeapObject** slot,
316 : HeapObject* object) {
317 : DCHECK(IsShortcutCandidate(map->instance_type()));
318 :
319 55049349 : Heap* heap = map->GetHeap();
320 :
321 55049349 : if (marks_handling == IGNORE_MARKS &&
322 : ConsString::cast(object)->unchecked_second() == heap->empty_string()) {
323 : HeapObject* first =
324 : HeapObject::cast(ConsString::cast(object)->unchecked_first());
325 :
326 925906 : *slot = first;
327 :
328 925906 : if (!heap->InNewSpace(first)) {
329 : object->set_map_word(MapWord::FromForwardingAddress(first));
330 : return;
331 : }
332 :
333 : MapWord first_word = first->map_word();
334 924135 : if (first_word.IsForwardingAddress()) {
335 615 : HeapObject* target = first_word.ToForwardingAddress();
336 :
337 615 : *slot = target;
338 : object->set_map_word(MapWord::FromForwardingAddress(target));
339 : return;
340 : }
341 :
342 923520 : Scavenger::ScavengeObjectSlow(slot, first);
343 923520 : object->set_map_word(MapWord::FromForwardingAddress(*slot));
344 : return;
345 : }
346 :
347 : int object_size = ConsString::kSize;
348 55492331 : EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object,
349 : object_size);
350 : }
351 :
352 288929 : static inline void EvacuateThinString(Map* map, HeapObject** slot,
353 : HeapObject* object) {
354 : if (marks_handling == IGNORE_MARKS) {
355 : HeapObject* actual = ThinString::cast(object)->actual();
356 284972 : *slot = actual;
357 : // ThinStrings always refer to internalized strings, which are
358 : // always in old space.
359 : DCHECK(!map->GetHeap()->InNewSpace(actual));
360 : object->set_map_word(MapWord::FromForwardingAddress(actual));
361 288929 : return;
362 : }
363 :
364 3957 : EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object,
365 : ThinString::kSize);
366 : }
367 :
368 : template <ObjectContents object_contents>
369 : class ObjectEvacuationStrategy {
370 : public:
371 : template <int object_size>
372 202725 : static inline void VisitSpecialized(Map* map, HeapObject** slot,
373 : HeapObject* object) {
374 202725 : EvacuateObject<object_contents, kWordAligned>(map, slot, object,
375 : object_size);
376 202725 : }
377 :
378 24314358 : static inline void Visit(Map* map, HeapObject** slot, HeapObject* object) {
379 : int object_size = map->instance_size();
380 24314358 : EvacuateObject<object_contents, kWordAligned>(map, slot, object,
381 : object_size);
382 24314358 : }
383 : };
384 :
385 : static VisitorDispatchTable<ScavengingCallback> table_;
386 : };
387 :
388 : template <MarksHandling marks_handling,
389 : LoggingAndProfiling logging_and_profiling_mode>
390 : VisitorDispatchTable<ScavengingCallback>
391 : ScavengingVisitor<marks_handling, logging_and_profiling_mode>::table_;
392 :
393 : // static
394 58018 : void Scavenger::Initialize() {
395 : ScavengingVisitor<TRANSFER_MARKS,
396 58018 : LOGGING_AND_PROFILING_DISABLED>::Initialize();
397 58018 : ScavengingVisitor<IGNORE_MARKS, LOGGING_AND_PROFILING_DISABLED>::Initialize();
398 : ScavengingVisitor<TRANSFER_MARKS,
399 58018 : LOGGING_AND_PROFILING_ENABLED>::Initialize();
400 58018 : ScavengingVisitor<IGNORE_MARKS, LOGGING_AND_PROFILING_ENABLED>::Initialize();
401 58018 : }
402 :
403 :
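 : // Slow path for ScavengeObject: the object has not been forwarded yet, so
 : // dispatch on its map to the evacuation callback registered for its
 : // visitor id.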
404 : // static
405 117566340 : void Scavenger::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
406 : SLOW_DCHECK(object->GetIsolate()->heap()->InFromSpace(object));
407 : MapWord first_word = object->map_word();
408 : SLOW_DCHECK(!first_word.IsForwardingAddress());
409 : Map* map = first_word.ToMap();
410 117566340 : Scavenger* scavenger = map->GetHeap()->scavenge_collector_;
411 117566340 : scavenger->scavenging_visitors_table_.GetVisitor(map)(map, p, object);
412 117566340 : }
413 :
414 :
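 : // Selects one of the four statically initialized dispatch tables, based
 : // on whether incremental marking is active and whether object moves need
 : // to be logged or profiled.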
415 70019 : void Scavenger::SelectScavengingVisitorsTable() {
416 : bool logging_and_profiling =
417 138378 : FLAG_verify_predictable || isolate()->logger()->is_logging() ||
418 135895 : isolate()->is_profiling() ||
419 133412 : (isolate()->heap_profiler() != NULL &&
420 66706 : isolate()->heap_profiler()->is_tracking_object_moves());
421 :
422 138378 : if (!heap()->incremental_marking()->IsMarking()) {
423 68359 : if (!logging_and_profiling) {
424 : scavenging_visitors_table_.CopyFrom(
425 : ScavengingVisitor<IGNORE_MARKS,
426 : LOGGING_AND_PROFILING_DISABLED>::GetTable());
427 : } else {
428 : scavenging_visitors_table_.CopyFrom(
429 : ScavengingVisitor<IGNORE_MARKS,
430 : LOGGING_AND_PROFILING_ENABLED>::GetTable());
431 : }
432 : } else {
433 830 : if (!logging_and_profiling) {
434 : scavenging_visitors_table_.CopyFrom(
435 : ScavengingVisitor<TRANSFER_MARKS,
436 : LOGGING_AND_PROFILING_DISABLED>::GetTable());
437 : } else {
438 : scavenging_visitors_table_.CopyFrom(
439 : ScavengingVisitor<TRANSFER_MARKS,
440 : LOGGING_AND_PROFILING_ENABLED>::GetTable());
441 : }
442 :
443 1660 : if (heap()->incremental_marking()->IsCompacting()) {
444 : // When compacting, forbid short-circuiting of cons strings.
445 : // Scavenging code relies on the fact that a new-space object
446 : // can't be evacuated into an evacuation candidate, but
447 : // short-circuiting violates this assumption.
448 : scavenging_visitors_table_.Register(
449 : StaticVisitorBase::kVisitShortcutCandidate,
450 : scavenging_visitors_table_.GetVisitorById(
451 : StaticVisitorBase::kVisitConsString));
452 : scavenging_visitors_table_.Register(
453 : StaticVisitorBase::kVisitThinString,
454 : &ScavengingVisitor<TRANSFER_MARKS, LOGGING_AND_PROFILING_DISABLED>::
455 : EvacuateThinStringNoShortcut);
456 : }
457 : }
458 69189 : }
459 :
460 :
461 69189 : Isolate* Scavenger::isolate() { return heap()->isolate(); }
462 :
463 122970889 : void RootScavengeVisitor::VisitRootPointer(Root root, Object** p) {
464 122970889 : ScavengePointer(p);
465 122970889 : }
466 :
467 4562077 : void RootScavengeVisitor::VisitRootPointers(Root root, Object** start,
468 : Object** end) {
469 : // Copy all HeapObject pointers in [start, end)
470 4562077 : for (Object** p = start; p < end; p++) ScavengePointer(p);
471 4562077 : }
472 :
473 231464421 : void RootScavengeVisitor::ScavengePointer(Object** p) {
474 231464421 : Object* object = *p;
475 462928842 : if (!heap_->InNewSpace(object)) return;
476 :
477 : Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(p),
478 8933718 : reinterpret_cast<HeapObject*>(object));
479 : }
480 :
481 : } // namespace internal
482 : } // namespace v8