Line data Source code
1 : // Copyright 2015 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #ifndef V8_HEAP_SCAVENGER_INL_H_
6 : #define V8_HEAP_SCAVENGER_INL_H_
7 :
8 : #include "src/heap/scavenger.h"
9 : #include "src/objects/map.h"
10 :
11 : namespace v8 {
12 : namespace internal {
13 :
14 : // White list for objects that for sure only contain data.
15 45818658 : bool Scavenger::ContainsOnlyData(VisitorId visitor_id) {
16 45818658 : switch (visitor_id) {
17 : case kVisitSeqOneByteString:
18 : return true;
19 : case kVisitSeqTwoByteString:
20 : return true;
21 : case kVisitByteArray:
22 : return true;
23 : case kVisitFixedDoubleArray:
24 : return true;
25 : case kVisitDataObject:
26 : return true;
27 : default:
28 : break;
29 : }
30 37497063 : return false;
31 : }
32 :
// Under ThreadSanitizer, performs a dummy acquire load on the object's page
// so TSAN pairs it with the release store done at page initialization and
// does not report a false data race. Compiles to a no-op otherwise.
void Scavenger::PageMemoryFence(Object* object) {
#ifdef THREAD_SANITIZER
  // Perform a dummy acquire load to tell TSAN that there is no data race
  // with page initialization.
  if (object->IsHeapObject()) {
    MemoryChunk* chunk =
        MemoryChunk::FromAddress(HeapObject::cast(object)->address());
    CHECK_NOT_NULL(chunk->synchronized_heap());
  }
#endif
}
44 :
// Copies |source| (whose map is |map|) into the already-allocated |target|
// and publishes a forwarding address in |source|'s map slot via a release
// CAS. Returns false if another scavenging task won the race and installed
// its own forwarding address first; in that case the caller must free
// |target| again.
bool Scavenger::MigrateObject(Map* map, HeapObject* source, HeapObject* target,
                              int size) {
  // Copy the content of source to target. The map word is written first so
  // the body copy below can skip it.
  target->set_map_word(MapWord::FromMap(map));
  heap()->CopyBlock(target->address() + kPointerSize,
                    source->address() + kPointerSize, size - kPointerSize);

  // Release CAS publishes the forwarding address; it only succeeds if the
  // map slot still holds the original map, i.e. no other task migrated the
  // object concurrently.
  HeapObject* old = base::AsAtomicPointer::Release_CompareAndSwap(
      reinterpret_cast<HeapObject**>(source->address()), map,
      MapWord::FromForwardingAddress(target).ToMap());
  if (old != map) {
    // Other task migrated the object.
    return false;
  }

  if (V8_UNLIKELY(is_logging_)) {
    // Update NewSpace stats if necessary.
    RecordCopiedObject(target);
    heap()->OnMoveEvent(target, source, size);
  }

  if (is_incremental_marking_) {
    // Keep the incremental marker's view consistent across the move.
    heap()->incremental_marking()->TransferColor(source, target);
  }
  heap()->UpdateAllocationSite(map, source, &local_pretenuring_feedback_);
  return true;
}
72 :
// Tries to copy |object| into to-space. Returns true if the slot was updated
// (either because this task migrated the object, or because another task did
// and we adopted its forwarding address); returns false only when the
// NEW_SPACE allocation itself failed, so the caller can fall back to
// promotion.
bool Scavenger::SemiSpaceCopyObject(Map* map, HeapObject** slot,
                                    HeapObject* object, int object_size) {
  DCHECK(heap()->AllowedToBeMigrated(object, NEW_SPACE));
  AllocationAlignment alignment = object->RequiredAlignment();
  AllocationResult allocation =
      allocator_.Allocate(NEW_SPACE, object_size, alignment);

  HeapObject* target = nullptr;
  if (allocation.To(&target)) {
    DCHECK(heap()->incremental_marking()->non_atomic_marking_state()->IsWhite(
        target));
    const bool self_success = MigrateObject(map, object, target, object_size);
    if (!self_success) {
      // Lost the migration race: undo our allocation and adopt the winner's
      // forwarding address from the object's map word.
      allocator_.FreeLast(NEW_SPACE, target, object_size);
      MapWord map_word = object->map_word();
      *slot = map_word.ToForwardingAddress();
      return true;
    }
    *slot = target;

    // Queue the copy for body visiting and account its size.
    copied_list_.Push(ObjectAndSize(target, object_size));
    copied_size_ += object_size;
    return true;
  }
  return false;
}
99 :
// Tries to promote |object| into OLD_SPACE. Mirrors SemiSpaceCopyObject:
// returns true when the slot was updated (by us or by a racing task whose
// forwarding address we adopt), false only when the old-space allocation
// failed so the caller can retry a semi-space copy.
bool Scavenger::PromoteObject(Map* map, HeapObject** slot, HeapObject* object,
                              int object_size) {
  AllocationAlignment alignment = object->RequiredAlignment();
  AllocationResult allocation =
      allocator_.Allocate(OLD_SPACE, object_size, alignment);

  HeapObject* target = nullptr;
  if (allocation.To(&target)) {
    DCHECK(heap()->incremental_marking()->non_atomic_marking_state()->IsWhite(
        target));
    const bool self_success = MigrateObject(map, object, target, object_size);
    if (!self_success) {
      // Lost the migration race: undo our allocation and adopt the winner's
      // forwarding address from the object's map word.
      allocator_.FreeLast(OLD_SPACE, target, object_size);
      MapWord map_word = object->map_word();
      *slot = map_word.ToForwardingAddress();
      return true;
    }
    *slot = target;

    // Data-only objects have no outgoing pointers, so they never need their
    // bodies revisited after promotion.
    if (!ContainsOnlyData(static_cast<VisitorId>(map->visitor_id()))) {
      promotion_list_.Push(ObjectAndSize(target, object_size));
    }
    promoted_size_ += object_size;
    return true;
  }
  return false;
}
127 :
128 : void Scavenger::EvacuateObjectDefault(Map* map, HeapObject** slot,
129 106450283 : HeapObject* object, int object_size) {
130 : SLOW_DCHECK(object_size <= Page::kAllocatableMemory);
131 : SLOW_DCHECK(object->SizeFromMap(map) == object_size);
132 :
133 212909351 : if (!heap()->ShouldBePromoted(object->address())) {
134 : // A semi-space copy may fail due to fragmentation. In that case, we
135 : // try to promote the object.
136 60579661 : if (SemiSpaceCopyObject(map, slot, object, object_size)) return;
137 : }
138 :
139 46397819 : if (PromoteObject(map, slot, object, object_size)) return;
140 :
141 : // If promotion failed, we try to copy the object to the other semi-space
142 0 : if (SemiSpaceCopyObject(map, slot, object, object_size)) return;
143 :
144 0 : FatalProcessOutOfMemory("Scavenger: semi-space copy\n");
145 : }
146 :
// Shortcuts a ThinString by forwarding directly to its internalized target
// when no incremental marking is running; otherwise evacuates it like any
// other object so the marker sees a consistent object graph.
void Scavenger::EvacuateThinString(Map* map, HeapObject** slot,
                                   ThinString* object, int object_size) {
  if (!is_incremental_marking_) {
    // Loading actual is fine in a parallel setting as there is no write.
    HeapObject* actual = object->actual();
    *slot = actual;
    // ThinStrings always refer to internalized strings, which are
    // always in old space.
    DCHECK(!heap()->InNewSpace(actual));
    // Relaxed store suffices: the forwarding target is in old space, so no
    // other task needs to observe a freshly copied body.
    base::AsAtomicPointer::Relaxed_Store(
        reinterpret_cast<Map**>(object->address()),
        MapWord::FromForwardingAddress(actual).ToMap());
    return;
  }

  EvacuateObjectDefault(map, slot, object, object_size);
}
164 :
// Shortcuts a ConsString whose second part is the empty string by forwarding
// the slot straight to the first part (evacuating that part if it is still
// in new space). Falls back to the default evacuation when incremental
// marking is active or the string is not shortcuttable.
void Scavenger::EvacuateShortcutCandidate(Map* map, HeapObject** slot,
                                          ConsString* object, int object_size) {
  DCHECK(IsShortcutCandidate(map->instance_type()));
  if (!is_incremental_marking_ &&
      object->unchecked_second() == heap()->empty_string()) {
    HeapObject* first = HeapObject::cast(object->unchecked_first());

    *slot = first;

    if (!heap()->InNewSpace(first)) {
      // First part already lives outside new space: just forward to it.
      base::AsAtomicPointer::Relaxed_Store(
          reinterpret_cast<Map**>(object->address()),
          MapWord::FromForwardingAddress(first).ToMap());
      return;
    }

    MapWord first_word = first->map_word();
    if (first_word.IsForwardingAddress()) {
      // First part was already evacuated; forward both the slot and the
      // cons string to its new location.
      HeapObject* target = first_word.ToForwardingAddress();

      *slot = target;
      base::AsAtomicPointer::Relaxed_Store(
          reinterpret_cast<Map**>(object->address()),
          MapWord::FromForwardingAddress(target).ToMap());
      return;
    }
    // NOTE: this |map| deliberately shadows the parameter — from here on we
    // operate on the first part's map, not the cons string's.
    Map* map = first_word.ToMap();
    EvacuateObjectDefault(map, slot, first, first->SizeFromMap(map));
    // *slot now holds the first part's new location; forward the cons
    // string there as well.
    base::AsAtomicPointer::Relaxed_Store(
        reinterpret_cast<Map**>(object->address()),
        MapWord::FromForwardingAddress(*slot).ToMap());
    return;
  }

  EvacuateObjectDefault(map, slot, object, object_size);
}
201 :
202 : void Scavenger::EvacuateObject(HeapObject** slot, Map* map,
203 : HeapObject* source) {
204 : SLOW_DCHECK(heap_->InFromSpace(source));
205 : SLOW_DCHECK(!MapWord::FromMap(map).IsForwardingAddress());
206 106680639 : int size = source->SizeFromMap(map);
207 : // Cannot use ::cast() below because that would add checks in debug mode
208 : // that require re-reading the map.
209 106722594 : switch (static_cast<VisitorId>(map->visitor_id())) {
210 : case kVisitThinString:
211 : EvacuateThinString(map, slot, reinterpret_cast<ThinString*>(source),
212 323472 : size);
213 : break;
214 : case kVisitShortcutCandidate:
215 : EvacuateShortcutCandidate(map, slot,
216 50621739 : reinterpret_cast<ConsString*>(source), size);
217 : break;
218 : default:
219 : EvacuateObjectDefault(map, slot, source, size);
220 : break;
221 : }
222 : }
223 :
// Ensures the from-space |object| referenced by slot |p| survives the
// scavenge: if it was already migrated, updates *p to its forwarding
// address; otherwise evacuates it now.
void Scavenger::ScavengeObject(HeapObject** p, HeapObject* object) {
  DCHECK(heap()->InFromSpace(object));

  // Synchronized load that consumes the publishing CAS of MigrateObject.
  MapWord first_word = object->synchronized_map_word();

  // If the first word is a forwarding address, the object has already been
  // copied.
  if (first_word.IsForwardingAddress()) {
    HeapObject* dest = first_word.ToForwardingAddress();
    DCHECK(object->GetIsolate()->heap()->InFromSpace(*p));
    base::AsAtomicPointer::Relaxed_Store(p, dest);
    return;
  }

  Map* map = first_word.ToMap();
  // AllocationMementos are unrooted and shouldn't survive a scavenge
  DCHECK_NE(heap()->allocation_memento_map(), map);
  // Call the slow part of scavenge object.
  EvacuateObject(p, map, object);
}
245 :
// Processes a remembered-set slot: scavenges the referenced object if it is
// in from-space, then decides whether the slot must stay in the remembered
// set (KEEP_SLOT while the referent is still in to-space) or can be dropped
// (REMOVE_SLOT once it points outside new space or is redundant).
SlotCallbackResult Scavenger::CheckAndScavengeObject(Heap* heap,
                                                     Address slot_address) {
  Object** slot = reinterpret_cast<Object**>(slot_address);
  Object* object = *slot;
  if (heap->InFromSpace(object)) {
    HeapObject* heap_object = reinterpret_cast<HeapObject*>(object);
    DCHECK(heap_object->IsHeapObject());

    ScavengeObject(reinterpret_cast<HeapObject**>(slot), heap_object);

    object = *slot;
    // If the object was in from space before and is after executing the
    // callback in to space, the object is still live.
    // Unfortunately, we do not know about the slot. It could be in a
    // just freed free space object.
    PageMemoryFence(object);
    if (heap->InToSpace(object)) {
      return KEEP_SLOT;
    }
  } else if (heap->InToSpace(object)) {
    // Already updated slot. This can happen when processing of the work list
    // is interleaved with processing roots.
    return KEEP_SLOT;
  }
  // Slots can point to "to" space if the slot has been recorded multiple
  // times in the remembered set. We remove the redundant slot now.
  return REMOVE_SLOT;
}
274 :
275 94695 : void ScavengeVisitor::VisitPointers(HeapObject* host, Object** start,
276 : Object** end) {
277 419367638 : for (Object** p = start; p < end; p++) {
278 419183223 : Object* object = *p;
279 419183223 : if (!heap_->InNewSpace(object)) continue;
280 : scavenger_->ScavengeObject(reinterpret_cast<HeapObject**>(p),
281 48960204 : reinterpret_cast<HeapObject*>(object));
282 : }
283 94715 : }
284 :
285 : } // namespace internal
286 : } // namespace v8
287 :
288 : #endif // V8_HEAP_SCAVENGER_INL_H_
|