// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/objects-visiting.h"

#include "src/heap/heap-inl.h"
#include "src/heap/mark-compact-inl.h"
#include "src/heap/objects-visiting-inl.h"

namespace v8 {
namespace internal {

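// This file implements two pieces of the heap's object-visiting machinery:
// (1) StaticVisitorBase::GetVisitorId(), which maps a map / instance type to
//     the VisitorId used to index the static visitor dispatch tables, and
// (2) the weak-list helpers (VisitWeakList, ClearWeakList and the
//     WeakListVisitor<T> specializations) that prune lists of weakly held
//     objects once the GC has decided what is still alive.
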
StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(Map* map) {
  return GetVisitorId(map->instance_type(), map->instance_size(),
                      FLAG_unbox_double_fields && !map->HasFastPointerLayout());
}
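// Note on the third argument: has_unboxed_fields is true when double fields
// may be stored unboxed in-object (FLAG_unbox_double_fields) and the map does
// not have a fast, all-tagged pointer layout. The switch below then selects
// kVisitJSObject instead of kVisitJSObjectFast for ordinary JS objects, so the
// visitor does not assume that every in-object field is a tagged pointer.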

StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
    int instance_type, int instance_size, bool has_unboxed_fields) {
  if (instance_type < FIRST_NONSTRING_TYPE) {
    switch (instance_type & kStringRepresentationMask) {
      case kSeqStringTag:
        if ((instance_type & kStringEncodingMask) == kOneByteStringTag) {
          return kVisitSeqOneByteString;
        } else {
          return kVisitSeqTwoByteString;
        }

      case kConsStringTag:
        if (IsShortcutCandidate(instance_type)) {
          return kVisitShortcutCandidate;
        } else {
          return kVisitConsString;
        }

      case kSlicedStringTag:
        return kVisitSlicedString;

      case kExternalStringTag:
        return kVisitDataObject;

      case kThinStringTag:
        return kVisitThinString;
    }
    UNREACHABLE();
  }

  switch (instance_type) {
    case BYTE_ARRAY_TYPE:
      return kVisitByteArray;

    case BYTECODE_ARRAY_TYPE:
      return kVisitBytecodeArray;

    case FREE_SPACE_TYPE:
      return kVisitFreeSpace;

    case FIXED_ARRAY_TYPE:
      return kVisitFixedArray;

    case FIXED_DOUBLE_ARRAY_TYPE:
      return kVisitFixedDoubleArray;

    case ODDBALL_TYPE:
      return kVisitOddball;

    case MAP_TYPE:
      return kVisitMap;

    case CODE_TYPE:
      return kVisitCode;

    case CELL_TYPE:
      return kVisitCell;

    case PROPERTY_CELL_TYPE:
      return kVisitPropertyCell;

    case WEAK_CELL_TYPE:
      return kVisitWeakCell;

    case TRANSITION_ARRAY_TYPE:
      return kVisitTransitionArray;

    case JS_WEAK_MAP_TYPE:
    case JS_WEAK_SET_TYPE:
      return kVisitJSWeakCollection;

    case JS_REGEXP_TYPE:
      return kVisitJSRegExp;

    case SHARED_FUNCTION_INFO_TYPE:
      return kVisitSharedFunctionInfo;

    case JS_PROXY_TYPE:
      return kVisitStruct;

    case SYMBOL_TYPE:
      return kVisitSymbol;

    case JS_ARRAY_BUFFER_TYPE:
      return kVisitJSArrayBuffer;

    case JS_OBJECT_TYPE:
    case JS_ERROR_TYPE:
    case JS_ARGUMENTS_TYPE:
    case JS_ASYNC_FROM_SYNC_ITERATOR_TYPE:
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
    case JS_GENERATOR_OBJECT_TYPE:
    case JS_ASYNC_GENERATOR_OBJECT_TYPE:
    case JS_MODULE_NAMESPACE_TYPE:
    case JS_VALUE_TYPE:
    case JS_DATE_TYPE:
    case JS_ARRAY_TYPE:
    case JS_GLOBAL_PROXY_TYPE:
    case JS_GLOBAL_OBJECT_TYPE:
    case JS_MESSAGE_OBJECT_TYPE:
    case JS_TYPED_ARRAY_TYPE:
    case JS_DATA_VIEW_TYPE:
    case JS_SET_TYPE:
    case JS_MAP_TYPE:
    case JS_SET_ITERATOR_TYPE:
    case JS_MAP_ITERATOR_TYPE:
    case JS_STRING_ITERATOR_TYPE:

    case JS_TYPED_ARRAY_KEY_ITERATOR_TYPE:
    case JS_FAST_ARRAY_KEY_ITERATOR_TYPE:
    case JS_GENERIC_ARRAY_KEY_ITERATOR_TYPE:
    case JS_UINT8_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_INT8_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_UINT16_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_INT16_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_UINT32_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_INT32_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_FLOAT32_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_FLOAT64_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_UINT8_CLAMPED_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_FAST_SMI_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_FAST_HOLEY_SMI_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_FAST_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_FAST_HOLEY_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_FAST_DOUBLE_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_FAST_HOLEY_DOUBLE_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_GENERIC_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_UINT8_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_INT8_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_UINT16_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_INT16_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_UINT32_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_INT32_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_FLOAT32_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_FLOAT64_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_UINT8_CLAMPED_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_FAST_SMI_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_FAST_HOLEY_SMI_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_FAST_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_FAST_HOLEY_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_FAST_DOUBLE_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_FAST_HOLEY_DOUBLE_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_GENERIC_ARRAY_VALUE_ITERATOR_TYPE:

    case JS_PROMISE_CAPABILITY_TYPE:
    case JS_PROMISE_TYPE:
    case JS_BOUND_FUNCTION_TYPE:
      return has_unboxed_fields ? kVisitJSObject : kVisitJSObjectFast;
    case JS_API_OBJECT_TYPE:
    case JS_SPECIAL_API_OBJECT_TYPE:
      return kVisitJSApiObject;

    case JS_FUNCTION_TYPE:
      return kVisitJSFunction;

    case FILLER_TYPE:
    case FOREIGN_TYPE:
    case HEAP_NUMBER_TYPE:
    case MUTABLE_HEAP_NUMBER_TYPE:
      return kVisitDataObject;

    case FIXED_UINT8_ARRAY_TYPE:
    case FIXED_INT8_ARRAY_TYPE:
    case FIXED_UINT16_ARRAY_TYPE:
    case FIXED_INT16_ARRAY_TYPE:
    case FIXED_UINT32_ARRAY_TYPE:
    case FIXED_INT32_ARRAY_TYPE:
    case FIXED_FLOAT32_ARRAY_TYPE:
    case FIXED_UINT8_CLAMPED_ARRAY_TYPE:
      return kVisitFixedTypedArray;

    case FIXED_FLOAT64_ARRAY_TYPE:
      return kVisitFixedFloat64Array;

#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE:
      STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
      if (instance_type == ALLOCATION_SITE_TYPE) {
        return kVisitAllocationSite;
      }

      return kVisitStruct;

    default:
      UNREACHABLE();
      return kVisitorIdCount;
  }
}
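// How the ids above are consumed (an illustrative sketch, not code from this
// file): visitors derived from StaticVisitorBase fill a table indexed by
// VisitorId and dispatch through it, roughly
//
//   // Hypothetical names; the real tables live in objects-visiting(-inl).h.
//   typedef int (*Callback)(Map* map, HeapObject* object);
//   static Callback table[StaticVisitorBase::kVisitorIdCount];
//
//   int Visit(Map* map, HeapObject* object) {
//     return table[StaticVisitorBase::GetVisitorId(map)](map, object);
//   }
//
// which keeps the per-object cost to one array lookup plus an indirect call.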

// We don't record weak slots during marking or scavenges. Instead we do it
// once when we complete a mark-compact cycle. Note that the write barrier has
// no effect if we are already in the middle of a compacting mark-sweep cycle,
// so we have to record slots manually.
static bool MustRecordSlots(Heap* heap) {
  return heap->gc_state() == Heap::MARK_COMPACT &&
         heap->mark_compact_collector()->is_compacting();
}
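// This predicate gates the manual slot recording in the list walks below; the
// pattern, as used by VisitWeakList and DoWeakList in this file, is
//
//   if (MustRecordSlots(heap)) {
//     Object** slot = HeapObject::RawField(holder, weak_next_offset);
//     heap->mark_compact_collector()->RecordSlot(holder, slot, value);
//   }
//
// where holder, weak_next_offset and value are placeholders for the object
// being patched, the offset of its weak "next" field and the new referent.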

template <class T>
struct WeakListVisitor;
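
// Each specialization of WeakListVisitor<T> below supplies the policy that
// VisitWeakList and ClearWeakList need in order to thread a weak list through
// objects of type T:
//   SetWeakNext(T*, Object*)  - store the next-link in the object,
//   WeakNext(T*)              - load the next-link,
//   WeakNextOffset()          - field offset of the next-link (for slot
//                               recording),
//   VisitLiveObject(...)      - hook invoked for retained elements,
//   VisitPhantomObject(...)   - hook invoked for dropped elements.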

template <class T>
Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
  Object* undefined = heap->undefined_value();
  Object* head = undefined;
  T* tail = NULL;
  MarkCompactCollector* collector = heap->mark_compact_collector();
  bool record_slots = MustRecordSlots(heap);

  while (list != undefined) {
    // Check whether to keep the candidate in the list.
    T* candidate = reinterpret_cast<T*>(list);

    Object* retained = retainer->RetainAs(list);
    if (retained != NULL) {
      if (head == undefined) {
        // First element in the list.
        head = retained;
      } else {
        // Subsequent elements in the list.
        DCHECK(tail != NULL);
        WeakListVisitor<T>::SetWeakNext(tail, retained);
        if (record_slots) {
          Object** next_slot =
              HeapObject::RawField(tail, WeakListVisitor<T>::WeakNextOffset());
          collector->RecordSlot(tail, next_slot, retained);
        }
      }
      // The retained object is the new tail.
      DCHECK(!retained->IsUndefined(heap->isolate()));
      candidate = reinterpret_cast<T*>(retained);
      tail = candidate;

      // tail is a live object; visit it.
      WeakListVisitor<T>::VisitLiveObject(heap, tail, retainer);

    } else {
      WeakListVisitor<T>::VisitPhantomObject(heap, candidate);
    }

    // Move to the next element in the list.
    list = WeakListVisitor<T>::WeakNext(candidate);
  }

  // Terminate the list if it contains one or more elements.
  if (tail != NULL) WeakListVisitor<T>::SetWeakNext(tail, undefined);
  return head;
}
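
// A sketch of how a caller drives this walk (illustrative only; the retainer
// class and IsStillAlive() below are hypothetical stand-ins). RetainAs()
// returns the possibly forwarded object when the element should stay on the
// list, and NULL when it is dead:
//
//   class ExampleRetainer : public WeakObjectRetainer {
//    public:
//     virtual Object* RetainAs(Object* object) {
//       return IsStillAlive(object) ? object : NULL;
//     }
//   };
//
//   ExampleRetainer retainer;
//   Object* new_head = VisitWeakList<Context>(heap, old_head, &retainer);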

template <class T>
static void ClearWeakList(Heap* heap, Object* list) {
  Object* undefined = heap->undefined_value();
  while (list != undefined) {
    T* candidate = reinterpret_cast<T*>(list);
    list = WeakListVisitor<T>::WeakNext(candidate);
    WeakListVisitor<T>::SetWeakNext(candidate, undefined);
  }
}
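
// In this file ClearWeakList is used only when an entire list owner dies: the
// Context specialization below calls it from VisitPhantomObject to unlink the
// nested function and code lists of a dead native context.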

template <>
struct WeakListVisitor<JSFunction> {
  static void SetWeakNext(JSFunction* function, Object* next) {
    function->set_next_function_link(next, UPDATE_WEAK_WRITE_BARRIER);
  }

  static Object* WeakNext(JSFunction* function) {
    return function->next_function_link();
  }

  static int WeakNextOffset() { return JSFunction::kNextFunctionLinkOffset; }

  static void VisitLiveObject(Heap*, JSFunction*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, JSFunction*) {}
};

template <>
struct WeakListVisitor<Code> {
  static void SetWeakNext(Code* code, Object* next) {
    code->set_next_code_link(next, UPDATE_WEAK_WRITE_BARRIER);
  }

  static Object* WeakNext(Code* code) { return code->next_code_link(); }

  static int WeakNextOffset() { return Code::kNextCodeLinkOffset; }

  static void VisitLiveObject(Heap*, Code*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, Code*) {}
};
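
// The JSFunction and Code specializations above (and the AllocationSite one
// further below) are purely structural: they only know where the weak
// next-link lives. The Context specialization is the interesting one, because
// every native context in turn owns three nested weak lists (optimized
// functions, optimized code, deoptimized code) that must be processed whenever
// the context itself is visited.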

template <>
struct WeakListVisitor<Context> {
  static void SetWeakNext(Context* context, Object* next) {
    context->set(Context::NEXT_CONTEXT_LINK, next, UPDATE_WEAK_WRITE_BARRIER);
  }

  static Object* WeakNext(Context* context) {
    return context->next_context_link();
  }

  static int WeakNextOffset() {
    return FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK);
  }

  static void VisitLiveObject(Heap* heap, Context* context,
                              WeakObjectRetainer* retainer) {
    // Process the three weak lists linked off the context.
    DoWeakList<JSFunction>(heap, context, retainer,
                           Context::OPTIMIZED_FUNCTIONS_LIST);

    if (heap->gc_state() == Heap::MARK_COMPACT) {
      // Record the slots of the weak entries in the native context.
      MarkCompactCollector* collector = heap->mark_compact_collector();
      for (int idx = Context::FIRST_WEAK_SLOT;
           idx < Context::NATIVE_CONTEXT_SLOTS; ++idx) {
        Object** slot = Context::cast(context)->RawFieldOfElementAt(idx);
        collector->RecordSlot(context, slot, *slot);
      }
      // Code objects are always allocated in Code space; we do not have to
      // visit them during scavenges.
      DoWeakList<Code>(heap, context, retainer, Context::OPTIMIZED_CODE_LIST);
      DoWeakList<Code>(heap, context, retainer, Context::DEOPTIMIZED_CODE_LIST);
    }
  }

  template <class T>
  static void DoWeakList(Heap* heap, Context* context,
                         WeakObjectRetainer* retainer, int index) {
    // Visit the weak list, removing dead intermediate elements.
    Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer);

    // Update the list head.
    context->set(index, list_head, UPDATE_WRITE_BARRIER);

    if (MustRecordSlots(heap)) {
      // Record the updated slot if necessary.
      Object** head_slot =
          HeapObject::RawField(context, FixedArray::SizeFor(index));
      heap->mark_compact_collector()->RecordSlot(context, head_slot, list_head);
    }
  }

  static void VisitPhantomObject(Heap* heap, Context* context) {
    ClearWeakList<JSFunction>(heap,
                              context->get(Context::OPTIMIZED_FUNCTIONS_LIST));
    ClearWeakList<Code>(heap, context->get(Context::OPTIMIZED_CODE_LIST));
    ClearWeakList<Code>(heap, context->get(Context::DEOPTIMIZED_CODE_LIST));
  }
};

template <>
struct WeakListVisitor<AllocationSite> {
  static void SetWeakNext(AllocationSite* obj, Object* next) {
    obj->set_weak_next(next, UPDATE_WEAK_WRITE_BARRIER);
  }

  static Object* WeakNext(AllocationSite* obj) { return obj->weak_next(); }

  static int WeakNextOffset() { return AllocationSite::kWeakNextOffset; }

  static void VisitLiveObject(Heap*, AllocationSite*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, AllocationSite*) {}
};

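// Explicit instantiations: VisitWeakList is defined in this .cc file, so the
// two instantiations that the rest of the heap code relies on (for the global
// list of native contexts and the list of allocation sites) are emitted here.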
template Object* VisitWeakList<Context>(Heap* heap, Object* list,
                                        WeakObjectRetainer* retainer);

template Object* VisitWeakList<AllocationSite>(Heap* heap, Object* list,
                                               WeakObjectRetainer* retainer);
}  // namespace internal
}  // namespace v8