// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_VISITING_INL_H_
#define V8_OBJECTS_VISITING_INL_H_

#include "src/heap/array-buffer-tracker.h"
#include "src/heap/mark-compact.h"
#include "src/heap/objects-visiting.h"
#include "src/ic/ic-state.h"
#include "src/macro-assembler.h"
#include "src/objects-body-descriptors-inl.h"

namespace v8 {
namespace internal {

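// Looks up the visitor callback registered for an object's map. Dispatch is
// keyed by the map's visitor id, which indexes the callbacks_ table that the
// Initialize() functions below populate via Register().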
template <typename Callback>
Callback VisitorDispatchTable<Callback>::GetVisitor(Map* map) {
  return reinterpret_cast<Callback>(callbacks_[map->visitor_id()]);
}

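// Populates the dispatch table used during scavenges. All visitors here are
// instantiated with an int return type (the size of the visited object).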
template <typename StaticVisitor>
void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
  table_.Register(
      kVisitShortcutCandidate,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(
      kVisitConsString,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(
      kVisitThinString,
      &FixedBodyVisitor<StaticVisitor, ThinString::BodyDescriptor, int>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    int>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedArray,
                  &FlexibleBodyVisitor<StaticVisitor,
                                       FixedArray::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);
  table_.Register(
      kVisitFixedTypedArray,
      &FlexibleBodyVisitor<StaticVisitor, FixedTypedArrayBase::BodyDescriptor,
                           int>::Visit);

  table_.Register(
      kVisitFixedFloat64Array,
      &FlexibleBodyVisitor<StaticVisitor, FixedTypedArrayBase::BodyDescriptor,
                           int>::Visit);

  table_.Register(
      kVisitNativeContext,
      &FixedBodyVisitor<StaticVisitor, Context::ScavengeBodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitByteArray, &VisitByteArray);

  table_.Register(
      kVisitSharedFunctionInfo,
      &FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString);

  table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);

  // Don't visit code entry. We are using this visitor only during scavenges.
  table_.Register(
      kVisitJSFunction,
      &FlexibleBodyVisitor<StaticVisitor, JSFunction::BodyDescriptorWeakCode,
                           int>::Visit);

  table_.Register(
      kVisitJSArrayBuffer,
      &FlexibleBodyVisitor<StaticVisitor, JSArrayBuffer::BodyDescriptor,
                           int>::Visit);

  table_.Register(kVisitFreeSpace, &VisitFreeSpace);

  table_.Register(
      kVisitJSWeakCollection,
      &FlexibleBodyVisitor<StaticVisitor, JSWeakCollection::BodyDescriptor,
                           int>::Visit);

  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  table_.Register(kVisitDataObject, &DataObjectVisitor::Visit);

  table_.Register(kVisitJSObjectFast, &JSObjectFastVisitor::Visit);
  table_.Register(kVisitJSObject, &JSObjectVisitor::Visit);

  // Not using specialized Api object visitor for newspace.
  table_.Register(kVisitJSApiObject, &JSObjectVisitor::Visit);

  table_.Register(kVisitStruct, &StructVisitor::Visit);

  table_.Register(kVisitBytecodeArray, &UnreachableVisitor);
  table_.Register(kVisitSharedFunctionInfo, &UnreachableVisitor);
}

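// Populates the dispatch table used during marking. These visitors return
// void; plain data objects dispatch to DataObjectVisitor, while objects with
// weak or otherwise special fields (maps, code, shared function infos, weak
// cells, transition arrays, ...) dispatch to the specialized Visit* functions
// defined below.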
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitThinString,
                  &FixedBodyVisitor<StaticVisitor, ThinString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, void>::Visit);

  table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);

  table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);

  table_.Register(
      kVisitFixedTypedArray,
      &FlexibleBodyVisitor<StaticVisitor, FixedTypedArrayBase::BodyDescriptor,
                           void>::Visit);

  table_.Register(
      kVisitFixedFloat64Array,
      &FlexibleBodyVisitor<StaticVisitor, FixedTypedArrayBase::BodyDescriptor,
                           void>::Visit);

  table_.Register(kVisitNativeContext, &VisitNativeContext);

  table_.Register(
      kVisitAllocationSite,
      &FixedBodyVisitor<StaticVisitor, AllocationSite::MarkingBodyDescriptor,
                        void>::Visit);

  table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitBytecodeArray, &VisitBytecodeArray);

  table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitJSWeakCollection, &VisitWeakCollection);

  table_.Register(
      kVisitOddball,
      &FixedBodyVisitor<StaticVisitor, Oddball::BodyDescriptor, void>::Visit);

  table_.Register(kVisitMap, &VisitMap);

  table_.Register(kVisitCode, &VisitCode);

  table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(
      kVisitJSArrayBuffer,
      &FlexibleBodyVisitor<StaticVisitor, JSArrayBuffer::BodyDescriptor,
                           void>::Visit);

  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  table_.Register(
      kVisitCell,
      &FixedBodyVisitor<StaticVisitor, Cell::BodyDescriptor, void>::Visit);

  table_.Register(kVisitPropertyCell,
                  &FixedBodyVisitor<StaticVisitor, PropertyCell::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitWeakCell, &VisitWeakCell);

  table_.Register(kVisitTransitionArray, &VisitTransitionArray);

  table_.Register(kVisitDataObject, &DataObjectVisitor::Visit);

  table_.Register(kVisitJSObjectFast, &JSObjectFastVisitor::Visit);
  table_.Register(kVisitJSObject, &JSObjectVisitor::Visit);

  table_.Register(kVisitJSApiObject, &JSApiObjectVisitor::Visit);

  table_.Register(kVisitStruct, &StructObjectVisitor::Visit);
}

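// Marks the Code object referenced from a code entry field and records the
// slot with the mark-compact collector so it can be updated later if the
// target is relocated.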
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
    Heap* heap, HeapObject* object, Address entry_address) {
  Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
  heap->mark_compact_collector()->RecordCodeEntrySlot(object, entry_address,
                                                      code);
  StaticVisitor::MarkObject(heap, code);
}

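// The following reloc-info visitors all follow the same pattern: record the
// slot with the mark-compact collector, then mark the target object. Embedded
// objects and cells that the host code treats as weak are not marked here.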
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  HeapObject* object = HeapObject::cast(rinfo->target_object());
  Code* host = rinfo->host();
  heap->mark_compact_collector()->RecordRelocSlot(host, rinfo, object);
  // TODO(ulan): It could be better to record slots only for strongly embedded
  // objects here and record slots for weakly embedded object during clearing
  // of non-live references in mark-compact.
  if (!host->IsWeakObject(object)) {
    StaticVisitor::MarkObject(heap, object);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCell(Heap* heap,
                                                    RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::CELL);
  Cell* cell = rinfo->target_cell();
  Code* host = rinfo->host();
  heap->mark_compact_collector()->RecordRelocSlot(host, rinfo, cell);
  if (!host->IsWeakObject(cell)) {
    StaticVisitor::MarkObject(heap, cell);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(Heap* heap,
                                                           RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
         rinfo->IsPatchedDebugBreakSlotSequence());
  Code* target = Code::GetCodeFromTargetAddress(rinfo->debug_call_address());
  Code* host = rinfo->host();
  heap->mark_compact_collector()->RecordRelocSlot(host, rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(Heap* heap,
                                                          RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  Code* host = rinfo->host();
  heap->mark_compact_collector()->RecordRelocSlot(host, rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
  Code* target = rinfo->code_age_stub();
  DCHECK(target != NULL);
  Code* host = rinfo->host();
  heap->mark_compact_collector()->RecordRelocSlot(host, rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitBytecodeArray(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor, BytecodeArray::MarkingBodyDescriptor,
                   void>::Visit(map, object);
  BytecodeArray::cast(object)->MakeOlder();
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor, Context::MarkCompactBodyDescriptor,
                   void>::Visit(map, object);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitMap(Map* map,
                                                   HeapObject* object) {
  Heap* heap = map->GetHeap();
  Map* map_object = Map::cast(object);

  // Clears the cache of ICs related to this map.
  if (FLAG_cleanup_code_caches_at_gc) {
    map_object->ClearCodeCache(heap);
  }

  // When map collection is enabled we have to mark through map's transitions
  // and back pointers in a special way to make these links weak.
  if (map_object->CanTransition()) {
    MarkMapContents(heap, map_object);
  } else {
    StaticVisitor::VisitPointers(
        heap, object,
        HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
  }
}

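// Weak cells are not marked through. If the value is already known to be
// live, only the value slot is recorded; otherwise the cell is chained onto
// the heap's list of encountered weak cells and handled once the liveness of
// the whole transitive closure is known.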
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCell(Map* map,
                                                        HeapObject* object) {
  Heap* heap = map->GetHeap();
  WeakCell* weak_cell = reinterpret_cast<WeakCell*>(object);
  // Enqueue weak cell in linked list of encountered weak collections.
  // We can ignore weak cells with cleared values because they will always
  // contain smi zero.
  if (weak_cell->next_cleared() && !weak_cell->cleared()) {
    HeapObject* value = HeapObject::cast(weak_cell->value());
    if (ObjectMarking::IsBlackOrGrey(value, MarkingState::Internal(value))) {
      // Weak cells with live values are directly processed here to reduce
      // the processing time of weak cells during the main GC pause.
      Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
      map->GetHeap()->mark_compact_collector()->RecordSlot(weak_cell, slot,
                                                           *slot);
    } else {
      // If we do not know about liveness of values of weak cells, we have to
      // process them when we know the liveness of the whole transitive
      // closure.
      weak_cell->set_next(heap->encountered_weak_cells(),
                          UPDATE_WEAK_WRITE_BARRIER);
      heap->set_encountered_weak_cells(weak_cell);
    }
  }
}

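// Transition arrays are visited only partially: the prototype transitions
// slot and the transition keys are treated as strong references here, and
// the array is put on the heap's list of encountered transition arrays so
// the remaining entries can be post-processed.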
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitTransitionArray(
    Map* map, HeapObject* object) {
  TransitionArray* array = TransitionArray::cast(object);
  Heap* heap = array->GetHeap();
  // Visit strong references.
  if (array->HasPrototypeTransitions()) {
    StaticVisitor::VisitPointer(heap, array,
                                array->GetPrototypeTransitionsSlot());
  }
  int num_transitions = TransitionArray::NumberOfTransitions(array);
  for (int i = 0; i < num_transitions; ++i) {
    StaticVisitor::VisitPointer(heap, array, array->GetKeySlot(i));
  }
  // Enqueue the array in linked list of encountered transition arrays if it is
  // not already in the list.
  if (array->next_link()->IsUndefined(heap->isolate())) {
    Heap* heap = map->GetHeap();
    array->set_next_link(heap->encountered_transition_arrays(),
                         UPDATE_WEAK_WRITE_BARRIER);
    heap->set_encountered_transition_arrays(array);
  }
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCollection(
    Map* map, HeapObject* object) {
  typedef FlexibleBodyVisitor<StaticVisitor,
                              JSWeakCollection::BodyDescriptorWeak,
                              void> JSWeakCollectionBodyVisitor;
  Heap* heap = map->GetHeap();
  JSWeakCollection* weak_collection =
      reinterpret_cast<JSWeakCollection*>(object);

  // Enqueue weak collection in linked list of encountered weak collections.
  if (weak_collection->next() == heap->undefined_value()) {
    weak_collection->set_next(heap->encountered_weak_collections());
    heap->set_encountered_weak_collections(weak_collection);
  }

  // Skip visiting the backing hash table containing the mappings and the
  // pointer to the other enqueued weak collections, both are post-processed.
  JSWeakCollectionBodyVisitor::Visit(map, object);

  // Partially initialized weak collection is enqueued, but table is ignored.
  if (!weak_collection->table()->IsHashTable()) return;

  // Mark the backing hash table without pushing it on the marking stack.
  Object** slot = HeapObject::RawField(object, JSWeakCollection::kTableOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(object, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCode(Map* map,
                                                    HeapObject* object) {
  typedef FlexibleBodyVisitor<StaticVisitor, Code::BodyDescriptor, void>
      CodeBodyVisitor;
  Heap* heap = map->GetHeap();
  Code* code = Code::cast(object);
  if (FLAG_age_code && !heap->isolate()->serializer_enabled()) {
    code->MakeOlder();
  }
  CodeBodyVisitor::Visit(map, object);
}

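// SharedFunctionInfo visiting participates in code flushing: if the code
// looks flushable, the object becomes a flushing candidate and its code field
// is treated weakly; otherwise the code reference is kept strong.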
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
  if (shared->ic_age() != heap->global_ic_age()) {
    shared->ResetForNewContext(heap->global_ic_age());
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, shared)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(shared);
      // Treat the reference to the code object weakly.
      VisitSharedFunctionInfoWeakCode(map, object);
      return;
    }
  }
  VisitSharedFunctionInfoStrongCode(map, object);
}

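// JSFunction follows the same code-flushing protocol: flushable functions are
// added as candidates and visited with a weak code reference, while all other
// functions have their unoptimized code marked explicitly so it cannot be
// flushed.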
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(Map* map,
                                                          HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSFunction* function = JSFunction::cast(object);
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, function)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(function);
      // Treat the reference to the code object weakly.
      VisitJSFunctionWeakCode(map, object);
      return;
    } else {
      // Visit all unoptimized code objects to prevent flushing them.
      StaticVisitor::MarkObject(heap, function->shared()->code());
    }
  }
  VisitJSFunctionStrongCode(map, object);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(Heap* heap,
                                                          Map* map) {
  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked. The first time a non-empty
  // descriptor array is marked, its header is also visited. The slot holding
  // the descriptor array will be implicitly recorded when the pointer fields of
  // this map are visited. Prototype maps don't keep track of transitions, so
  // just mark the entire descriptor array.
  if (!map->is_prototype_map()) {
    DescriptorArray* descriptors = map->instance_descriptors();
    if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) &&
        descriptors->length() > 0) {
      StaticVisitor::VisitPointers(heap, descriptors,
                                   descriptors->GetFirstElementAddress(),
                                   descriptors->GetDescriptorEndSlot(0));
    }
    int start = 0;
    int end = map->NumberOfOwnDescriptors();
    if (start < end) {
      StaticVisitor::VisitPointers(heap, descriptors,
                                   descriptors->GetDescriptorStartSlot(start),
                                   descriptors->GetDescriptorEndSlot(end));
    }
  }

  // Mark the pointer fields of the Map. Since the transitions array has
  // been marked already, it is fine that one of these fields contains a
  // pointer to it.
  StaticVisitor::VisitPointers(
      heap, map, HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}


inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
  Object* undefined = heap->undefined_value();
  return (info->script() != undefined) &&
         (reinterpret_cast<Script*>(info->script())->source() != undefined);
}

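// A JSFunction's code is flushable only if it is not already marked, the
// function is not running optimized code (its code equals the shared code),
// the code is old enough under FLAG_age_code, and the underlying
// SharedFunctionInfo passes the checks below.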
template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(Heap* heap,
                                                      JSFunction* function) {
  SharedFunctionInfo* shared_info = function->shared();

  // Code is either on stack, in compilation cache or referenced
  // by optimized version of function.
  if (ObjectMarking::IsBlackOrGrey(function->code(),
                                   MarkingState::Internal(function->code()))) {
    return false;
  }

  // We do not (yet) flush code for optimized functions.
  if (function->code() != shared_info->code()) {
    return false;
  }

  // Check age of optimized code.
  if (FLAG_age_code && !function->code()->IsOld()) {
    return false;
  }

  return IsFlushable(heap, shared_info);
}


template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, SharedFunctionInfo* shared_info) {
  // Code is either on stack, in compilation cache or referenced
  // by optimized version of function.
  if (ObjectMarking::IsBlackOrGrey(
          shared_info->code(), MarkingState::Internal(shared_info->code()))) {
    return false;
  }

  // The function must be compiled and have the source code available,
  // to be able to recompile it in case we need the function again.
  if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
    return false;
  }

  // We never flush code for API functions.
  if (shared_info->IsApiFunction()) {
    return false;
  }

  // Only flush code for functions.
  if (shared_info->code()->kind() != Code::FUNCTION) {
    return false;
  }

  // Function must be lazy compilable.
  if (!shared_info->allows_lazy_compilation()) {
    return false;
  }

  // We do not (yet?) flush code for generator functions, or async functions,
  // because we don't know if there are still live activations
  // (generator objects) on the heap.
  if (IsResumableFunction(shared_info->kind())) {
    return false;
  }

  // If this is a full script wrapped in a function we do not flush the code.
  if (shared_info->is_toplevel()) {
    return false;
  }

  // The function must be user code.
  if (!shared_info->IsUserJavaScript()) {
    return false;
  }

  // Maintain debug break slots in the code.
  if (shared_info->HasDebugCode()) {
    return false;
  }

  // If this is a function initialized with %SetCode then the one-to-one
  // relation between SharedFunctionInfo and Code is broken.
  if (shared_info->dont_flush()) {
    return false;
  }

  // Check age of code. If code aging is disabled we never flush.
  if (!FLAG_age_code || !shared_info->code()->IsOld()) {
    return false;
  }

  return true;
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptor,
                   void>::Visit(map, object);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
    Map* map, HeapObject* object) {
  // Skip visiting kCodeOffset as it is treated weakly here.
  STATIC_ASSERT(SharedFunctionInfo::kCodeOffset <
                SharedFunctionInfo::BodyDescriptorWeakCode::kStartOffset);
  FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptorWeakCode,
                   void>::Visit(map, object);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
    Map* map, HeapObject* object) {
  typedef FlexibleBodyVisitor<StaticVisitor,
                              JSFunction::BodyDescriptorStrongCode,
                              void> JSFunctionStrongCodeBodyVisitor;
  JSFunctionStrongCodeBodyVisitor::Visit(map, object);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
    Map* map, HeapObject* object) {
  typedef FlexibleBodyVisitor<StaticVisitor, JSFunction::BodyDescriptorWeakCode,
                              void> JSFunctionWeakCodeBodyVisitor;
  JSFunctionWeakCodeBodyVisitor::Visit(map, object);
}


}  // namespace internal
}  // namespace v8

#endif  // V8_OBJECTS_VISITING_INL_H_