LCOV - code coverage report
Current view: top level - src/heap - objects-visiting.cc (source / functions) Hit Total Coverage
Test: app.info Lines: 33 33 100.0 %
Date: 2017-10-20 Functions: 6 6 100.0 %

          Line data    Source code
       1             : // Copyright 2011 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #include "src/heap/objects-visiting.h"
       6             : 
       7             : #include "src/heap/heap-inl.h"
       8             : #include "src/heap/mark-compact-inl.h"
       9             : #include "src/heap/objects-visiting-inl.h"
      10             : 
      11             : namespace v8 {
      12             : namespace internal {
      13             : 
      14             : // We don't record weak slots during marking or scavenges. Instead we do it
      15             : // once when we complete mark-compact cycle.  Note that write barrier has no
      16             : // effect if we are already in the middle of compacting mark-sweep cycle and we
      17             : // have to record slots manually.
      18      778780 : static bool MustRecordSlots(Heap* heap) {
      19      778780 :   return heap->gc_state() == Heap::MARK_COMPACT &&
      20      374564 :          heap->mark_compact_collector()->is_compacting();
      21             : }
      22             : 
      23             : 
// Trait template describing how to walk a weak list of T objects; specialized
// below for each element type (Code, Context, AllocationSite).
template <class T>
struct WeakListVisitor;
      26             : 
      27             : 
// Walks the weak list starting at |list|, asking |retainer| whether each
// element should be kept. Dead elements are unlinked; the (possibly new) list
// head is returned. When slots must be recorded (compacting mark-sweep, see
// MustRecordSlots), the rewritten weak-next slots are reported to the
// mark-compact collector.
template <class T>
Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
  Object* undefined = heap->undefined_value();
  Object* head = undefined;          // Head of the retained list; undefined until
                                     // the first element survives.
  T* tail = nullptr;                 // Last retained element, if any.
  bool record_slots = MustRecordSlots(heap);

  while (list != undefined) {
    // Check whether to keep the candidate in the list.
    T* candidate = reinterpret_cast<T*>(list);

    // nullptr means the element is dead; any other value is the (possibly
    // relocated) object to keep.
    Object* retained = retainer->RetainAs(list);
    if (retained != nullptr) {
      if (head == undefined) {
        // First element in the list.
        head = retained;
      } else {
        // Subsequent elements in the list.
        DCHECK_NOT_NULL(tail);
        WeakListVisitor<T>::SetWeakNext(tail, retained);
        if (record_slots) {
          // The weak-next field of |tail| was just rewritten; tell the
          // collector about the updated slot.
          Object** next_slot =
              HeapObject::RawField(tail, WeakListVisitor<T>::WeakNextOffset());
          MarkCompactCollector::RecordSlot(tail, next_slot, retained);
        }
      }
      // Retained object is new tail.
      DCHECK(!retained->IsUndefined(heap->isolate()));
      candidate = reinterpret_cast<T*>(retained);
      tail = candidate;

      // tail is a live object, visit it.
      WeakListVisitor<T>::VisitLiveObject(heap, tail, retainer);

    } else {
      WeakListVisitor<T>::VisitPhantomObject(heap, candidate);
    }

    // Move to next element in the list. Note: |candidate| was updated to the
    // retained object above, so this follows the link of the live object.
    list = WeakListVisitor<T>::WeakNext(candidate);
  }

  // Terminate the list if there is one or more elements.
  if (tail != nullptr) WeakListVisitor<T>::SetWeakNext(tail, undefined);
  return head;
}
      74             : 
      75             : 
      76             : template <class T>
      77       17702 : static void ClearWeakList(Heap* heap, Object* list) {
      78             :   Object* undefined = heap->undefined_value();
      79       45894 :   while (list != undefined) {
      80             :     T* candidate = reinterpret_cast<T*>(list);
      81             :     list = WeakListVisitor<T>::WeakNext(candidate);
      82             :     WeakListVisitor<T>::SetWeakNext(candidate, undefined);
      83             :   }
      84             : }
      85             : 
      86             : template <>
      87             : struct WeakListVisitor<Code> {
      88             :   static void SetWeakNext(Code* code, Object* next) {
      89      136870 :     code->set_next_code_link(next, UPDATE_WEAK_WRITE_BARRIER);
      90             :   }
      91             : 
      92             :   static Object* WeakNext(Code* code) { return code->next_code_link(); }
      93             : 
      94             :   static int WeakNextOffset() { return Code::kNextCodeLinkOffset; }
      95             : 
      96             :   static void VisitLiveObject(Heap*, Code*, WeakObjectRetainer*) {}
      97             : 
      98             :   static void VisitPhantomObject(Heap*, Code*) {}
      99             : };
     100             : 
     101             : 
     102             : template <>
     103             : struct WeakListVisitor<Context> {
     104             :   static void SetWeakNext(Context* context, Object* next) {
     105      134544 :     context->set(Context::NEXT_CONTEXT_LINK, next, UPDATE_WEAK_WRITE_BARRIER);
     106             :   }
     107             : 
     108             :   static Object* WeakNext(Context* context) {
     109             :     return context->next_context_link();
     110             :   }
     111             : 
     112             :   static int WeakNextOffset() {
     113             :     return FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK);
     114             :   }
     115             : 
     116      134544 :   static void VisitLiveObject(Heap* heap, Context* context,
     117             :                               WeakObjectRetainer* retainer) {
     118      134544 :     if (heap->gc_state() == Heap::MARK_COMPACT) {
     119             :       // Record the slots of the weak entries in the native context.
     120      195723 :       for (int idx = Context::FIRST_WEAK_SLOT;
     121             :            idx < Context::NATIVE_CONTEXT_SLOTS; ++idx) {
     122             :         Object** slot = Context::cast(context)->RawFieldOfElementAt(idx);
     123      195723 :         MarkCompactCollector::RecordSlot(context, slot, *slot);
     124             :       }
     125             :       // Code objects are always allocated in Code space, we do not have to
     126             :       // visit them during scavenges.
     127       65241 :       DoWeakList<Code>(heap, context, retainer, Context::OPTIMIZED_CODE_LIST);
     128       65241 :       DoWeakList<Code>(heap, context, retainer, Context::DEOPTIMIZED_CODE_LIST);
     129             :     }
     130      134544 :   }
     131             : 
     132             :   template <class T>
     133      130482 :   static void DoWeakList(Heap* heap, Context* context,
     134             :                          WeakObjectRetainer* retainer, int index) {
     135             :     // Visit the weak list, removing dead intermediate elements.
     136      130482 :     Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer);
     137             : 
     138             :     // Update the list head.
     139      130482 :     context->set(index, list_head, UPDATE_WRITE_BARRIER);
     140             : 
     141      130482 :     if (MustRecordSlots(heap)) {
     142             :       // Record the updated slot if necessary.
     143             :       Object** head_slot =
     144             :           HeapObject::RawField(context, FixedArray::SizeFor(index));
     145             :       heap->mark_compact_collector()->RecordSlot(context, head_slot, list_head);
     146             :     }
     147      130482 :   }
     148             : 
     149        8851 :   static void VisitPhantomObject(Heap* heap, Context* context) {
     150             :     ClearWeakList<Code>(heap, context->get(Context::OPTIMIZED_CODE_LIST));
     151             :     ClearWeakList<Code>(heap, context->get(Context::DEOPTIMIZED_CODE_LIST));
     152        8851 :   }
     153             : };
     154             : 
     155             : 
     156             : template <>
     157             : struct WeakListVisitor<AllocationSite> {
     158             :   static void SetWeakNext(AllocationSite* obj, Object* next) {
     159      534406 :     obj->set_weak_next(next, UPDATE_WEAK_WRITE_BARRIER);
     160             :   }
     161             : 
     162             :   static Object* WeakNext(AllocationSite* obj) { return obj->weak_next(); }
     163             : 
     164             :   static int WeakNextOffset() { return AllocationSite::kWeakNextOffset; }
     165             : 
     166             :   static void VisitLiveObject(Heap*, AllocationSite*, WeakObjectRetainer*) {}
     167             : 
     168             :   static void VisitPhantomObject(Heap*, AllocationSite*) {}
     169             : };
     170             : 
     171             : 
// Explicit instantiations so these VisitWeakList variants can be called from
// other translation units.
template Object* VisitWeakList<Context>(Heap* heap, Object* list,
                                        WeakObjectRetainer* retainer);

template Object* VisitWeakList<AllocationSite>(Heap* heap, Object* list,
                                               WeakObjectRetainer* retainer);
     177             : }  // namespace internal
     178             : }  // namespace v8

Generated by: LCOV version 1.10