LCOV - code coverage report
Current view: top level - src/snapshot - startup-serializer.cc (source / functions)
Test: app.info
Date: 2017-04-26

                  Hit    Total    Coverage
Lines:             82       84      97.6 %
Functions:          8       10      80.0 %

          Line data    Source code
       1             : // Copyright 2016 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #include "src/snapshot/startup-serializer.h"
       6             : 
       7             : #include "src/objects-inl.h"
       8             : #include "src/v8threads.h"
       9             : 
      10             : namespace v8 {
      11             : namespace internal {
      12             : 
      13         145 : StartupSerializer::StartupSerializer(
      14             :     Isolate* isolate,
      15             :     v8::SnapshotCreator::FunctionCodeHandling function_code_handling)
      16             :     : Serializer(isolate),
      17         145 :       clear_function_code_(function_code_handling ==
      18             :                            v8::SnapshotCreator::FunctionCodeHandling::kClear),
      19         290 :       serializing_builtins_(false) {
      20         145 :   InitializeCodeAddressMap();
      21         145 : }
      22             : 
      23         290 : StartupSerializer::~StartupSerializer() {
      24         145 :   RestoreExternalReferenceRedirectors(&accessor_infos_);
      25         145 :   OutputStatistics("StartupSerializer");
      26         145 : }
      27             : 
      28     5733196 : void StartupSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code,
      29             :                                         WhereToPoint where_to_point, int skip) {
      30             :   DCHECK(!obj->IsJSFunction());
      31             : 
      32     5733196 :   if (clear_function_code_) {
      33     5296500 :     if (obj->IsCode()) {
      34             :       Code* code = Code::cast(obj);
       35             :       // If the function code is compiled (either as native code or bytecode),
       36             :       // replace it with the lazy-compile builtin. The only exception is when
       37             :       // we are serializing the canonical interpreter-entry-trampoline builtin.
      38     3790458 :       if (code->kind() == Code::FUNCTION ||
      39     2199900 :           (!serializing_builtins_ &&
      40      936750 :            code->is_interpreter_trampoline_builtin())) {
      41        3427 :         obj = isolate()->builtins()->builtin(Builtins::kCompileLazy);
      42             :       }
      43     4032846 :     } else if (obj->IsBytecodeArray()) {
      44        1451 :       obj = isolate()->heap()->undefined_value();
      45             :     }
      46      436696 :   } else if (obj->IsCode()) {
      47             :     Code* code = Code::cast(obj);
      48      109636 :     if (code->kind() == Code::FUNCTION) {
      49           2 :       code->ClearInlineCaches();
      50             :       code->set_profiler_ticks(0);
      51             :     }
      52             :   }
      53             : 
      54    10500073 :   if (SerializeHotObject(obj, how_to_code, where_to_point, skip)) return;
      55             : 
      56     5048246 :   int root_index = root_index_map_.Lookup(obj);
       57             :   // We can only encode a root as such if it has already been serialized.
      58             :   // That applies to root indices below the wave front.
      59     5048246 :   if (root_index != RootIndexMap::kInvalidRootIndex) {
      60     2873703 :     if (root_has_been_serialized_.test(root_index)) {
      61     2807895 :       PutRoot(root_index, obj, how_to_code, where_to_point, skip);
      62     2807895 :       return;
      63             :     }
      64             :   }
      65             : 
      66     2240351 :   if (SerializeBackReference(obj, how_to_code, where_to_point, skip)) return;
      67             : 
      68      966319 :   FlushSkip(skip);
      69             : 
      70      966319 :   if (isolate_->external_reference_redirector() && obj->IsAccessorInfo()) {
      71             :     // Wipe external reference redirects in the accessor info.
      72             :     AccessorInfo* info = AccessorInfo::cast(obj);
      73             :     Address original_address = Foreign::cast(info->getter())->foreign_address();
      74             :     Foreign::cast(info->js_getter())->set_foreign_address(original_address);
      75           0 :     accessor_infos_.Add(info);
      76             :   }
      77             : 
      78             :   // Object has not yet been serialized.  Serialize it here.
      79             :   ObjectSerializer object_serializer(this, obj, &sink_, how_to_code,
      80      966319 :                                      where_to_point);
      81      966319 :   object_serializer.Serialize();
      82             : 
      83      966319 :   if (serializing_immortal_immovables_roots_ &&
      84             :       root_index != RootIndexMap::kInvalidRootIndex) {
      85             :     // Make sure that the immortal immovable root has been included in the first
       86             :     // chunk of its reserved space, so that it is deserialized onto the first
      87             :     // page of its space and stays immortal immovable.
      88       49480 :     SerializerReference ref = reference_map_.Lookup(obj);
      89       98960 :     CHECK(ref.is_back_reference() && ref.chunk_index() == 0);
      90             :   }
      91             : }
      92             : 
      93         145 : void StartupSerializer::SerializeWeakReferencesAndDeferred() {
      94             :   // This comes right after serialization of the partial snapshot, where we
      95             :   // add entries to the partial snapshot cache of the startup snapshot. Add
      96             :   // one entry with 'undefined' to terminate the partial snapshot cache.
      97         290 :   Object* undefined = isolate()->heap()->undefined_value();
      98         145 :   VisitRootPointer(Root::kPartialSnapshotCache, &undefined);
      99         145 :   isolate()->heap()->IterateWeakRoots(this, VISIT_ALL);
     100         145 :   SerializeDeferredObjects();
     101         145 :   Pad();
     102         145 : }
     103             : 
     104      437326 : int StartupSerializer::PartialSnapshotCacheIndex(HeapObject* heap_object) {
     105             :   int index;
     106      437326 :   if (!partial_cache_index_map_.LookupOrInsert(heap_object, &index)) {
     107             :     // This object is not part of the partial snapshot cache yet. Add it to the
     108             :     // startup snapshot so we can refer to it via partial snapshot index from
     109             :     // the partial snapshot.
     110             :     VisitRootPointer(Root::kPartialSnapshotCache,
     111      236623 :                      reinterpret_cast<Object**>(&heap_object));
     112             :   }
     113      437326 :   return index;
     114             : }
     115             : 
     116        2610 : void StartupSerializer::Synchronize(VisitorSynchronization::SyncTag tag) {
     117             :   // We expect the builtins tag after builtins have been serialized.
     118             :   DCHECK(!serializing_builtins_ || tag == VisitorSynchronization::kBuiltins);
     119        2610 :   serializing_builtins_ = (tag == VisitorSynchronization::kHandleScope);
     120             :   sink_.Put(kSynchronize, "Synchronize");
     121        2610 : }
     122             : 
     123         145 : void StartupSerializer::SerializeStrongReferences() {
     124         580 :   Isolate* isolate = this->isolate();
     125             :   // No active threads.
     126         145 :   CHECK_NULL(isolate->thread_manager()->FirstThreadStateInUse());
     127             :   // No active or weak handles.
     128         145 :   CHECK(isolate->handle_scope_implementer()->blocks()->is_empty());
     129         145 :   CHECK_EQ(0, isolate->global_handles()->global_handles_count());
     130         145 :   CHECK_EQ(0, isolate->eternal_handles()->NumberOfHandles());
     131             :   // First visit immortal immovables to make sure they end up in the first page.
     132         145 :   serializing_immortal_immovables_roots_ = true;
     133         145 :   isolate->heap()->IterateStrongRoots(this, VISIT_ONLY_STRONG_ROOT_LIST);
     134             :   // Check that immortal immovable roots are allocated on the first page.
     135         145 :   CHECK(HasNotExceededFirstPageOfEachSpace());
     136         145 :   serializing_immortal_immovables_roots_ = false;
     137             :   // Visit the rest of the strong roots.
     138             :   // Clear the stack limits to make the snapshot reproducible.
     139             :   // Reset it again afterwards.
     140         145 :   isolate->heap()->ClearStackLimits();
     141         145 :   isolate->heap()->IterateSmiRoots(this);
     142         145 :   isolate->heap()->SetStackLimits();
     143             : 
     144             :   isolate->heap()->IterateStrongRoots(this,
     145         145 :                                       VISIT_ONLY_STRONG_FOR_SERIALIZATION);
     146         145 : }
     147             : 
     148      350702 : void StartupSerializer::VisitRootPointers(Root root, Object** start,
     149             :                                           Object** end) {
     150      350702 :   if (start == isolate()->heap()->roots_array_start()) {
     151             :     // Serializing the root list needs special handling:
     152             :     // - The first pass over the root list only serializes immortal immovables.
     153             :     // - The second pass over the root list serializes the rest.
     154             :     // - Only root list elements that have been fully serialized can be
      155             :     //   referenced as roots using kRootArray bytecodes.
     156             :     int skip = 0;
     157      118320 :     for (Object** current = start; current < end; current++) {
     158      118320 :       int root_index = static_cast<int>(current - start);
     159      118320 :       if (RootShouldBeSkipped(root_index)) {
     160       59160 :         skip += kPointerSize;
     161       59160 :         continue;
     162             :       } else {
     163      118320 :         if ((*current)->IsSmi()) {
     164         580 :           FlushSkip(skip);
     165         580 :           PutSmi(Smi::cast(*current));
     166             :         } else {
     167             :           SerializeObject(HeapObject::cast(*current), kPlain, kStartOfObject,
     168       58580 :                           skip);
     169             :         }
     170       59160 :         root_has_been_serialized_.set(root_index);
     171             :         skip = 0;
     172             :       }
     173             :     }
     174         290 :     FlushSkip(skip);
     175             :   } else {
     176      350412 :     Serializer::VisitRootPointers(root, start, end);
     177             :   }
     178      350702 : }
     179             : 
     180           0 : bool StartupSerializer::RootShouldBeSkipped(int root_index) {
     181      118320 :   if (root_index == Heap::kStackLimitRootIndex ||
     182             :       root_index == Heap::kRealStackLimitRootIndex) {
     183             :     return true;
     184             :   }
     185      118320 :   return Heap::RootIsImmortalImmovable(root_index) !=
     186      118320 :          serializing_immortal_immovables_roots_;
     187             : }
     188             : 
     189             : }  // namespace internal
     190             : }  // namespace v8

Generated by: LCOV version 1.10