LCOV - code coverage report
Current view: top level - src/snapshot - deserializer.cc (source / functions)
Test: app.info
Date: 2017-04-26
              Hit   Total   Coverage
Lines:        257     288     89.2 %
Functions:     24      28     85.7 %

          Line data    Source code
       1             : // Copyright 2016 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #include "src/snapshot/deserializer.h"
       6             : 
       7             : #include "src/api.h"
       8             : #include "src/assembler-inl.h"
       9             : #include "src/bootstrapper.h"
      10             : #include "src/deoptimizer.h"
      11             : #include "src/external-reference-table.h"
      12             : #include "src/heap/heap-inl.h"
      13             : #include "src/isolate.h"
      14             : #include "src/macro-assembler.h"
      15             : #include "src/objects-inl.h"
      16             : #include "src/snapshot/natives.h"
      17             : #include "src/v8.h"
      18             : #include "src/v8threads.h"
      19             : 
      20             : namespace v8 {
      21             : namespace internal {
      22             : 
      23      168002 : void Deserializer::DecodeReservation(
      24             :     Vector<const SerializedData::Reservation> res) {
      25             :   DCHECK_EQ(0, reservations_[NEW_SPACE].length());
      26             :   STATIC_ASSERT(NEW_SPACE == 0);
      27             :   int current_space = NEW_SPACE;
      28     1297570 :   for (auto& r : res) {
      29      961566 :     reservations_[current_space].Add({r.chunk_size(), NULL, NULL});
      30      961566 :     if (r.is_last()) current_space++;
      31             :   }
      32             :   DCHECK_EQ(kNumberOfSpaces, current_space);
      33      504006 :   for (int i = 0; i < kNumberOfPreallocatedSpaces; i++) current_chunk_[i] = 0;
      34      168002 : }
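                      : // An illustrative decoding of the flat reservation stream consumed above
                      : // (chunk sizes hypothetical): chunks are listed space by space, and
                      : // is_last() marks the final chunk of each space, so
                      : //
                      : //   {32768, last} {65536} {16384, last} ...
                      : //
                      : // gives NEW_SPACE a single 32K chunk, the next space (OLD_SPACE) the two
                      : // chunks 64K and 16K, and so on until all kNumberOfSpaces are covered.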
      35             : 
      36       60739 : void Deserializer::FlushICacheForNewIsolate() {
      37             :   DCHECK(!deserializing_user_code_);
      38             :   // The entire isolate is newly deserialized. Simply flush all code pages.
      39      485912 :   for (Page* p : *isolate_->heap()->code_space()) {
      40             :     Assembler::FlushICache(isolate_, p->area_start(),
      41      182217 :                            p->area_end() - p->area_start());
      42             :   }
      43       60739 : }
      44             : 
      45         385 : void Deserializer::FlushICacheForNewCodeObjectsAndRecordEmbeddedObjects() {
      46             :   DCHECK(deserializing_user_code_);
      47        1267 :   for (Code* code : new_code_objects_) {
      48             :     // Record all references to embedded objects in the new code object.
      49         497 :     isolate_->heap()->RecordWritesIntoCode(code);
      50             : 
      51         497 :     if (FLAG_serialize_age_code) code->PreAge(isolate_);
      52         497 :     Assembler::FlushICache(isolate_, code->instruction_start(),
      53         994 :                            code->instruction_size());
      54             :   }
      55         385 : }
      56             : 
      57      168002 : bool Deserializer::ReserveSpace() {
      58             : #ifdef DEBUG
      59             :   for (int i = NEW_SPACE; i < kNumberOfSpaces; ++i) {
      60             :     CHECK(reservations_[i].length() > 0);
      61             :   }
      62             : #endif  // DEBUG
      63             :   DCHECK(allocated_maps_.is_empty());
      64      168002 :   if (!isolate_->heap()->ReserveSpace(reservations_, &allocated_maps_))
      65             :     return false;
      66      504006 :   for (int i = 0; i < kNumberOfPreallocatedSpaces; i++) {
      67      504006 :     high_water_[i] = reservations_[i][0].start;
      68             :   }
      69             :   return true;
      70             : }
      71             : 
      72      168002 : void Deserializer::Initialize(Isolate* isolate) {
      73             :   DCHECK_NULL(isolate_);
      74             :   DCHECK_NOT_NULL(isolate);
      75      168002 :   isolate_ = isolate;
      76             :   DCHECK_NULL(external_reference_table_);
      77      168002 :   external_reference_table_ = ExternalReferenceTable::instance(isolate);
      78      168002 :   CHECK_EQ(magic_number_,
      79             :            SerializedData::ComputeMagicNumber(external_reference_table_));
      80             :   // The current isolate must have at least as many API-provided external
      81             :   // references as the to-be-deserialized snapshot expects and refers to.
      82      168002 :   CHECK_LE(num_extra_references_,
      83             :            SerializedData::GetExtraReferences(external_reference_table_));
      84      168002 : }
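                      : // A minimal embedder-side sketch (callback name hypothetical) of supplying
                      : // the API-provided external references that the CHECK_LE above counts,
                      : // using the public v8.h API; the array must be zero-terminated:
                      : //
                      : //   intptr_t refs[] = {reinterpret_cast<intptr_t>(&MyNativeCallback), 0};
                      : //   v8::Isolate::CreateParams create_params;
                      : //   create_params.external_references = refs;
                      : //   v8::Isolate* isolate = v8::Isolate::New(create_params);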
      85             : 
      86       60739 : void Deserializer::Deserialize(Isolate* isolate) {
      87       60739 :   Initialize(isolate);
      88       60739 :   if (!ReserveSpace()) V8::FatalProcessOutOfMemory("deserializing context");
      89             :   // No active threads.
      90             :   DCHECK_NULL(isolate_->thread_manager()->FirstThreadStateInUse());
      91             :   // No active handles.
      92             :   DCHECK(isolate_->handle_scope_implementer()->blocks()->is_empty());
      93             :   // Partial snapshot cache is not yet populated.
      94             :   DCHECK(isolate_->partial_snapshot_cache()->is_empty());
      95             :   // Builtins are not yet created.
      96             :   DCHECK(!isolate_->builtins()->is_initialized());
      97             : 
      98             :   {
      99             :     DisallowHeapAllocation no_gc;
     100      242956 :     isolate_->heap()->IterateStrongRoots(this, VISIT_ONLY_STRONG_ROOT_LIST);
     101       60739 :     isolate_->heap()->IterateSmiRoots(this);
     102       60739 :     isolate_->heap()->IterateStrongRoots(this, VISIT_ONLY_STRONG);
     103       60739 :     isolate_->heap()->RepairFreeListsAfterDeserialization();
     104       60739 :     isolate_->heap()->IterateWeakRoots(this, VISIT_ALL);
     105       60739 :     DeserializeDeferredObjects();
     106       60739 :     FlushICacheForNewIsolate();
     107       60739 :     RestoreExternalReferenceRedirectors(&accessor_infos_);
     108             :   }
     109             : 
     110             :   isolate_->heap()->set_native_contexts_list(
     111       60739 :       isolate_->heap()->undefined_value());
      112             :   // The allocation site list is built during root iteration, but if no sites
     113             :   // were encountered then it needs to be initialized to undefined.
     114       60739 :   if (isolate_->heap()->allocation_sites_list() == Smi::kZero) {
     115             :     isolate_->heap()->set_allocation_sites_list(
     116       60739 :         isolate_->heap()->undefined_value());
     117             :   }
     118             : 
      119             :   // If needed, print the disassembly of deserialized code objects.
     120             :   PrintDisassembledCodeObjects();
     121             : 
     122             :   // Issue code events for newly deserialized code objects.
     123      121478 :   LOG_CODE_EVENT(isolate_, LogCodeObjects());
     124      121478 :   LOG_CODE_EVENT(isolate_, LogBytecodeHandlers());
     125      121478 :   LOG_CODE_EVENT(isolate_, LogCompiledFunctions());
     126             : 
     127       60739 :   isolate_->builtins()->MarkInitialized();
     128       60739 : }
     129             : 
     130      106878 : MaybeHandle<Object> Deserializer::DeserializePartial(
     131             :     Isolate* isolate, Handle<JSGlobalProxy> global_proxy,
     132             :     v8::DeserializeEmbedderFieldsCallback embedder_fields_deserializer) {
     133      106878 :   Initialize(isolate);
     134      106878 :   if (!ReserveSpace()) {
     135           0 :     V8::FatalProcessOutOfMemory("deserialize context");
     136             :     return MaybeHandle<Object>();
     137             :   }
     138             : 
     139             :   AddAttachedObject(global_proxy);
     140             : 
     141             :   DisallowHeapAllocation no_gc;
     142             :   // Keep track of the code space start and end pointers in case new
      143             :   // code objects were deserialized.
     144      106878 :   OldSpace* code_space = isolate_->heap()->code_space();
     145             :   Address start_address = code_space->top();
     146             :   Object* root;
     147      106878 :   VisitRootPointer(Root::kPartialSnapshotCache, &root);
     148      106878 :   DeserializeDeferredObjects();
     149      106878 :   DeserializeEmbedderFields(embedder_fields_deserializer);
     150             : 
     151      106878 :   isolate->heap()->RegisterReservationsForBlackAllocation(reservations_);
     152             : 
      153             :   // There's no code deserialized here. If this CHECK fires, that has
      154             :   // changed, and logging should be added to notify the profiler et al. of
      155             :   // the new code, which also has to be flushed from the instruction cache.
     156      106878 :   CHECK_EQ(start_address, code_space->top());
     157      106878 :   return Handle<Object>(root, isolate);
     158             : }
     159             : 
     160         385 : MaybeHandle<HeapObject> Deserializer::DeserializeObject(Isolate* isolate) {
     161         385 :   Initialize(isolate);
     162         385 :   if (!ReserveSpace()) {
     163             :     return MaybeHandle<HeapObject>();
     164             :   } else {
     165         385 :     deserializing_user_code_ = true;
     166             :     HandleScope scope(isolate);
     167             :     Handle<HeapObject> result;
     168             :     {
     169             :       DisallowHeapAllocation no_gc;
     170             :       Object* root;
     171         385 :       VisitRootPointer(Root::kPartialSnapshotCache, &root);
     172         385 :       DeserializeDeferredObjects();
     173         385 :       FlushICacheForNewCodeObjectsAndRecordEmbeddedObjects();
     174         770 :       result = Handle<HeapObject>(HeapObject::cast(root));
     175         385 :       isolate->heap()->RegisterReservationsForBlackAllocation(reservations_);
     176             :     }
     177         385 :     CommitPostProcessedObjects(isolate);
     178         385 :     return scope.CloseAndEscape(result);
     179             :   }
     180             : }
     181             : 
     182      336004 : Deserializer::~Deserializer() {
     183             : #ifdef DEBUG
     184             :   // Do not perform checks if we aborted deserialization.
     185             :   if (source_.position() == 0) return;
     186             :   // Check that we only have padding bytes remaining.
     187             :   while (source_.HasMore()) CHECK_EQ(kNop, source_.Get());
     188             :   for (int space = 0; space < kNumberOfPreallocatedSpaces; space++) {
     189             :     int chunk_index = current_chunk_[space];
     190             :     CHECK_EQ(reservations_[space].length(), chunk_index + 1);
     191             :     CHECK_EQ(reservations_[space][chunk_index].end, high_water_[space]);
     192             :   }
     193             :   CHECK_EQ(allocated_maps_.length(), next_map_index_);
     194             : #endif  // DEBUG
     195      168002 : }
     196             : 
     197             : // This is called on the roots.  It is the driver of the deserialization
     198             : // process.  It is also called on the body of each function.
     199   151716914 : void Deserializer::VisitRootPointers(Root root, Object** start, Object** end) {
      200             :   // The space must be new space.  Any other space would cause ReadData to
      201             :   // try to update the remembered set using NULL as the address.
     202   151716914 :   ReadData(start, end, NEW_SPACE, NULL);
     203   151716943 : }
     204             : 
     205     1093302 : void Deserializer::Synchronize(VisitorSynchronization::SyncTag tag) {
     206             :   static const byte expected = kSynchronize;
     207     1093302 :   CHECK_EQ(expected, source_.Get());
     208     1093302 : }
     209             : 
     210      168002 : void Deserializer::DeserializeDeferredObjects() {
     211     7118062 :   for (int code = source_.Get(); code != kSynchronize; code = source_.Get()) {
     212     6950060 :     switch (code) {
     213             :       case kAlignmentPrefix:
     214             :       case kAlignmentPrefix + 1:
     215             :       case kAlignmentPrefix + 2:
     216           0 :         SetAlignment(code);
     217             :         break;
     218             :       default: {
     219     6950060 :         int space = code & kSpaceMask;
     220             :         DCHECK(space <= kNumberOfSpaces);
     221             :         DCHECK(code - space == kNewObject);
     222     6950060 :         HeapObject* object = GetBackReferencedObject(space);
     223     6950060 :         int size = source_.GetInt() << kPointerSizeLog2;
     224     6950060 :         Address obj_address = object->address();
     225     6950060 :         Object** start = reinterpret_cast<Object**>(obj_address + kPointerSize);
     226     6950060 :         Object** end = reinterpret_cast<Object**>(obj_address + size);
     227     6950060 :         bool filled = ReadData(start, end, space, obj_address);
     228     6950062 :         CHECK(filled);
     229             :         DCHECK(CanBeDeferred(object));
     230     6950062 :         PostProcessNewObject(object, space);
     231             :       }
     232             :     }
     233             :   }
     234      168002 : }
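                      : // An illustrative layout of the deferred-objects stream consumed above,
                      : // one record per deferred object, terminated by kSynchronize:
                      : //
                      : //   [kNewObject + space] [back reference int] [size int] [object body ...]
                      : //   ...
                      : //   [kSynchronize]
                      : //
                      : // The back reference resolves to the already-allocated object, whose body
                      : // (everything past the first word) is then filled in by ReadData.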
     235             : 
     236      106878 : void Deserializer::DeserializeEmbedderFields(
     237             :     v8::DeserializeEmbedderFieldsCallback embedder_fields_deserializer) {
     238      320616 :   if (!source_.HasMore() || source_.Get() != kEmbedderFieldsData) return;
     239             :   DisallowHeapAllocation no_gc;
     240          18 :   DisallowJavascriptExecution no_js(isolate_);
     241             :   DisallowCompilation no_compile(isolate_);
     242             :   DCHECK_NOT_NULL(embedder_fields_deserializer.callback);
     243          36 :   for (int code = source_.Get(); code != kSynchronize; code = source_.Get()) {
     244          18 :     HandleScope scope(isolate_);
     245          18 :     int space = code & kSpaceMask;
     246             :     DCHECK(space <= kNumberOfSpaces);
     247             :     DCHECK(code - space == kNewObject);
     248          18 :     Handle<JSObject> obj(JSObject::cast(GetBackReferencedObject(space)),
     249          18 :                          isolate_);
     250          18 :     int index = source_.GetInt();
     251          18 :     int size = source_.GetInt();
     252          18 :     byte* data = new byte[size];
     253          18 :     source_.CopyRaw(data, size);
     254             :     embedder_fields_deserializer.callback(v8::Utils::ToLocal(obj), index,
     255             :                                           {reinterpret_cast<char*>(data), size},
     256          18 :                                           embedder_fields_deserializer.data);
     257          18 :     delete[] data;
     258          18 :   }
     259             : }
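                      : // A minimal sketch of a matching embedder-side callback (names
                      : // hypothetical): it receives the holder object, the embedder field index,
                      : // the raw payload copied out above, and the opaque data pointer.
                      : //
                      : //   void DeserializeMyFields(v8::Local<v8::Object> holder, int index,
                      : //                            v8::StartupData payload, void* data) {
                      : //     // Decode payload.data (payload.raw_size bytes) and re-attach the
                      : //     // native state, e.g. via SetAlignedPointerInInternalField(index, ...).
                      : //   }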
     260             : 
     261           0 : void Deserializer::PrintDisassembledCodeObjects() {
     262             : #ifdef ENABLE_DISASSEMBLER
     263             :   if (FLAG_print_builtin_code) {
     264             :     Heap* heap = isolate_->heap();
     265             :     HeapIterator iterator(heap);
     266             :     DisallowHeapAllocation no_gc;
     267             : 
     268             :     CodeTracer::Scope tracing_scope(isolate_->GetCodeTracer());
     269             :     OFStream os(tracing_scope.file());
     270             : 
     271             :     for (HeapObject* obj = iterator.next(); obj != NULL;
     272             :          obj = iterator.next()) {
     273             :       if (obj->IsCode()) {
     274             :         Code::cast(obj)->Disassemble(nullptr, os);
     275             :       }
     276             :     }
     277             :   }
     278             : #endif
     279           0 : }
     280             : 
     281             : // Used to insert a deserialized internalized string into the string table.
     282        4704 : class StringTableInsertionKey : public HashTableKey {
     283             :  public:
     284       12164 :   explicit StringTableInsertionKey(String* string)
     285       24328 :       : string_(string), hash_(HashForObject(string)) {
     286             :     DCHECK(string->IsInternalizedString());
     287       12164 :   }
     288             : 
     289        6544 :   bool IsMatch(Object* string) override {
     290             :     // We know that all entries in a hash table had their hash keys created.
     291             :     // Use that knowledge to have fast failure.
     292        6544 :     if (hash_ != HashForObject(string)) return false;
     293             :     // We want to compare the content of two internalized strings here.
     294        2762 :     return string_->SlowEquals(String::cast(string));
     295             :   }
     296             : 
     297       16868 :   uint32_t Hash() override { return hash_; }
     298             : 
     299        6544 :   uint32_t HashForObject(Object* key) override {
     300        6544 :     return String::cast(key)->Hash();
     301             :   }
     302             : 
     303        4704 :   MUST_USE_RESULT Handle<Object> AsHandle(Isolate* isolate) override {
     304        9408 :     return handle(string_, isolate);
     305             :   }
     306             : 
     307             :  private:
     308             :   String* string_;
     309             :   uint32_t hash_;
     310             :   DisallowHeapAllocation no_gc;
     311             : };
     312             : 
     313   727460167 : HeapObject* Deserializer::PostProcessNewObject(HeapObject* obj, int space) {
     314   644492664 :   if (deserializing_user_code()) {
     315       38057 :     if (obj->IsString()) {
     316             :       String* string = String::cast(obj);
     317             :       // Uninitialize hash field as the hash seed may have changed.
     318             :       string->set_hash_field(String::kEmptyHashField);
     319        8243 :       if (string->IsInternalizedString()) {
     320             :         // Canonicalize the internalized string. If it already exists in the
     321             :         // string table, set it to forward to the existing one.
     322        7460 :         StringTableInsertionKey key(string);
     323     3467867 :         String* canonical = StringTable::LookupKeyIfExists(isolate_, &key);
     324        7460 :         if (canonical == NULL) {
     325        4704 :           new_internalized_strings_.Add(handle(string));
     326        4704 :           return string;
     327             :         } else {
     328             :           string->SetForwardedInternalizedString(canonical);
     329        2756 :           return canonical;
     330             :         }
     331             :       }
     332       29814 :     } else if (obj->IsScript()) {
     333         373 :       new_scripts_.Add(handle(Script::cast(obj)));
     334             :     } else {
     335             :       DCHECK(CanBeDeferred(obj));
     336             :     }
     337             :   }
     338   644486376 :   if (obj->IsAllocationSite()) {
     339             :     DCHECK(obj->IsAllocationSite());
     340             :     // Allocation sites are present in the snapshot, and must be linked into
     341             :     // a list at deserialization time.
     342             :     AllocationSite* site = AllocationSite::cast(obj);
     343             :     // TODO(mvstanton): consider treating the heap()->allocation_sites_list()
     344             :     // as a (weak) root. If this root is relocated correctly, this becomes
     345             :     // unnecessary.
     346      320598 :     if (isolate_->heap()->allocation_sites_list() == Smi::kZero) {
     347           0 :       site->set_weak_next(isolate_->heap()->undefined_value());
     348             :     } else {
     349      320598 :       site->set_weak_next(isolate_->heap()->allocation_sites_list());
     350             :     }
     351      320598 :     isolate_->heap()->set_allocation_sites_list(site);
     352   644176239 :   } else if (obj->IsCode()) {
     353             :     // We flush all code pages after deserializing the startup snapshot. In that
     354             :     // case, we only need to remember code objects in the large object space.
     355             :     // When deserializing user code, remember each individual code object.
     356    82967503 :     if (deserializing_user_code() || space == LO_SPACE) {
     357         497 :       new_code_objects_.Add(Code::cast(obj));
     358             :     }
     359   561204173 :   } else if (obj->IsAccessorInfo()) {
     360     6920814 :     if (isolate_->external_reference_redirector()) {
     361           0 :       accessor_infos_.Add(AccessorInfo::cast(obj));
     362             :     }
     363   557743384 :   } else if (obj->IsExternalOneByteString()) {
     364             :     DCHECK(obj->map() == isolate_->heap()->native_source_string_map());
     365             :     ExternalOneByteString* string = ExternalOneByteString::cast(obj);
     366             :     DCHECK(string->is_short());
     367             :     string->set_resource(
     368             :         NativesExternalStringResource::DecodeForDeserialization(
     369      971439 :             string->resource()));
     370      971440 :     isolate_->heap()->RegisterExternalString(string);
     371             :   }
     372             :   // Check alignment.
     373             :   DCHECK_EQ(0, Heap::GetFillToAlign(obj->address(), obj->RequiredAlignment()));
     374   644472539 :   return obj;
     375             : }
     376             : 
     377         385 : void Deserializer::CommitPostProcessedObjects(Isolate* isolate) {
     378             :   StringTable::EnsureCapacityForDeserialization(
     379         385 :       isolate, new_internalized_strings_.length());
     380        5474 :   for (Handle<String> string : new_internalized_strings_) {
     381        4704 :     StringTableInsertionKey key(*string);
     382             :     DCHECK_NULL(StringTable::LookupKeyIfExists(isolate, &key));
     383        4704 :     StringTable::LookupKey(isolate, &key);
     384             :   }
     385             : 
     386             :   Heap* heap = isolate->heap();
     387             :   Factory* factory = isolate->factory();
     388        1143 :   for (Handle<Script> script : new_scripts_) {
     389             :     // Assign a new script id to avoid collision.
     390         373 :     script->set_id(isolate_->heap()->NextScriptId());
     391             :     // Add script to list.
     392         373 :     Handle<Object> list = WeakFixedArray::Add(factory->script_list(), script);
     393             :     heap->SetRootScriptList(*list);
     394             :   }
     395         385 : }
     396             : 
     397  1517557412 : HeapObject* Deserializer::GetBackReferencedObject(int space) {
     398             :   HeapObject* obj;
     399             :   SerializerReference back_reference =
     400   758777998 :       SerializerReference::FromBitfield(source_.GetInt());
     401   758779414 :   if (space == LO_SPACE) {
     402             :     uint32_t index = back_reference.large_object_index();
     403          12 :     obj = deserialized_large_objects_[index];
     404   758779408 :   } else if (space == MAP_SPACE) {
     405    50018189 :     int index = back_reference.map_index();
     406             :     DCHECK(index < next_map_index_);
     407   100036378 :     obj = HeapObject::FromAddress(allocated_maps_[index]);
     408             :   } else {
     409             :     DCHECK(space < kNumberOfPreallocatedSpaces);
     410             :     uint32_t chunk_index = back_reference.chunk_index();
     411             :     DCHECK_LE(chunk_index, current_chunk_[space]);
     412             :     uint32_t chunk_offset = back_reference.chunk_offset();
     413  1417522438 :     Address address = reservations_[space][chunk_index].start + chunk_offset;
     414   708761219 :     if (next_alignment_ != kWordAligned) {
     415           0 :       int padding = Heap::GetFillToAlign(address, next_alignment_);
     416           0 :       next_alignment_ = kWordAligned;
     417             :       DCHECK(padding == 0 || HeapObject::FromAddress(address)->IsFiller());
     418           0 :       address += padding;
     419             :     }
     420   708761219 :     obj = HeapObject::FromAddress(address);
     421             :   }
     422   758795120 :   if (deserializing_user_code() && obj->IsInternalizedString()) {
     423             :     obj = String::cast(obj)->GetForwardedInternalizedString();
     424             :   }
     425             :   hot_objects_.Add(obj);
     426   758779414 :   return obj;
     427             : }
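                      : // Back references thus take three forms: a large-object index for
                      : // LO_SPACE, a map index into allocated_maps_ for MAP_SPACE, and a
                      : // (chunk_index, chunk_offset) pair into the reserved chunks otherwise.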
     428             : 
      429             : // This routine writes the new object back through the pointer provided
      430             : // rather than returning it. The reason for this interface is that otherwise
      431             : // the object is written very late, which means the FreeSpace map is not set
      432             : // up by the time we need to use it to mark the space at the end of a page
      433             : // free.
     434   644495992 : void Deserializer::ReadObject(int space_number, Object** write_back) {
     435             :   Address address;
     436             :   HeapObject* obj;
     437   644495992 :   int size = source_.GetInt() << kObjectAlignmentBits;
     438             : 
     439   644497928 :   if (next_alignment_ != kWordAligned) {
     440           0 :     int reserved = size + Heap::GetMaximumFillToAlign(next_alignment_);
     441           0 :     address = Allocate(space_number, reserved);
     442           0 :     obj = HeapObject::FromAddress(address);
     443             :     // If one of the following assertions fails, then we are deserializing an
     444             :     // aligned object when the filler maps have not been deserialized yet.
     445             :     // We require filler maps as padding to align the object.
     446           0 :     Heap* heap = isolate_->heap();
     447             :     DCHECK(heap->free_space_map()->IsMap());
     448             :     DCHECK(heap->one_pointer_filler_map()->IsMap());
     449             :     DCHECK(heap->two_pointer_filler_map()->IsMap());
     450           0 :     obj = heap->AlignWithFiller(obj, size, reserved, next_alignment_);
     451           0 :     address = obj->address();
     452           0 :     next_alignment_ = kWordAligned;
     453             :   } else {
     454   644497928 :     address = Allocate(space_number, size);
     455   644502821 :     obj = HeapObject::FromAddress(address);
     456             :   }
     457             : 
     458   644502821 :   isolate_->heap()->OnAllocationEvent(obj, size);
     459             :   Object** current = reinterpret_cast<Object**>(address);
     460   644504766 :   Object** limit = current + (size >> kPointerSizeLog2);
     461             : 
     462   644504766 :   if (ReadData(current, limit, space_number, address)) {
     463             :     // Only post process if object content has not been deferred.
     464   637537278 :     obj = PostProcessNewObject(obj, space_number);
     465             :   }
     466             : 
     467   644479815 :   Object* write_back_obj = obj;
     468             :   UnalignedCopy(write_back, &write_back_obj);
     469             : #ifdef DEBUG
     470             :   if (obj->IsCode()) {
     471             :     DCHECK(space_number == CODE_SPACE || space_number == LO_SPACE);
     472             :   } else {
     473             :     DCHECK(space_number != CODE_SPACE);
     474             :   }
     475             : #endif  // DEBUG
     476   644479815 : }
     477             : 
     478             : // We know the space requirements before deserialization and can
     479             : // pre-allocate that reserved space. During deserialization, all we need
     480             : // to do is to bump up the pointer for each space in the reserved
     481             : // space. This is also used for fixing back references.
     482             : // We may have to split up the pre-allocation into several chunks
     483             : // because it would not fit onto a single page. We do not have to keep
     484             : // track of when to move to the next chunk. An opcode will signal this.
     485             : // Since multiple large objects cannot be folded into one large object
     486             : // space allocation, we have to do an actual allocation when deserializing
     487             : // each large object. Instead of tracking offset for back references, we
     488             : // reference large objects by index.
     489   644501032 : Address Deserializer::Allocate(int space_index, int size) {
     490   644501032 :   if (space_index == LO_SPACE) {
     491          42 :     AlwaysAllocateScope scope(isolate_);
     492          42 :     LargeObjectSpace* lo_space = isolate_->heap()->lo_space();
     493          42 :     Executability exec = static_cast<Executability>(source_.Get());
     494          42 :     AllocationResult result = lo_space->AllocateRaw(size, exec);
     495          42 :     HeapObject* obj = result.ToObjectChecked();
     496          42 :     deserialized_large_objects_.Add(obj);
     497          42 :     return obj->address();
     498   644500990 :   } else if (space_index == MAP_SPACE) {
     499             :     DCHECK_EQ(Map::kSize, size);
     500   104019164 :     return allocated_maps_[next_map_index_++];
     501             :   } else {
     502             :     DCHECK(space_index < kNumberOfPreallocatedSpaces);
     503   592491408 :     Address address = high_water_[space_index];
     504             :     DCHECK_NOT_NULL(address);
     505   592491408 :     high_water_[space_index] += size;
     506             : #ifdef DEBUG
     507             :     // Assert that the current reserved chunk is still big enough.
     508             :     const Heap::Reservation& reservation = reservations_[space_index];
     509             :     int chunk_index = current_chunk_[space_index];
     510             :     CHECK_LE(high_water_[space_index], reservation[chunk_index].end);
     511             : #endif
     512   592491408 :     if (space_index == CODE_SPACE) SkipList::Update(address, size);
     513   592493725 :     return address;
     514             :   }
     515             : }
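                      : // An illustrative trace of the bump allocation above (addresses
                      : // hypothetical): with high_water_[OLD_SPACE] == 0x1000,
                      : // Allocate(OLD_SPACE, 0x50) returns 0x1000 and advances the high-water
                      : // mark to 0x1050; a later kNextChunk opcode (see ReadData below) resets
                      : // it to the start of the next reserved chunk.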
     516             : 
     517   801916281 : bool Deserializer::ReadData(Object** current, Object** limit, int source_space,
     518             :                             Address current_object_address) {
     519   801916281 :   Isolate* const isolate = isolate_;
     520             :   // Write barrier support costs around 1% in startup time.  In fact there
     521             :   // are no new space objects in current boot snapshots, so it's not needed,
     522             :   // but that may change.
     523             :   bool write_barrier_needed =
     524   801916281 :       (current_object_address != NULL && source_space != NEW_SPACE &&
     525             :        source_space != CODE_SPACE);
     526  7275739646 :   while (current < limit) {
     527             :     byte data = source_.Get();
     528  5677641579 :     switch (data) {
     529             : #define CASE_STATEMENT(where, how, within, space_number) \
     530             :   case where + how + within + space_number:              \
     531             :     STATIC_ASSERT((where & ~kWhereMask) == 0);           \
     532             :     STATIC_ASSERT((how & ~kHowToCodeMask) == 0);         \
     533             :     STATIC_ASSERT((within & ~kWhereToPointMask) == 0);   \
     534             :     STATIC_ASSERT((space_number & ~kSpaceMask) == 0);
     535             : 
     536             : #define CASE_BODY(where, how, within, space_number_if_any)                     \
     537             :   {                                                                            \
     538             :     bool emit_write_barrier = false;                                           \
     539             :     bool current_was_incremented = false;                                      \
     540             :     int space_number = space_number_if_any == kAnyOldSpace                     \
     541             :                            ? (data & kSpaceMask)                               \
     542             :                            : space_number_if_any;                              \
     543             :     if (where == kNewObject && how == kPlain && within == kStartOfObject) {    \
     544             :       ReadObject(space_number, current);                                       \
     545             :       emit_write_barrier = (space_number == NEW_SPACE);                        \
     546             :     } else {                                                                   \
     547             :       Object* new_object = NULL; /* May not be a real Object pointer. */       \
     548             :       if (where == kNewObject) {                                               \
     549             :         ReadObject(space_number, &new_object);                                 \
     550             :       } else if (where == kBackref) {                                          \
     551             :         emit_write_barrier = (space_number == NEW_SPACE);                      \
     552             :         new_object = GetBackReferencedObject(data & kSpaceMask);               \
     553             :       } else if (where == kBackrefWithSkip) {                                  \
     554             :         int skip = source_.GetInt();                                           \
     555             :         current = reinterpret_cast<Object**>(                                  \
     556             :             reinterpret_cast<Address>(current) + skip);                        \
     557             :         emit_write_barrier = (space_number == NEW_SPACE);                      \
     558             :         new_object = GetBackReferencedObject(data & kSpaceMask);               \
     559             :       } else if (where == kRootArray) {                                        \
     560             :         int id = source_.GetInt();                                             \
     561             :         Heap::RootListIndex root_index = static_cast<Heap::RootListIndex>(id); \
     562             :         new_object = isolate->heap()->root(root_index);                        \
     563             :         emit_write_barrier = isolate->heap()->InNewSpace(new_object);          \
     564             :         hot_objects_.Add(HeapObject::cast(new_object));                        \
     565             :       } else if (where == kPartialSnapshotCache) {                             \
     566             :         int cache_index = source_.GetInt();                                    \
     567             :         new_object = isolate->partial_snapshot_cache()->at(cache_index);       \
     568             :         emit_write_barrier = isolate->heap()->InNewSpace(new_object);          \
     569             :       } else if (where == kExternalReference) {                                \
     570             :         int skip = source_.GetInt();                                           \
     571             :         current = reinterpret_cast<Object**>(                                  \
     572             :             reinterpret_cast<Address>(current) + skip);                        \
     573             :         uint32_t reference_id = static_cast<uint32_t>(source_.GetInt());       \
     574             :         Address address = external_reference_table_->address(reference_id);    \
     575             :         new_object = reinterpret_cast<Object*>(address);                       \
     576             :       } else if (where == kAttachedReference) {                                \
     577             :         int index = source_.GetInt();                                          \
     578             :         new_object = *attached_objects_[index];                                \
     579             :         emit_write_barrier = isolate->heap()->InNewSpace(new_object);          \
     580             :       } else {                                                                 \
     581             :         DCHECK(where == kBuiltin);                                             \
     582             :         DCHECK(deserializing_user_code());                                     \
     583             :         int builtin_id = source_.GetInt();                                     \
     584             :         DCHECK_LE(0, builtin_id);                                              \
     585             :         DCHECK_LT(builtin_id, Builtins::builtin_count);                        \
     586             :         Builtins::Name name = static_cast<Builtins::Name>(builtin_id);         \
     587             :         new_object = isolate->builtins()->builtin(name);                       \
     588             :         emit_write_barrier = false;                                            \
     589             :       }                                                                        \
     590             :       if (within == kInnerPointer) {                                           \
     591             :         if (new_object->IsCode()) {                                            \
     592             :           Code* new_code_object = Code::cast(new_object);                      \
     593             :           new_object =                                                         \
     594             :               reinterpret_cast<Object*>(new_code_object->instruction_start()); \
     595             :         } else {                                                               \
     596             :           Cell* cell = Cell::cast(new_object);                                 \
     597             :           new_object = reinterpret_cast<Object*>(cell->ValueAddress());        \
     598             :         }                                                                      \
     599             :       }                                                                        \
     600             :       if (how == kFromCode) {                                                  \
     601             :         Address location_of_branch_data = reinterpret_cast<Address>(current);  \
     602             :         Assembler::deserialization_set_special_target_at(                      \
     603             :             isolate, location_of_branch_data,                                  \
     604             :             Code::cast(HeapObject::FromAddress(current_object_address)),       \
     605             :             reinterpret_cast<Address>(new_object));                            \
     606             :         location_of_branch_data += Assembler::kSpecialTargetSize;              \
     607             :         current = reinterpret_cast<Object**>(location_of_branch_data);         \
     608             :         current_was_incremented = true;                                        \
     609             :       } else {                                                                 \
     610             :         UnalignedCopy(current, &new_object);                                   \
     611             :       }                                                                        \
     612             :     }                                                                          \
     613             :     if (emit_write_barrier && write_barrier_needed) {                          \
     614             :       Address current_address = reinterpret_cast<Address>(current);            \
     615             :       SLOW_DCHECK(isolate->heap()->ContainsSlow(current_object_address));      \
     616             :       isolate->heap()->RecordWrite(                                            \
     617             :           HeapObject::FromAddress(current_object_address),                     \
     618             :           static_cast<int>(current_address - current_object_address),          \
     619             :           *reinterpret_cast<Object**>(current_address));                       \
     620             :     }                                                                          \
     621             :     if (!current_was_incremented) {                                            \
     622             :       current++;                                                               \
     623             :     }                                                                          \
     624             :     break;                                                                     \
     625             :   }
     626             : 
     627             : // This generates a case and a body for the new space (which has to do extra
     628             : // write barrier handling) and handles the other spaces with fall-through cases
     629             : // and one body.
     630             : #define ALL_SPACES(where, how, within)           \
     631             :   CASE_STATEMENT(where, how, within, NEW_SPACE)  \
     632             :   CASE_BODY(where, how, within, NEW_SPACE)       \
     633             :   CASE_STATEMENT(where, how, within, OLD_SPACE)  \
     634             :   CASE_STATEMENT(where, how, within, CODE_SPACE) \
     635             :   CASE_STATEMENT(where, how, within, MAP_SPACE)  \
     636             :   CASE_STATEMENT(where, how, within, LO_SPACE)   \
     637             :   CASE_BODY(where, how, within, kAnyOldSpace)
     638             : 
     639             : #define FOUR_CASES(byte_code) \
     640             :   case byte_code:             \
     641             :   case byte_code + 1:         \
     642             :   case byte_code + 2:         \
     643             :   case byte_code + 3:
     644             : 
     645             : #define SIXTEEN_CASES(byte_code) \
     646             :   FOUR_CASES(byte_code)          \
     647             :   FOUR_CASES(byte_code + 4)      \
     648             :   FOUR_CASES(byte_code + 8)      \
     649             :   FOUR_CASES(byte_code + 12)
     650             : 
     651             : #define SINGLE_CASE(where, how, within, space) \
     652             :   CASE_STATEMENT(where, how, within, space)    \
     653             :   CASE_BODY(where, how, within, space)
     654             : 
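                      : // For orientation, a hand-expanded sketch of one instantiation,
                      : // SINGLE_CASE(kRootArray, kPlain, kStartOfObject, 0), abridged from
                      : // CASE_BODY above:
                      : //
                      : //   case kRootArray + kPlain + kStartOfObject + 0: {
                      : //     int id = source_.GetInt();
                      : //     Heap::RootListIndex root_index =
                      : //         static_cast<Heap::RootListIndex>(id);
                      : //     Object* new_object = isolate->heap()->root(root_index);
                      : //     bool emit_write_barrier = isolate->heap()->InNewSpace(new_object);
                      : //     hot_objects_.Add(HeapObject::cast(new_object));
                      : //     UnalignedCopy(current, &new_object);
                      : //     if (emit_write_barrier && write_barrier_needed) { /* RecordWrite */ }
                      : //     current++;
                      : //     break;
                      : //   }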
     655             :       // Deserialize a new object and write a pointer to it to the current
     656             :       // object.
     657      323420 :       ALL_SPACES(kNewObject, kPlain, kStartOfObject)
     658             :       // Support for direct instruction pointers in functions.  It's an inner
     659             :       // pointer because it points at the entry point, not at the start of the
     660             :       // code object.
     661           0 :       SINGLE_CASE(kNewObject, kPlain, kInnerPointer, CODE_SPACE)
     662             :       // Support for pointers into a cell. It's an inner pointer because it
     663             :       // points directly at the value field, not the start of the cell object.
     664          18 :       SINGLE_CASE(kNewObject, kPlain, kInnerPointer, OLD_SPACE)
     665             :       // Deserialize a new code object and write a pointer to its first
     666             :       // instruction to the current code object.
     667     9959514 :       ALL_SPACES(kNewObject, kFromCode, kInnerPointer)
     668             :       // Find a recently deserialized object using its offset from the current
     669             :       // allocation point and write a pointer to it to the current object.
     670   425178150 :       ALL_SPACES(kBackref, kPlain, kStartOfObject)
     671     4677581 :       ALL_SPACES(kBackrefWithSkip, kPlain, kStartOfObject)
     672             : #if V8_CODE_EMBEDS_OBJECT_POINTER
     673             :       // Deserialize a new object from pointer found in code and write
     674             :       // a pointer to it to the current object. Required only for MIPS, PPC, ARM
     675             :       // or S390 with embedded constant pool, and omitted on the other
     676             :       // architectures because it is fully unrolled and would cause bloat.
     677             :       ALL_SPACES(kNewObject, kFromCode, kStartOfObject)
     678             :       // Find a recently deserialized code object using its offset from the
     679             :       // current allocation point and write a pointer to it to the current
     680             :       // object. Required only for MIPS, PPC, ARM or S390 with embedded
     681             :       // constant pool.
     682             :       ALL_SPACES(kBackref, kFromCode, kStartOfObject)
     683             :       ALL_SPACES(kBackrefWithSkip, kFromCode, kStartOfObject)
     684             : #endif
     685             :       // Find a recently deserialized code object using its offset from the
     686             :       // current allocation point and write a pointer to its first instruction
     687             :       // to the current code object or the instruction pointer in a function
     688             :       // object.
     689           0 :       ALL_SPACES(kBackref, kFromCode, kInnerPointer)
     690   644003341 :       ALL_SPACES(kBackrefWithSkip, kFromCode, kInnerPointer)
     691             :       // Support for direct instruction pointers in functions.
     692           0 :       SINGLE_CASE(kBackref, kPlain, kInnerPointer, CODE_SPACE)
     693           0 :       SINGLE_CASE(kBackrefWithSkip, kPlain, kInnerPointer, CODE_SPACE)
     694             :       // Support for pointers into a cell.
     695           0 :       SINGLE_CASE(kBackref, kPlain, kInnerPointer, OLD_SPACE)
     696           0 :       SINGLE_CASE(kBackrefWithSkip, kPlain, kInnerPointer, OLD_SPACE)
     697             :       // Find an object in the roots array and write a pointer to it to the
     698             :       // current object.
     699   470566902 :       SINGLE_CASE(kRootArray, kPlain, kStartOfObject, 0)
     700             : #if V8_CODE_EMBEDS_OBJECT_POINTER
      701             :       // Find an object in the roots array and write a pointer to it in code.
     702             :       SINGLE_CASE(kRootArray, kFromCode, kStartOfObject, 0)
     703             : #endif
     704             :       // Find an object in the partial snapshots cache and write a pointer to it
     705             :       // to the current object.
     706   557700374 :       SINGLE_CASE(kPartialSnapshotCache, kPlain, kStartOfObject, 0)
      707             :       // Find a code entry in the partial snapshots cache and
     708             :       // write a pointer to it to the current object.
     709   321265910 :       SINGLE_CASE(kPartialSnapshotCache, kPlain, kInnerPointer, 0)
     710             :       // Find an external reference and write a pointer to it to the current
     711             :       // object.
     712  1184821252 :       SINGLE_CASE(kExternalReference, kPlain, kStartOfObject, 0)
     713             :       // Find an external reference and write a pointer to it in the current
     714             :       // code object.
     715           0 :       SINGLE_CASE(kExternalReference, kFromCode, kStartOfObject, 0)
     716             :       // Find an object in the attached references and write a pointer to it to
     717             :       // the current object.
     718      963330 :       SINGLE_CASE(kAttachedReference, kPlain, kStartOfObject, 0)
     719           0 :       SINGLE_CASE(kAttachedReference, kPlain, kInnerPointer, 0)
     720           0 :       SINGLE_CASE(kAttachedReference, kFromCode, kStartOfObject, 0)
     721        3129 :       SINGLE_CASE(kAttachedReference, kFromCode, kInnerPointer, 0)
     722             :       // Find a builtin and write a pointer to it to the current object.
     723       29658 :       SINGLE_CASE(kBuiltin, kPlain, kStartOfObject, 0)
     724           0 :       SINGLE_CASE(kBuiltin, kPlain, kInnerPointer, 0)
     725        4044 :       SINGLE_CASE(kBuiltin, kFromCode, kInnerPointer, 0)
     726             : 
     727             : #undef CASE_STATEMENT
     728             : #undef CASE_BODY
     729             : #undef ALL_SPACES
     730             : 
     731             :       case kSkip: {
     732   178932420 :         int size = source_.GetInt();
     733             :         current = reinterpret_cast<Object**>(
     734   178932548 :             reinterpret_cast<intptr_t>(current) + size);
     735   178932548 :         break;
     736             :       }
     737             : 
     738             :       case kDeoptimizerEntryFromCode:
     739             :       case kDeoptimizerEntryPlain: {
     740    14030659 :         int skip = source_.GetInt();
     741             :         current = reinterpret_cast<Object**>(
     742    14030642 :             reinterpret_cast<intptr_t>(current) + skip);
     743             :         Deoptimizer::BailoutType bailout_type =
     744    14030642 :             static_cast<Deoptimizer::BailoutType>(source_.Get());
     745    14030642 :         int entry_id = source_.GetInt();
     746             :         HandleScope scope(isolate);
     747             :         Address address = Deoptimizer::GetDeoptimizationEntry(
     748    14030610 :             isolate_, entry_id, bailout_type, Deoptimizer::ENSURE_ENTRY_CODE);
     749    14030714 :         if (data == kDeoptimizerEntryFromCode) {
     750             :           Address location_of_branch_data = reinterpret_cast<Address>(current);
     751             :           Assembler::deserialization_set_special_target_at(
     752             :               isolate, location_of_branch_data,
     753             :               Code::cast(HeapObject::FromAddress(current_object_address)),
     754             :               address);
     755             :           location_of_branch_data += Assembler::kSpecialTargetSize;
     756      911140 :           current = reinterpret_cast<Object**>(location_of_branch_data);
     757             :         } else {
     758    13119574 :           Object* new_object = reinterpret_cast<Object*>(address);
     759             :           UnalignedCopy(current, &new_object);
     760    13119574 :           current++;
     761             :         }
     762             :         break;
     763             :       }
     764             : 
     765             :       case kInternalReferenceEncoded:
     766             :       case kInternalReference: {
     767             :         // Internal reference address is not encoded via skip, but by offset
     768             :         // from code entry.
     769    90135120 :         int pc_offset = source_.GetInt();
     770    90135110 :         int target_offset = source_.GetInt();
     771             :         Code* code =
     772             :             Code::cast(HeapObject::FromAddress(current_object_address));
     773             :         DCHECK(0 <= pc_offset && pc_offset <= code->instruction_size());
     774             :         DCHECK(0 <= target_offset && target_offset <= code->instruction_size());
     775    90135114 :         Address pc = code->entry() + pc_offset;
     776    90135114 :         Address target = code->entry() + target_offset;
     777             :         Assembler::deserialization_set_target_internal_reference_at(
     778             :             isolate, pc, target, data == kInternalReference
     779             :                                      ? RelocInfo::INTERNAL_REFERENCE
     780             :                                      : RelocInfo::INTERNAL_REFERENCE_ENCODED);
     781             :         break;
     782             :       }
     783             : 
     784             :       case kNop:
     785             :         break;
     786             : 
     787             :       case kNextChunk: {
     788      121556 :         int space = source_.Get();
     789             :         DCHECK(space < kNumberOfPreallocatedSpaces);
     790      121556 :         int chunk_index = current_chunk_[space];
     791      486224 :         const Heap::Reservation& reservation = reservations_[space];
     792             :         // Make sure the current chunk is indeed exhausted.
     793      243112 :         CHECK_EQ(reservation[chunk_index].end, high_water_[space]);
     794             :         // Move to next reserved chunk.
     795      121556 :         chunk_index = ++current_chunk_[space];
     796      121556 :         CHECK_LT(chunk_index, reservation.length());
     797      121556 :         high_water_[space] = reservation[chunk_index].start;
     798      121556 :         break;
     799             :       }
     800             : 
     801             :       case kDeferred: {
     802             :         // Deferred can only occur right after the heap object header.
     803             :         DCHECK(current == reinterpret_cast<Object**>(current_object_address +
     804             :                                                      kPointerSize));
     805     6950062 :         HeapObject* obj = HeapObject::FromAddress(current_object_address);
     806             :         // If the deferred object is a map, its instance type may be used
     807             :         // during deserialization. Initialize it with a temporary value.
     808     6950061 :         if (obj->IsMap()) Map::cast(obj)->set_instance_type(FILLER_TYPE);
     809             :         current = limit;
     810             :         return false;
     811             :       }
     812             : 
     813             :       case kSynchronize:
      814             :         // Reaching this point indicates a mismatch between the number of
      815             :         // GC roots when serializing and when deserializing.
     816           0 :         CHECK(false);
     817             :         break;
     818             : 
     819             :       // Deserialize raw data of variable length.
     820             :       case kVariableRawData: {
     821    84485983 :         int size_in_bytes = source_.GetInt();
     822             :         byte* raw_data_out = reinterpret_cast<byte*>(current);
     823    84486053 :         source_.CopyRaw(raw_data_out, size_in_bytes);
     824    84487100 :         break;
     825             :       }
     826             : 
     827             :       case kVariableRepeat: {
     828      898934 :         int repeats = source_.GetInt();
     829      898934 :         Object* object = current[-1];
     830             :         DCHECK(!isolate->heap()->InNewSpace(object));
     831   280876207 :         for (int i = 0; i < repeats; i++) UnalignedCopy(current++, &object);
     832             :         break;
     833             :       }
     834             : 
     835             :       case kAlignmentPrefix:
     836             :       case kAlignmentPrefix + 1:
     837             :       case kAlignmentPrefix + 2:
     838             :         SetAlignment(data);
     839             :         break;
     840             : 
     841             :       STATIC_ASSERT(kNumberOfRootArrayConstants == Heap::kOldSpaceRoots);
     842             :       STATIC_ASSERT(kNumberOfRootArrayConstants == 32);
     843             :       SIXTEEN_CASES(kRootArrayConstantsWithSkip)
     844             :       SIXTEEN_CASES(kRootArrayConstantsWithSkip + 16) {
     845      121777 :         int skip = source_.GetInt();
     846             :         current = reinterpret_cast<Object**>(
     847      121777 :             reinterpret_cast<intptr_t>(current) + skip);
     848             :         // Fall through.
     849             :       }
     850             : 
     851             :       SIXTEEN_CASES(kRootArrayConstants)
     852             :       SIXTEEN_CASES(kRootArrayConstants + 16) {
     853  1480687008 :         int id = data & kRootArrayConstantsMask;
     854             :         Heap::RootListIndex root_index = static_cast<Heap::RootListIndex>(id);
     855  1480687008 :         Object* object = isolate->heap()->root(root_index);
     856             :         DCHECK(!isolate->heap()->InNewSpace(object));
     857  1480687008 :         UnalignedCopy(current++, &object);
     858             :         break;
     859             :       }
     860             : 
     861             :       STATIC_ASSERT(kNumberOfHotObjects == 8);
     862             :       FOUR_CASES(kHotObjectWithSkip)
     863             :       FOUR_CASES(kHotObjectWithSkip + 4) {
     864      850574 :         int skip = source_.GetInt();
     865             :         current = reinterpret_cast<Object**>(
     866      850574 :             reinterpret_cast<Address>(current) + skip);
     867             :         // Fall through.
     868             :       }
     869             : 
     870             :       FOUR_CASES(kHotObject)
     871             :       FOUR_CASES(kHotObject + 4) {
     872   552990916 :         int index = data & kHotObjectMask;
     873   552990916 :         Object* hot_object = hot_objects_.Get(index);
     874             :         UnalignedCopy(current, &hot_object);
     875  1028872613 :         if (write_barrier_needed && isolate->heap()->InNewSpace(hot_object)) {
     876             :           Address current_address = reinterpret_cast<Address>(current);
     877             :           isolate->heap()->RecordWrite(
     878           0 :               HeapObject::FromAddress(current_object_address),
     879           0 :               static_cast<int>(current_address - current_object_address),
     880           0 :               hot_object);
     881             :         }
     882   555506919 :         current++;
     883             :         break;
     884             :       }
     885             : 
     886             :       // Deserialize raw data of fixed length from 1 to 32 words.
     887             :       STATIC_ASSERT(kNumberOfFixedRawData == 32);
     888             :       SIXTEEN_CASES(kFixedRawData)
     889             :       SIXTEEN_CASES(kFixedRawData + 16) {
     890             :         byte* raw_data_out = reinterpret_cast<byte*>(current);
     891   726489767 :         int size_in_bytes = (data - kFixedRawDataStart) << kPointerSizeLog2;
     892   726489767 :         source_.CopyRaw(raw_data_out, size_in_bytes);
     893   726489163 :         current = reinterpret_cast<Object**>(raw_data_out + size_in_bytes);
     894   726489163 :         break;
     895             :       }
     896             : 
     897             :       STATIC_ASSERT(kNumberOfFixedRepeat == 16);
     898             :       SIXTEEN_CASES(kFixedRepeat) {
     899   326032171 :         int repeats = data - kFixedRepeatStart;
     900             :         Object* object;
     901   326032171 :         UnalignedCopy(&object, current - 1);
     902             :         DCHECK(!isolate->heap()->InNewSpace(object));
     903   773641847 :         for (int i = 0; i < repeats; i++) UnalignedCopy(current++, &object);
     904             :         break;
     905             :       }
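                      : // Illustrative decoding: a stream byte of kFixedRepeatStart + 3 copies
                      : // the word just written at current[-1] into the next three slots, so the
                      : // same value ends up in four consecutive pointer-sized slots in total.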
     906             : 
     907             : #undef SIXTEEN_CASES
     908             : #undef FOUR_CASES
     909             : #undef SINGLE_CASE
     910             : 
     911             :       default:
     912           0 :         CHECK(false);
     913             :     }
     914             :   }
     915   796181786 :   CHECK_EQ(limit, current);
     916             :   return true;
     917             : }
     918             : }  // namespace internal
     919             : }  // namespace v8

Generated by: LCOV version 1.10