LCOV - code coverage report
Current view: top level - src/snapshot - deserializer.cc (source / functions) Hit Total Coverage
Test: app.info Lines: 222 270 82.2 %
Date: 2019-01-20 Functions: 27 47 57.4 %

          Line data    Source code
       1             : // Copyright 2016 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #include "src/snapshot/deserializer.h"
       6             : 
       7             : #include "src/assembler-inl.h"
       8             : #include "src/heap/heap-write-barrier-inl.h"
       9             : #include "src/interpreter/interpreter.h"
      10             : #include "src/isolate.h"
      11             : #include "src/log.h"
      12             : #include "src/objects/api-callbacks.h"
      13             : #include "src/objects/cell-inl.h"
      14             : #include "src/objects/hash-table.h"
      15             : #include "src/objects/js-array-buffer-inl.h"
      16             : #include "src/objects/js-array-inl.h"
      17             : #include "src/objects/maybe-object.h"
      18             : #include "src/objects/slots.h"
      19             : #include "src/objects/smi.h"
      20             : #include "src/objects/string.h"
      21             : #include "src/snapshot/natives.h"
      22             : #include "src/snapshot/snapshot.h"
      23             : 
      24             : namespace v8 {
      25             : namespace internal {
      26             : 
      27             : // This is like a MaybeObjectSlot, except it doesn't enforce alignment.
      28             : // Most slots used below are aligned, but when writing into Code objects,
      29             : // they might not be, hence the use of UnalignedSlot and UnalignedCopy.
      30             : class UnalignedSlot {
      31             :  public:
      32             :   explicit UnalignedSlot(ObjectSlot slot) : ptr_(slot.address()) {}
      33  1278619968 :   explicit UnalignedSlot(Address address) : ptr_(address) {}
      34             :   explicit UnalignedSlot(MaybeObject* slot)
      35             :       : ptr_(reinterpret_cast<Address>(slot)) {}
      36             :   explicit UnalignedSlot(Object* slot)
      37           0 :       : ptr_(reinterpret_cast<Address>(slot)) {}
      38             : 
      39             :   inline bool operator<(const UnalignedSlot& other) const {
      40  3682790159 :     return ptr_ < other.ptr_;
      41             :   }
      42   639076317 :   inline bool operator==(const UnalignedSlot& other) const {
      43   639076317 :     return ptr_ == other.ptr_;
      44             :   }
      45             : 
      46  3286920575 :   inline void Advance(int bytes = kPointerSize) { ptr_ += bytes; }
      47             : 
      48             :   MaybeObject Read() {
      49             :     Address result;
      50      276198 :     memcpy(&result, reinterpret_cast<void*>(ptr_), sizeof(result));
      51             :     return MaybeObject(result);
      52             :   }
      53             :   MaybeObject ReadPrevious() {
      54             :     Address result;
      55    98114282 :     memcpy(&result, reinterpret_cast<void*>(ptr_ - kPointerSize),
      56    98114282 :            sizeof(result));
      57             :     return MaybeObject(result);
      58             :   }
      59             :   inline void Write(Address value) {
      60  2613466993 :     memcpy(reinterpret_cast<void*>(ptr_), &value, sizeof(value));
      61             :   }
      62           0 :   MaybeObjectSlot Slot() { return MaybeObjectSlot(ptr_); }
      63             : 
      64             :   Address address() { return ptr_; }
      65             : 
      66             :  private:
      67             :   Address ptr_;
      68             : };
      69             : 
      70           0 : void Deserializer::UnalignedCopy(UnalignedSlot dest, MaybeObject value) {
      71             :   DCHECK(!allocator()->next_reference_is_weak());
      72             :   dest.Write(value.ptr());
      73           0 : }
      74             : 
// Writes a raw Address value into a possibly unaligned destination slot.
void Deserializer::UnalignedCopy(UnalignedSlot dest, Address value) {
  DCHECK(!allocator()->next_reference_is_weak());
  dest.Write(value);
}
      79             : 
// Binds this deserializer to an isolate. Must be called exactly once,
// before any deserialization work starts.
void Deserializer::Initialize(Isolate* isolate) {
  DCHECK_NULL(isolate_);
  DCHECK_NOT_NULL(isolate);
  isolate_ = isolate;
  DCHECK_NULL(external_reference_table_);
  external_reference_table_ = isolate->external_reference_table();
#ifdef DEBUG
  // Count the number of external references registered through the API.
  num_api_references_ = 0;
  if (isolate_->api_external_references() != nullptr) {
    // The API reference list is 0-terminated.
    while (isolate_->api_external_references()[num_api_references_] != 0) {
      num_api_references_++;
    }
  }
#endif  // DEBUG
  // Reject snapshot data whose magic number does not match this build.
  CHECK_EQ(magic_number_, SerializedData::kMagicNumber);
}
      97             : 
      98    14447477 : void Deserializer::Rehash() {
      99             :   DCHECK(can_rehash() || deserializing_user_code());
     100    28894955 :   for (HeapObject item : to_rehash_) item->RehashBasedOnMap(isolate());
     101      217523 : }
     102             : 
// In debug builds, verifies the snapshot stream was fully and exactly
// consumed before tearing the deserializer down.
Deserializer::~Deserializer() {
#ifdef DEBUG
  // Do not perform checks if we aborted deserialization.
  if (source_.position() == 0) return;
  // Check that we only have padding bytes remaining.
  while (source_.HasMore()) DCHECK_EQ(kNop, source_.Get());
  // Check that we've fully used all reserved space.
  DCHECK(allocator()->ReservationsAreFullyUsed());
#endif  // DEBUG
}
     113             : 
// This is called on the roots.  It is the driver of the deserialization
// process.  It is also called on the body of each function.
void Deserializer::VisitRootPointers(Root root, const char* description,
                                     FullObjectSlot start, FullObjectSlot end) {
  // The space must be new space.  Any other space would cause ReadChunk to try
  // to update the remembered set using nullptr as the address.
  // TODO(ishell): this will not work once we actually compress pointers.
  STATIC_ASSERT(kTaggedSize == kSystemPointerSize);
  ReadData(UnalignedSlot(start.address()), UnalignedSlot(end.address()),
           NEW_SPACE, kNullAddress);
}
     125             : 
// Consumes one byte from the stream and checks it is the kSynchronize
// marker. |tag| itself is unused; the marker lives in the stream.
void Deserializer::Synchronize(VisitorSynchronization::SyncTag tag) {
  static const byte expected = kSynchronize;
  CHECK_EQ(expected, source_.Get());
}
     130             : 
// Replays the deferred-object section of the snapshot: each entry re-reads
// the body of an object that was already allocated earlier (referenced via
// a back reference). Runs until the kSynchronize marker.
void Deserializer::DeserializeDeferredObjects() {
  for (int code = source_.Get(); code != kSynchronize; code = source_.Get()) {
    switch (code) {
      case kAlignmentPrefix:
      case kAlignmentPrefix + 1:
      case kAlignmentPrefix + 2: {
        // An alignment prefix sets the alignment for the next allocation.
        int alignment = code - (SerializerDeserializer::kAlignmentPrefix - 1);
        allocator()->SetAlignment(static_cast<AllocationAlignment>(alignment));
        break;
      }
      default: {
        // Otherwise the byte encodes kNewObject plus the target space.
        int space = code & kSpaceMask;
        DCHECK_LE(space, kNumberOfSpaces);
        DCHECK_EQ(code - space, kNewObject);
        HeapObject object = GetBackReferencedObject(space);
        // Size is serialized in words; skip the first (map) word, which was
        // written when the object was originally allocated.
        int size = source_.GetInt() << kPointerSizeLog2;
        Address obj_address = object->address();
        UnalignedSlot start(obj_address + kPointerSize);
        UnalignedSlot end(obj_address + size);
        bool filled = ReadData(start, end, space, obj_address);
        // Deferred content must never itself be deferred again.
        CHECK(filled);
        DCHECK(CanBeDeferred(object));
        PostProcessNewObject(object, space);
      }
    }
  }
}
     158             : 
// Emits logger/code events for everything created by this deserializer.
void Deserializer::LogNewObjectEvents() {
  {
    // {new_maps_} and {new_code_objects_} are vectors containing raw
    // pointers, hence there should be no GC happening.
    DisallowHeapAllocation no_gc;
    // Issue code events for newly deserialized code objects.
    LOG_CODE_EVENT(isolate_, LogCodeObjects());
  }
  LOG_CODE_EVENT(isolate_, LogCompiledFunctions());
  LogNewMapEvents();
}
     170             : 
// Logs a MapCreate/MapDetails event pair for every map recorded in
// new_maps(). Entries are only collected when FLAG_trace_maps is on
// (asserted below).
void Deserializer::LogNewMapEvents() {
  DisallowHeapAllocation no_gc;
  for (Map map : new_maps()) {
    DCHECK(FLAG_trace_maps);
    LOG(isolate_, MapCreate(map));
    LOG(isolate_, MapDetails(map));
  }
}
     179             : 
// Logs the deserialization event and details for a single script.
void Deserializer::LogScriptEvents(Script script) {
  DisallowHeapAllocation no_gc;
  LOG(isolate_,
      ScriptEvent(Logger::ScriptEventType::kDeserialize, script->id()));
  LOG(isolate_, ScriptDetails(script));
}
     186             : 
// Eagerly computes the string's hash (via ComputeHashField) so that table
// lookups can reject mismatches without comparing string contents.
StringTableInsertionKey::StringTableInsertionKey(String string)
    : StringTableKey(ComputeHashField(string)), string_(string) {
  DCHECK(string->IsInternalizedString());
}
     191             : 
     192        5201 : bool StringTableInsertionKey::IsMatch(Object string) {
     193             :   // We know that all entries in a hash table had their hash keys created.
     194             :   // Use that knowledge to have fast failure.
     195       10402 :   if (Hash() != String::cast(string)->Hash()) return false;
     196             :   // We want to compare the content of two internalized strings here.
     197         617 :   return string_->SlowEquals(String::cast(string));
     198             : }
     199             : 
// Returns the wrapped string as a handle; used when the key is actually
// inserted into the string table.
Handle<String> StringTableInsertionKey::AsHandle(Isolate* isolate) {
  return handle(string_, isolate);
}
     203             : 
// Forces computation of the string's hash and returns the raw hash field.
uint32_t StringTableInsertionKey::ComputeHashField(String string) {
  // Make sure hash_field() is computed.
  string->Hash();
  return string->hash_field();
}
     209             : 
// Fixes up a freshly deserialized object: queues it for rehashing,
// canonicalizes internalized strings when deserializing user code, records
// objects that need later processing, and patches external resources and
// off-heap backing-store pointers. Returns the object, or (for strings
// found in the string table) the canonical forwarded object.
HeapObject Deserializer::PostProcessNewObject(HeapObject obj, int space) {
  if ((FLAG_rehash_snapshot && can_rehash_) || deserializing_user_code()) {
    if (obj->IsString()) {
      // Uninitialize hash field as we need to recompute the hash.
      String string = String::cast(obj);
      string->set_hash_field(String::kEmptyHashField);
    } else if (obj->NeedsRehashing()) {
      to_rehash_.push_back(obj);
    }
  }

  if (deserializing_user_code()) {
    if (obj->IsString()) {
      String string = String::cast(obj);
      if (string->IsInternalizedString()) {
        // Canonicalize the internalized string. If it already exists in the
        // string table, set it to forward to the existing one.
        StringTableInsertionKey key(string);
        String canonical =
            StringTable::ForwardStringIfExists(isolate_, &key, string);

        if (!canonical.is_null()) return canonical;

        // Not in the table yet; remember it so it can be inserted later.
        new_internalized_strings_.push_back(handle(string, isolate_));
        return string;
      }
    } else if (obj->IsScript()) {
      new_scripts_.push_back(handle(Script::cast(obj), isolate_));
    } else if (obj->IsAllocationSite()) {
      // We should link new allocation sites, but we can't do this immediately
      // because |AllocationSite::HasWeakNext()| internally accesses
      // |Heap::roots_| that may not have been initialized yet. So defer this to
      // |ObjectDeserializer::CommitPostProcessedObjects()|.
      new_allocation_sites_.push_back(AllocationSite::cast(obj));
    } else {
      DCHECK(CanBeDeferred(obj));
    }
  }
  if (obj->IsScript()) {
    LogScriptEvents(Script::cast(obj));
  } else if (obj->IsCode()) {
    // We flush all code pages after deserializing the startup snapshot.
    // Hence we only remember each individual code object when deserializing
    // user code.
    if (deserializing_user_code() || space == LO_SPACE) {
      new_code_objects_.push_back(Code::cast(obj));
    }
  } else if (FLAG_trace_maps && obj->IsMap()) {
    // Keep track of all seen Maps to log them later since they might be only
    // partially initialized at this point.
    new_maps_.push_back(Map::cast(obj));
  } else if (obj->IsAccessorInfo()) {
#ifdef USE_SIMULATOR
    accessor_infos_.push_back(AccessorInfo::cast(obj));
#endif
  } else if (obj->IsCallHandlerInfo()) {
#ifdef USE_SIMULATOR
    call_handler_infos_.push_back(CallHandlerInfo::cast(obj));
#endif
  } else if (obj->IsExternalString()) {
    if (obj->map() == ReadOnlyRoots(isolate_).native_source_string_map()) {
      // Native source strings: decode the serialized resource field back
      // into a live NativesExternalStringResource.
      ExternalOneByteString string = ExternalOneByteString::cast(obj);
      DCHECK(string->is_uncached());
      string->SetResource(
          isolate_, NativesExternalStringResource::DecodeForDeserialization(
                        string->resource()));
    } else {
      // Other external strings serialize an index into the API-provided
      // external-reference table rather than a raw resource pointer.
      ExternalString string = ExternalString::cast(obj);
      uint32_t index = string->resource_as_uint32();
      Address address =
          static_cast<Address>(isolate_->api_external_references()[index]);
      string->set_address_as_resource(address);
      isolate_->heap()->UpdateExternalString(string, 0,
                                             string->ExternalPayloadSize());
    }
    isolate_->heap()->RegisterExternalString(String::cast(obj));
  } else if (obj->IsJSTypedArray()) {
    JSTypedArray typed_array = JSTypedArray::cast(obj);
    CHECK_LE(typed_array->byte_offset(), Smi::kMaxValue);
    int32_t byte_offset = static_cast<int32_t>(typed_array->byte_offset());
    if (byte_offset > 0) {
      FixedTypedArrayBase elements =
          FixedTypedArrayBase::cast(typed_array->elements());
      // Must be off-heap layout.
      DCHECK(!typed_array->is_on_heap());

      // Re-apply the byte offset to the raw external data pointer.
      void* pointer_with_offset = reinterpret_cast<void*>(
          reinterpret_cast<intptr_t>(elements->external_pointer()) +
          byte_offset);
      elements->set_external_pointer(pointer_with_offset);
    }
  } else if (obj->IsJSArrayBuffer()) {
    JSArrayBuffer buffer = JSArrayBuffer::cast(obj);
    // Only fixup for the off-heap case.
    if (buffer->backing_store() != nullptr) {
      // The serialized backing-store field is an index (stored as a Smi)
      // into off_heap_backing_stores_, not a real pointer.
      Smi store_index(reinterpret_cast<Address>(buffer->backing_store()));
      void* backing_store = off_heap_backing_stores_[store_index->value()];

      buffer->set_backing_store(backing_store);
      isolate_->heap()->RegisterNewArrayBuffer(buffer);
    }
  } else if (obj->IsFixedTypedArrayBase()) {
    FixedTypedArrayBase fta = FixedTypedArrayBase::cast(obj);
    // Only fixup for the off-heap case.
    if (fta->base_pointer() == Smi::kZero) {
      Smi store_index(reinterpret_cast<Address>(fta->external_pointer()));
      void* backing_store = off_heap_backing_stores_[store_index->value()];
      fta->set_external_pointer(backing_store);
    }
  } else if (obj->IsBytecodeArray()) {
    // TODO(mythria): Remove these once we store the default values for these
    // fields in the serializer.
    BytecodeArray bytecode_array = BytecodeArray::cast(obj);
    bytecode_array->set_interrupt_budget(
        interpreter::Interpreter::InterruptBudget());
    bytecode_array->set_osr_loop_nesting_level(0);
  } else if (obj->IsDescriptorArray()) {
    // Reset the marking state of the descriptor array.
    DescriptorArray descriptor_array = DescriptorArray::cast(obj);
    descriptor_array->set_raw_number_of_marked_descriptors(0);
  }

  // Check alignment.
  DCHECK_EQ(0, Heap::GetFillToAlign(obj->address(),
                                    HeapObject::RequiredAlignment(obj->map())));
  return obj;
}
     337             : 
// Resolves a back reference in the snapshot stream to the previously
// deserialized object it denotes. The wire encoding depends on |space|:
// an allocation-order index for LO_SPACE and MAP_SPACE, and a
// (chunk index, chunk offset) pair for everything else.
HeapObject Deserializer::GetBackReferencedObject(int space) {
  HeapObject obj;
  switch (space) {
    case LO_SPACE:
      obj = allocator()->GetLargeObject(source_.GetInt());
      break;
    case MAP_SPACE:
      obj = allocator()->GetMap(source_.GetInt());
      break;
    case RO_SPACE: {
      uint32_t chunk_index = source_.GetInt();
      uint32_t chunk_offset = source_.GetInt();
      if (isolate()->heap()->deserialization_complete()) {
        // The read-only space is already fully set up: walk to the page the
        // chunk index denotes and translate the offset into an address.
        PagedSpace* read_only_space = isolate()->heap()->read_only_space();
        Page* page = read_only_space->first_page();
        for (uint32_t i = 0; i < chunk_index; ++i) {
          page = page->next_page();
        }
        Address address = page->OffsetToAddress(chunk_offset);
        obj = HeapObject::FromAddress(address);
      } else {
        obj = allocator()->GetObject(static_cast<AllocationSpace>(space),
                                     chunk_index, chunk_offset);
      }
      break;
    }
    default: {
      uint32_t chunk_index = source_.GetInt();
      uint32_t chunk_offset = source_.GetInt();
      obj = allocator()->GetObject(static_cast<AllocationSpace>(space),
                                   chunk_index, chunk_offset);
      break;
    }
  }

  if (deserializing_user_code() && obj->IsThinString()) {
    // A ThinString forwards to its canonical internalized string; return
    // the target instead of the forwarder.
    obj = ThinString::cast(obj)->actual();
  }

  hot_objects_.Add(obj);
  DCHECK(!HasWeakHeapObjectTag(obj->ptr()));
  return obj;
}
     381             : 
// This routine writes the new object into the pointer provided.
// The reason for this strange interface is that otherwise the object is
// written very late, which means the FreeSpace map is not set up by the
// time we need to use it to mark the space at the end of a page free.
void Deserializer::ReadObject(int space_number, UnalignedSlot write_back,
                              HeapObjectReferenceType reference_type) {
  // Object size is serialized in words.
  const int size = source_.GetInt() << kObjectAlignmentBits;

  Address address =
      allocator()->Allocate(static_cast<AllocationSpace>(space_number), size);
  HeapObject obj = HeapObject::FromAddress(address);

  isolate_->heap()->OnAllocationEvent(obj, size);
  UnalignedSlot current(address);
  UnalignedSlot limit(address + size);

  if (ReadData(current, limit, space_number, address)) {
    // Only post process if object content has not been deferred.
    obj = PostProcessNewObject(obj, space_number);
  }

  // Preserve the strong/weak flavor of the reference being written back.
  MaybeObject write_back_obj = reference_type == HeapObjectReferenceType::STRONG
                                   ? HeapObjectReference::Strong(obj)
                                   : HeapObjectReference::Weak(obj);
  UnalignedCopy(write_back, write_back_obj);
#ifdef DEBUG
  if (obj->IsCode()) {
    DCHECK(space_number == CODE_SPACE || space_number == CODE_LO_SPACE);
  } else {
    DCHECK(space_number != CODE_SPACE && space_number != CODE_LO_SPACE);
  }
#endif  // DEBUG
}
     415             : 
// Stand-in external-reference target that always aborts; installed when the
// embedder supplied no external references (see callers in this file).
static void NoExternalReferencesCallback() {
  // The following check will trigger if a function or object template
  // with references to native functions have been deserialized from
  // snapshot, but no actual external references were provided when the
  // isolate was created.
  CHECK_WITH_MSG(false, "No external references provided via API");
}
     423             : 
     424   639105936 : bool Deserializer::ReadData(UnalignedSlot current, UnalignedSlot limit,
     425             :                             int source_space, Address current_object_address) {
     426   639106091 :   Isolate* const isolate = isolate_;
     427             :   // Write barrier support costs around 1% in startup time.  In fact there
     428             :   // are no new space objects in current boot snapshots, so it's not needed,
     429             :   // but that may change.
     430             :   bool write_barrier_needed =
     431   639105936 :       (current_object_address != kNullAddress && source_space != NEW_SPACE &&
     432             :        source_space != CODE_SPACE);
     433  4321896095 :   while (current < limit) {
     434             :     byte data = source_.Get();
     435  3043736782 :     switch (data) {
     436             : #define CASE_STATEMENT(where, how, within, space_number) \
     437             :   case where + how + within + space_number:              \
     438             :     STATIC_ASSERT((where & ~kWhereMask) == 0);           \
     439             :     STATIC_ASSERT((how & ~kHowToCodeMask) == 0);         \
     440             :     STATIC_ASSERT((within & ~kWhereToPointMask) == 0);   \
     441             :     STATIC_ASSERT((space_number & ~kSpaceMask) == 0);
     442             : 
     443             : #define CASE_BODY(where, how, within, space_number_if_any)                   \
     444             :   current = ReadDataCase<where, how, within, space_number_if_any>(           \
     445             :       isolate, current, current_object_address, data, write_barrier_needed); \
     446             :   break;
     447             : 
     448             : // This generates a case and a body for the new space (which has to do extra
     449             : // write barrier handling) and handles the other spaces with fall-through cases
     450             : // and one body.
     451             : #define ALL_SPACES(where, how, within)           \
     452             :   CASE_STATEMENT(where, how, within, NEW_SPACE)  \
     453             :   CASE_BODY(where, how, within, NEW_SPACE)       \
     454             :   CASE_STATEMENT(where, how, within, OLD_SPACE)  \
     455             :   V8_FALLTHROUGH;                                \
     456             :   CASE_STATEMENT(where, how, within, CODE_SPACE) \
     457             :   V8_FALLTHROUGH;                                \
     458             :   CASE_STATEMENT(where, how, within, MAP_SPACE)  \
     459             :   V8_FALLTHROUGH;                                \
     460             :   CASE_STATEMENT(where, how, within, LO_SPACE)   \
     461             :   V8_FALLTHROUGH;                                \
     462             :   CASE_STATEMENT(where, how, within, RO_SPACE)   \
     463             :   CASE_BODY(where, how, within, kAnyOldSpace)
     464             : 
     465             : #define FOUR_CASES(byte_code) \
     466             :   case byte_code:             \
     467             :   case byte_code + 1:         \
     468             :   case byte_code + 2:         \
     469             :   case byte_code + 3:
     470             : 
     471             : #define SIXTEEN_CASES(byte_code) \
     472             :   FOUR_CASES(byte_code)          \
     473             :   FOUR_CASES(byte_code + 4)      \
     474             :   FOUR_CASES(byte_code + 8)      \
     475             :   FOUR_CASES(byte_code + 12)
     476             : 
     477             : #define SINGLE_CASE(where, how, within, space) \
     478             :   CASE_STATEMENT(where, how, within, space)    \
     479             :   CASE_BODY(where, how, within, space)
     480             : 
     481             :       // Deserialize a new object and write a pointer to it to the current
     482             :       // object.
     483         790 :       ALL_SPACES(kNewObject, kPlain, kStartOfObject)
     484             :       // Deserialize a new code object and write a pointer to its first
     485             :       // instruction to the current code object.
     486           0 :       ALL_SPACES(kNewObject, kFromCode, kInnerPointer)
     487             :       // Find a recently deserialized object using its offset from the current
     488             :       // allocation point and write a pointer to it to the current object.
     489           0 :       ALL_SPACES(kBackref, kPlain, kStartOfObject)
     490           0 :       ALL_SPACES(kBackrefWithSkip, kPlain, kStartOfObject)
     491             : #if V8_CODE_EMBEDS_OBJECT_POINTER
     492             :       // Deserialize a new object from pointer found in code and write
     493             :       // a pointer to it to the current object. Required only for MIPS, PPC, ARM
     494             :       // or S390 with embedded constant pool, and omitted on the other
     495             :       // architectures because it is fully unrolled and would cause bloat.
     496             :       ALL_SPACES(kNewObject, kFromCode, kStartOfObject)
     497             :       // Find a recently deserialized code object using its offset from the
     498             :       // current allocation point and write a pointer to it to the current
     499             :       // object. Required only for MIPS, PPC, ARM or S390 with embedded
     500             :       // constant pool.
     501             :       ALL_SPACES(kBackref, kFromCode, kStartOfObject)
     502             :       ALL_SPACES(kBackrefWithSkip, kFromCode, kStartOfObject)
     503             : #endif
     504             :       // Find a recently deserialized code object using its offset from the
     505             :       // current allocation point and write a pointer to its first instruction
     506             :       // to the current code object or the instruction pointer in a function
     507             :       // object.
     508           0 :       ALL_SPACES(kBackref, kFromCode, kInnerPointer)
     509           0 :       ALL_SPACES(kBackrefWithSkip, kFromCode, kInnerPointer)
     510             :       // Find an object in the roots array and write a pointer to it to the
     511             :       // current object.
     512   201133006 :       SINGLE_CASE(kRootArray, kPlain, kStartOfObject, 0)
     513             : #if V8_CODE_EMBEDS_OBJECT_POINTER
     514             :       // Find an object in the roots array and write a pointer to it to in code.
     515             :       SINGLE_CASE(kRootArray, kFromCode, kStartOfObject, 0)
     516             : #endif
     517             :       // Find an object in the partial snapshots cache and write a pointer to it
     518             :       // to the current object.
     519   151098111 :       SINGLE_CASE(kPartialSnapshotCache, kPlain, kStartOfObject, 0)
     520           0 :       SINGLE_CASE(kPartialSnapshotCache, kFromCode, kStartOfObject, 0)
     521           0 :       SINGLE_CASE(kPartialSnapshotCache, kFromCode, kInnerPointer, 0)
     522             :       // Find an object in the partial snapshots cache and write a pointer to it
     523             :       // to the current object.
     524    28000761 :       SINGLE_CASE(kReadOnlyObjectCache, kPlain, kStartOfObject, 0)
     525           0 :       SINGLE_CASE(kReadOnlyObjectCache, kFromCode, kStartOfObject, 0)
     526           0 :       SINGLE_CASE(kReadOnlyObjectCache, kFromCode, kInnerPointer, 0)
     527             :       // Find an object in the attached references and write a pointer to it to
     528             :       // the current object.
     529      275614 :       SINGLE_CASE(kAttachedReference, kPlain, kStartOfObject, 0)
     530           0 :       SINGLE_CASE(kAttachedReference, kFromCode, kStartOfObject, 0)
     531           0 :       SINGLE_CASE(kAttachedReference, kFromCode, kInnerPointer, 0)
     532             : 
     533             : #undef CASE_STATEMENT
     534             : #undef CASE_BODY
     535             : #undef ALL_SPACES
     536             : 
     537             :       case kSkip: {
     538    94913822 :         int size = source_.GetInt();
     539             :         current.Advance(size);
     540             :         break;
     541             :       }
     542             : 
     543             :       // Find an external reference and write a pointer to it to the current
     544             :       // object.
     545             :       case kExternalReference + kPlain + kStartOfObject:
     546             :         current =
     547     2073409 :             ReadExternalReferenceCase(kPlain, current, current_object_address);
     548     2073406 :         break;
     549             :       // Find an external reference and write a pointer to it in the current
     550             :       // code object.
     551             :       case kExternalReference + kFromCode + kStartOfObject:
     552             :         current = ReadExternalReferenceCase(kFromCode, current,
     553           0 :                                             current_object_address);
     554           0 :         break;
     555             : 
     556             :       case kInternalReferenceEncoded:
     557             :       case kInternalReference: {
     558             :         // Internal reference address is not encoded via skip, but by offset
     559             :         // from code entry.
     560           0 :         int pc_offset = source_.GetInt();
     561           0 :         int target_offset = source_.GetInt();
     562             :         Code code = Code::cast(HeapObject::FromAddress(current_object_address));
     563             :         DCHECK(0 <= pc_offset && pc_offset <= code->raw_instruction_size());
     564             :         DCHECK(0 <= target_offset &&
     565             :                target_offset <= code->raw_instruction_size());
     566           0 :         Address pc = code->entry() + pc_offset;
     567           0 :         Address target = code->entry() + target_offset;
     568             :         Assembler::deserialization_set_target_internal_reference_at(
     569             :             pc, target,
     570             :             data == kInternalReference ? RelocInfo::INTERNAL_REFERENCE
     571             :                                        : RelocInfo::INTERNAL_REFERENCE_ENCODED);
     572             :         break;
     573             :       }
     574             : 
     575             :       case kOffHeapTarget: {
     576             :         DCHECK(FLAG_embedded_builtins);
     577    94913222 :         int skip = source_.GetInt();
     578    94913160 :         int builtin_index = source_.GetInt();
     579             :         DCHECK(Builtins::IsBuiltinId(builtin_index));
     580             : 
     581             :         current.Advance(skip);
     582             : 
     583    94913360 :         CHECK_NOT_NULL(isolate->embedded_blob());
     584    94913573 :         EmbeddedData d = EmbeddedData::FromBlob();
     585    94913573 :         Address address = d.InstructionStartOfBuiltin(builtin_index);
     586    94913708 :         CHECK_NE(kNullAddress, address);
     587             : 
     588    94913708 :         if (RelocInfo::OffHeapTargetIsCodedSpecially()) {
     589             :           Address location_of_branch_data = current.address();
     590             :           int skip = Assembler::deserialization_special_target_size(
     591             :               location_of_branch_data);
     592             :           Assembler::deserialization_set_special_target_at(
     593             :               location_of_branch_data,
     594             :               Code::cast(HeapObject::FromAddress(current_object_address)),
     595             :               address);
     596             :           current.Advance(skip);
     597             :         } else {
     598             :           UnalignedCopy(current, address);
     599             :           current.Advance();
     600             :         }
     601             :         break;
     602             :       }
     603             : 
     604             :       case kNop:
     605             :         break;
     606             : 
     607             :       case kNextChunk: {
     608             :         int space = source_.Get();
     609     2115115 :         allocator()->MoveToNextChunk(static_cast<AllocationSpace>(space));
     610     2115115 :         break;
     611             :       }
     612             : 
     613             :       case kDeferred: {
     614             :         // Deferred can only occur right after the heap object header.
     615             :         DCHECK_EQ(current.address(), current_object_address + kPointerSize);
     616      191614 :         HeapObject obj = HeapObject::FromAddress(current_object_address);
     617             :         // If the deferred object is a map, its instance type may be used
     618             :         // during deserialization. Initialize it with a temporary value.
     619      191614 :         if (obj->IsMap()) Map::cast(obj)->set_instance_type(FILLER_TYPE);
     620             :         current = limit;
     621             :         return false;
     622             :       }
     623             : 
     624             :       case kSynchronize:
     625             :         // If we get here then that indicates that you have a mismatch between
     626             :         // the number of GC roots when serializing and deserializing.
     627           0 :         UNREACHABLE();
     628             : 
     629             :       // Deserialize raw data of variable length.
     630             :       case kVariableRawData: {
     631      217704 :         int size_in_bytes = source_.GetInt();
     632      217704 :         byte* raw_data_out = reinterpret_cast<byte*>(current.address());
     633             :         source_.CopyRaw(raw_data_out, size_in_bytes);
     634             :         current.Advance(size_in_bytes);
     635             :         break;
     636             :       }
     637             : 
     638             :       // Deserialize raw code directly into the body of the code object.
     639             :       // Do not move current.
     640             :       case kVariableRawCode: {
     641    94916509 :         int size_in_bytes = source_.GetInt();
     642             :         source_.CopyRaw(
     643    94916693 :             reinterpret_cast<byte*>(current_object_address + Code::kDataStart),
     644    94916693 :             size_in_bytes);
     645             :         break;
     646             :       }
     647             : 
     648             :       case kVariableRepeat: {
     649      526815 :         int repeats = source_.GetInt();
     650             :         MaybeObject object = current.ReadPrevious();
     651             :         DCHECK(!Heap::InNewSpace(object));
     652   248701422 :         for (int i = 0; i < repeats; i++) {
     653             :           UnalignedCopy(current, object);
     654             :           current.Advance();
     655             :         }
     656             :         break;
     657             :       }
     658             : 
     659             :       case kOffHeapBackingStore: {
     660          35 :         int byte_length = source_.GetInt();
     661             :         byte* backing_store = static_cast<byte*>(
     662             :             isolate->array_buffer_allocator()->AllocateUninitialized(
     663          35 :                 byte_length));
     664          35 :         CHECK_NOT_NULL(backing_store);
     665             :         source_.CopyRaw(backing_store, byte_length);
     666          35 :         off_heap_backing_stores_.push_back(backing_store);
     667             :         break;
     668             :       }
     669             : 
     670             :       case kApiReference: {
     671         120 :         int skip = source_.GetInt();
     672             :         current.Advance(skip);
     673         120 :         uint32_t reference_id = static_cast<uint32_t>(source_.GetInt());
     674             :         Address address;
     675         120 :         if (isolate->api_external_references()) {
     676             :           DCHECK_WITH_MSG(
     677             :               reference_id < num_api_references_,
     678             :               "too few external references provided through the API");
     679             :           address = static_cast<Address>(
     680         100 :               isolate->api_external_references()[reference_id]);
     681             :         } else {
     682          20 :           address = reinterpret_cast<Address>(NoExternalReferencesCallback);
     683             :         }
     684             :         UnalignedCopy(current, address);
     685             :         current.Advance();
     686             :         break;
     687             :       }
     688             : 
     689             :       case kClearedWeakReference:
     690             :         UnalignedCopy(current, HeapObjectReference::ClearedValue(isolate_));
     691             :         current.Advance();
     692             :         break;
     693             : 
     694             :       case kWeakPrefix:
     695             :         DCHECK(!allocator()->next_reference_is_weak());
     696             :         allocator()->set_next_reference_is_weak(true);
     697             :         break;
     698             : 
     699             :       case kAlignmentPrefix:
     700             :       case kAlignmentPrefix + 1:
     701             :       case kAlignmentPrefix + 2: {
     702           0 :         int alignment = data - (SerializerDeserializer::kAlignmentPrefix - 1);
     703           0 :         allocator()->SetAlignment(static_cast<AllocationAlignment>(alignment));
     704             :         break;
     705             :       }
     706             : 
     707             :       // First kNumberOfRootArrayConstants roots are guaranteed to be in
     708             :       // the old space.
     709             :       STATIC_ASSERT(
     710             :           static_cast<int>(RootIndex::kFirstImmortalImmovableRoot) == 0);
     711             :       STATIC_ASSERT(kNumberOfRootArrayConstants <=
     712             :                     static_cast<int>(RootIndex::kLastImmortalImmovableRoot));
     713             :       STATIC_ASSERT(kNumberOfRootArrayConstants == 32);
     714             :       SIXTEEN_CASES(kRootArrayConstantsWithSkip)
     715             :       SIXTEEN_CASES(kRootArrayConstantsWithSkip + 16) {
     716           0 :         int skip = source_.GetInt();
     717             :         current.Advance(skip);
     718             :         V8_FALLTHROUGH;
     719             :       }
     720             : 
     721             :       SIXTEEN_CASES(kRootArrayConstants)
     722             :       SIXTEEN_CASES(kRootArrayConstants + 16) {
     723   624569000 :         int id = data & kRootArrayConstantsMask;
     724             :         RootIndex root_index = static_cast<RootIndex>(id);
     725             :         MaybeObject object = MaybeObject::FromObject(isolate->root(root_index));
     726             :         DCHECK(!Heap::InNewSpace(object));
     727             :         UnalignedCopy(current, object);
     728             :         current.Advance();
     729             :         break;
     730             :       }
     731             : 
     732             :       STATIC_ASSERT(kNumberOfHotObjects == 8);
     733             :       FOUR_CASES(kHotObjectWithSkip)
     734             :       FOUR_CASES(kHotObjectWithSkip + 4) {
     735          15 :         int skip = source_.GetInt();
     736             :         current.Advance(skip);
     737             :         V8_FALLTHROUGH;
     738             :       }
     739             : 
     740             :       FOUR_CASES(kHotObject)
     741             :       FOUR_CASES(kHotObject + 4) {
     742   506144897 :         int index = data & kHotObjectMask;
     743             :         Object hot_object = hot_objects_.Get(index);
     744             :         MaybeObject hot_maybe_object = MaybeObject::FromObject(hot_object);
     745   506144897 :         if (allocator()->GetAndClearNextReferenceIsWeak()) {
     746             :           hot_maybe_object = MaybeObject::MakeWeak(hot_maybe_object);
     747             :         }
     748             : 
     749             :         UnalignedCopy(current, hot_maybe_object);
     750   506144897 :         if (write_barrier_needed && Heap::InNewSpace(hot_object)) {
     751             :           HeapObject current_object =
     752           0 :               HeapObject::FromAddress(current_object_address);
     753           0 :           GenerationalBarrier(current_object, current.Slot(), hot_maybe_object);
     754             :         }
     755             :         current.Advance();
     756             :         break;
     757             :       }
     758             : 
     759             :       // Deserialize raw data of fixed length from 1 to 32 words.
     760             :       STATIC_ASSERT(kNumberOfFixedRawData == 32);
     761             :       SIXTEEN_CASES(kFixedRawData)
     762             :       SIXTEEN_CASES(kFixedRawData + 16) {
     763   481341151 :         byte* raw_data_out = reinterpret_cast<byte*>(current.address());
     764   481341151 :         int size_in_bytes = (data - kFixedRawDataStart) << kPointerSizeLog2;
     765             :         source_.CopyRaw(raw_data_out, size_in_bytes);
     766             :         current.Advance(size_in_bytes);
     767             :         break;
     768             :       }
     769             : 
     770             :       STATIC_ASSERT(kNumberOfFixedRepeat == 16);
     771             :       SIXTEEN_CASES(kFixedRepeat) {
     772    97587467 :         int repeats = data - kFixedRepeatStart;
     773             :         MaybeObject object = current.ReadPrevious();
     774             :         DCHECK(!Heap::InNewSpace(object));
     775   229514200 :         for (int i = 0; i < repeats; i++) {
     776             :           UnalignedCopy(current, object);
     777             :           current.Advance();
     778             :         }
     779             :         break;
     780             :       }
     781             : 
     782             : #ifdef DEBUG
     783             : #define UNUSED_CASE(byte_code) \
     784             :   case byte_code:              \
     785             :     UNREACHABLE();
     786             :       UNUSED_SERIALIZER_BYTE_CODES(UNUSED_CASE)
     787             : #endif
     788             : #undef UNUSED_CASE
     789             : 
     790             : #undef SIXTEEN_CASES
     791             : #undef FOUR_CASES
     792             : #undef SINGLE_CASE
     793             :     }
     794             :   }
     795   639053377 :   CHECK_EQ(limit, current);
     796             :   return true;
     797             : }
     798             : 
     799     2073408 : UnalignedSlot Deserializer::ReadExternalReferenceCase(
     800             :     HowToCode how, UnalignedSlot current, Address current_object_address) {
     801     2073408 :   int skip = source_.GetInt();
     802             :   current.Advance(skip);
     803     2073407 :   uint32_t reference_id = static_cast<uint32_t>(source_.GetInt());
     804     2073406 :   Address address = external_reference_table_->address(reference_id);
     805             : 
     806     2073406 :   if (how == kFromCode) {
     807             :     Address location_of_branch_data = current.address();
     808             :     int skip =
     809             :         Assembler::deserialization_special_target_size(location_of_branch_data);
     810             :     Assembler::deserialization_set_special_target_at(
     811             :         location_of_branch_data,
     812             :         Code::cast(HeapObject::FromAddress(current_object_address)), address);
     813             :     current.Advance(skip);
     814             :   } else {
     815             :     UnalignedCopy(current, address);
     816             :     current.Advance();
     817             :   }
     818     2073406 :   return current;
     819             : }
     820             : 
     821             : template <int where, int how, int within, int space_number_if_any>
     822  1005429488 : UnalignedSlot Deserializer::ReadDataCase(Isolate* isolate,
     823             :                                          UnalignedSlot current,
     824             :                                          Address current_object_address,
     825             :                                          byte data, bool write_barrier_needed) {
     826             :   bool emit_write_barrier = false;
     827             :   bool current_was_incremented = false;
     828             :   int space_number = space_number_if_any == kAnyOldSpace ? (data & kSpaceMask)
     829   624921217 :                                                          : space_number_if_any;
     830             :   HeapObjectReferenceType reference_type = HeapObjectReferenceType::STRONG;
     831             :   if (where == kNewObject && how == kPlain && within == kStartOfObject) {
     832   435688277 :     if (allocator()->GetAndClearNextReferenceIsWeak()) {
     833             :       reference_type = HeapObjectReferenceType::WEAK;
     834             :     }
     835   435688277 :     ReadObject(space_number, current, reference_type);
     836   435669429 :     emit_write_barrier = (space_number == NEW_SPACE);
     837             :   } else {
     838           0 :     Object new_object; /* May not be a real Object pointer. */
     839             :     if (where == kNewObject) {
     840           0 :       ReadObject(space_number, UnalignedSlot(&new_object),
     841           0 :                  HeapObjectReferenceType::STRONG);
     842             :     } else if (where == kBackref) {
     843   189233725 :       emit_write_barrier = (space_number == NEW_SPACE);
     844   189233725 :       new_object = GetBackReferencedObject(data & kSpaceMask);
     845             :     } else if (where == kBackrefWithSkip) {
     846           5 :       int skip = source_.GetInt();
     847             :       current.Advance(skip);
     848           5 :       emit_write_barrier = (space_number == NEW_SPACE);
     849           5 :       new_object = GetBackReferencedObject(data & kSpaceMask);
     850             :     } else if (where == kRootArray) {
     851   201132993 :       int id = source_.GetInt();
     852   201132953 :       RootIndex root_index = static_cast<RootIndex>(id);
     853             :       new_object = isolate->root(root_index);
     854   201132953 :       emit_write_barrier = Heap::InNewSpace(new_object);
     855             :       hot_objects_.Add(HeapObject::cast(new_object));
     856             :     } else if (where == kReadOnlyObjectCache) {
     857    28000757 :       int cache_index = source_.GetInt();
     858    56001512 :       new_object = isolate->read_only_object_cache()->at(cache_index);
     859             :       DCHECK(!Heap::InNewSpace(new_object));
     860             :       emit_write_barrier = false;
     861             :     } else if (where == kPartialSnapshotCache) {
     862   151098117 :       int cache_index = source_.GetInt();
     863   302195890 :       new_object = isolate->partial_snapshot_cache()->at(cache_index);
     864   151097945 :       emit_write_barrier = Heap::InNewSpace(new_object);
     865             :     } else {
     866             :       DCHECK_EQ(where, kAttachedReference);
     867      275614 :       int index = source_.GetInt();
     868      275614 :       new_object = *attached_objects_[index];
     869      275614 :       emit_write_barrier = Heap::InNewSpace(new_object);
     870             :     }
     871             :     if (within == kInnerPointer) {
     872             :       DCHECK_EQ(how, kFromCode);
     873           0 :       if (new_object->IsCode()) {
     874           0 :         new_object = Object(Code::cast(new_object)->raw_instruction_start());
     875             :       } else {
     876             :         Cell cell = Cell::cast(new_object);
     877           0 :         new_object = Object(cell->ValueAddress());
     878             :       }
     879             :     }
     880             :     if (how == kFromCode) {
     881             :       DCHECK(!allocator()->next_reference_is_weak());
     882             :       Address location_of_branch_data = current.address();
     883             :       int skip = Assembler::deserialization_special_target_size(
     884             :           location_of_branch_data);
     885             :       Assembler::deserialization_set_special_target_at(
     886             :           location_of_branch_data,
     887             :           Code::cast(HeapObject::FromAddress(current_object_address)),
     888             :           new_object->ptr());
     889             :       current.Advance(skip);
     890             :       current_was_incremented = true;
     891             :     } else {
     892             :       MaybeObject new_maybe_object = MaybeObject::FromObject(new_object);
     893   569739806 :       if (allocator()->GetAndClearNextReferenceIsWeak()) {
     894             :         new_maybe_object = MaybeObject::MakeWeak(new_maybe_object);
     895             :       }
     896             :       UnalignedCopy(current, new_maybe_object);
     897             :     }
     898             :   }
     899   977409269 :   if (emit_write_barrier && write_barrier_needed) {
     900      276198 :     HeapObject object = HeapObject::FromAddress(current_object_address);
     901             :     SLOW_DCHECK(isolate->heap()->Contains(object));
     902      276198 :     GenerationalBarrier(object, current.Slot(), current.Read());
     903             :   }
     904             :   if (!current_was_incremented) {
     905             :     current.Advance();
     906             :   }
     907             : 
     908  1005410025 :   return current;
     909             : }
     910             : 
     911             : }  // namespace internal
     912      183867 : }  // namespace v8

Generated by: LCOV version 1.10