Line data Source code
1 : // Copyright 2016 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/snapshot/deserializer.h"
6 :
7 : #include "src/assembler-inl.h"
8 : #include "src/heap/heap-inl.h"
9 : #include "src/heap/heap-write-barrier-inl.h"
10 : #include "src/heap/read-only-heap.h"
11 : #include "src/interpreter/interpreter.h"
12 : #include "src/isolate.h"
13 : #include "src/log.h"
14 : #include "src/objects-body-descriptors-inl.h"
15 : #include "src/objects/api-callbacks.h"
16 : #include "src/objects/cell-inl.h"
17 : #include "src/objects/hash-table.h"
18 : #include "src/objects/js-array-buffer-inl.h"
19 : #include "src/objects/js-array-inl.h"
20 : #include "src/objects/maybe-object.h"
21 : #include "src/objects/slots.h"
22 : #include "src/objects/smi.h"
23 : #include "src/objects/string.h"
24 : #include "src/roots.h"
25 : #include "src/snapshot/natives.h"
26 : #include "src/snapshot/snapshot.h"
27 :
28 : namespace v8 {
29 : namespace internal {
30 :
// Stores |value| into the slot |dest| and returns the next slot. The caller
// must not have a pending weak-reference prefix (weakness is applied by the
// caller before Write is reached).
template <typename TSlot>
TSlot Deserializer::Write(TSlot dest, MaybeObject value) {
  DCHECK(!allocator()->next_reference_is_weak());
  dest.store(value);
  return dest + 1;
}
37 :
// Writes a raw (untagged) system-pointer-sized address into |dest| and
// returns the slot just past it. A system pointer may occupy more than one
// slot, hence the STATIC_ASSERT and the division in the returned offset.
template <typename TSlot>
TSlot Deserializer::WriteAddress(TSlot dest, Address value) {
  DCHECK(!allocator()->next_reference_is_weak());
  memcpy(dest.ToVoidPtr(), &value, kSystemPointerSize);
  STATIC_ASSERT(IsAligned(kSystemPointerSize, TSlot::kSlotDataSize));
  return dest + (kSystemPointerSize / TSlot::kSlotDataSize);
}
45 :
// Binds this deserializer to |isolate| and prepares the allocator. Must be
// called exactly once before deserialization starts (asserted via isolate_).
void Deserializer::Initialize(Isolate* isolate) {
  DCHECK_NULL(isolate_);
  DCHECK_NOT_NULL(isolate);
  isolate_ = isolate;
  allocator()->Initialize(isolate->heap());

#ifdef DEBUG
  // The read-only deserializer is run by read-only heap set-up before the heap
  // is fully set up. External reference table relies on a few parts of this
  // set-up (like old-space), so it may be uninitialized at this point.
  if (isolate->isolate_data()->external_reference_table()->is_initialized()) {
    // Count the number of external references registered through the API.
    // The API array is zero-terminated.
    num_api_references_ = 0;
    if (isolate_->api_external_references() != nullptr) {
      while (isolate_->api_external_references()[num_api_references_] != 0) {
        num_api_references_++;
      }
    }
  }
#endif  // DEBUG
  // Fail hard on a snapshot/binary mismatch.
  CHECK_EQ(magic_number_, SerializedData::kMagicNumber);
}
68 :
69 213408 : void Deserializer::Rehash() {
70 : DCHECK(can_rehash() || deserializing_user_code());
71 45783983 : for (HeapObject item : to_rehash_) {
72 91141148 : item->RehashBasedOnMap(ReadOnlyRoots(isolate_));
73 : }
74 213409 : }
75 :
// The destructor performs sanity checks in DEBUG builds only: the snapshot
// stream must be fully consumed (modulo trailing kNop padding) and all
// reserved allocation space must have been used.
Deserializer::~Deserializer() {
#ifdef DEBUG
  // Do not perform checks if we aborted deserialization.
  if (source_.position() == 0) return;
  // Check that we only have padding bytes remaining.
  while (source_.HasMore()) DCHECK_EQ(kNop, source_.Get());
  // Check that we've fully used all reserved space.
  DCHECK(allocator()->ReservationsAreFullyUsed());
#endif  // DEBUG
}
86 :
// This is called on the roots. It is the driver of the deserialization
// process. It is also called on the body of each function. Forwards the slot
// range [start, end) to ReadData.
void Deserializer::VisitRootPointers(Root root, const char* description,
                                     FullObjectSlot start, FullObjectSlot end) {
  // We are reading to a location outside of JS heap, so pass NEW_SPACE to
  // avoid triggering write barriers.
  ReadData(FullMaybeObjectSlot(start), FullMaybeObjectSlot(end), NEW_SPACE,
           kNullAddress);
}
96 :
97 1045126 : void Deserializer::Synchronize(VisitorSynchronization::SyncTag tag) {
98 : static const byte expected = kSynchronize;
99 1045126 : CHECK_EQ(expected, source_.Get());
100 1045126 : }
101 :
// Deserializes the bodies of objects whose contents were deferred during the
// main pass. The stream is a sequence of records, each an optional alignment
// prefix followed by (kNewObject|space, back-reference, size, body), and is
// terminated by kSynchronize.
void Deserializer::DeserializeDeferredObjects() {
  for (int code = source_.Get(); code != kSynchronize; code = source_.Get()) {
    switch (code) {
      case kAlignmentPrefix:
      case kAlignmentPrefix + 1:
      case kAlignmentPrefix + 2: {
        // Alignment request for the next back-referenced object.
        int alignment = code - (SerializerDeserializer::kAlignmentPrefix - 1);
        allocator()->SetAlignment(static_cast<AllocationAlignment>(alignment));
        break;
      }
      default: {
        // The bytecode encodes the allocation space in its low bits.
        int space = code & kSpaceMask;
        DCHECK_LE(space, kNumberOfSpaces);
        DCHECK_EQ(code - space, kNewObject);
        HeapObject object = GetBackReferencedObject(space);
        int size = source_.GetInt() << kTaggedSizeLog2;
        Address obj_address = object->address();
        // Object's map is already initialized, now read the rest.
        MaybeObjectSlot start(obj_address + kTaggedSize);
        MaybeObjectSlot end(obj_address + size);
        bool filled = ReadData(start, end, space, obj_address);
        CHECK(filled);
        DCHECK(CanBeDeferred(object));
        PostProcessNewObject(object, space);
      }
    }
  }
}
130 :
// Emits logger events (code objects, compiled functions, maps) for all
// objects created by this deserialization.
void Deserializer::LogNewObjectEvents() {
  {
    // {new_maps_} and {new_code_objects_} are vectors containing raw
    // pointers, hence there should be no GC happening.
    DisallowHeapAllocation no_gc;
    // Issue code events for newly deserialized code objects.
    LOG_CODE_EVENT(isolate_, LogCodeObjects());
  }
  LOG_CODE_EVENT(isolate_, LogCompiledFunctions());
  LogNewMapEvents();
}
142 :
// Logs MapCreate/MapDetails events for every map recorded during
// deserialization. Maps are only recorded when FLAG_trace_maps is on
// (asserted per entry).
void Deserializer::LogNewMapEvents() {
  DisallowHeapAllocation no_gc;
  for (Map map : new_maps()) {
    DCHECK(FLAG_trace_maps);
    LOG(isolate_, MapCreate(map));
    LOG(isolate_, MapDetails(map));
  }
}
151 :
// Emits deserialize-script logger events for |script|.
void Deserializer::LogScriptEvents(Script script) {
  DisallowHeapAllocation no_gc;
  LOG(isolate_,
      ScriptEvent(Logger::ScriptEventType::kDeserialize, script->id()));
  LOG(isolate_, ScriptDetails(script));
}
158 :
// Wraps an internalized |string| for string-table lookup. The hash is
// computed eagerly (via ComputeHashField) so IsMatch can fail fast.
StringTableInsertionKey::StringTableInsertionKey(String string)
    : StringTableKey(ComputeHashField(string)), string_(string) {
  DCHECK(string->IsInternalizedString());
}
163 :
164 4683 : bool StringTableInsertionKey::IsMatch(Object string) {
165 : // We know that all entries in a hash table had their hash keys created.
166 : // Use that knowledge to have fast failure.
167 4683 : if (Hash() != String::cast(string)->Hash()) return false;
168 : // We want to compare the content of two internalized strings here.
169 558 : return string_->SlowEquals(String::cast(string));
170 : }
171 :
// Handlifies the wrapped string so it can be inserted into the string table.
Handle<String> StringTableInsertionKey::AsHandle(Isolate* isolate) {
  return handle(string_, isolate);
}
175 :
// Forces computation of |string|'s hash and returns its raw hash field.
uint32_t StringTableInsertionKey::ComputeHashField(String string) {
  // Make sure hash_field() is computed.
  string->Hash();
  return string->hash_field();
}
181 :
// Fixes up a freshly deserialized object |obj| allocated in |space|:
// schedules rehashing, canonicalizes internalized strings against the string
// table, records scripts/code/maps/allocation sites for later processing,
// and patches external pointers (external strings, typed arrays, array
// buffers, off-heap fixed arrays). Returns the object to use from now on —
// this may differ from |obj| when an internalized string was forwarded to an
// existing canonical entry.
HeapObject Deserializer::PostProcessNewObject(HeapObject obj, int space) {
  if ((FLAG_rehash_snapshot && can_rehash_) || deserializing_user_code()) {
    if (obj->IsString()) {
      // Uninitialize hash field as we need to recompute the hash.
      String string = String::cast(obj);
      string->set_hash_field(String::kEmptyHashField);
      // Rehash strings before read-only space is sealed. Strings outside
      // read-only space are rehashed lazily. (e.g. when rehashing dictionaries)
      if (space == RO_SPACE) {
        to_rehash_.push_back(obj);
      }
    } else if (obj->NeedsRehashing()) {
      to_rehash_.push_back(obj);
    }
  }

  if (deserializing_user_code()) {
    if (obj->IsString()) {
      String string = String::cast(obj);
      if (string->IsInternalizedString()) {
        // Canonicalize the internalized string. If it already exists in the
        // string table, set it to forward to the existing one.
        StringTableInsertionKey key(string);
        String canonical =
            StringTable::ForwardStringIfExists(isolate_, &key, string);

        if (!canonical.is_null()) return canonical;

        new_internalized_strings_.push_back(handle(string, isolate_));
        return string;
      }
    } else if (obj->IsScript()) {
      new_scripts_.push_back(handle(Script::cast(obj), isolate_));
    } else if (obj->IsAllocationSite()) {
      // We should link new allocation sites, but we can't do this immediately
      // because |AllocationSite::HasWeakNext()| internally accesses
      // |Heap::roots_| that may not have been initialized yet. So defer this to
      // |ObjectDeserializer::CommitPostProcessedObjects()|.
      new_allocation_sites_.push_back(AllocationSite::cast(obj));
    } else {
      DCHECK(CanBeDeferred(obj));
    }
  }
  if (obj->IsScript()) {
    LogScriptEvents(Script::cast(obj));
  } else if (obj->IsCode()) {
    // We flush all code pages after deserializing the startup snapshot.
    // Hence we only remember each individual code object when deserializing
    // user code.
    if (deserializing_user_code() || space == LO_SPACE) {
      new_code_objects_.push_back(Code::cast(obj));
    }
  } else if (FLAG_trace_maps && obj->IsMap()) {
    // Keep track of all seen Maps to log them later since they might be only
    // partially initialized at this point.
    new_maps_.push_back(Map::cast(obj));
  } else if (obj->IsAccessorInfo()) {
#ifdef USE_SIMULATOR
    accessor_infos_.push_back(AccessorInfo::cast(obj));
#endif
  } else if (obj->IsCallHandlerInfo()) {
#ifdef USE_SIMULATOR
    call_handler_infos_.push_back(CallHandlerInfo::cast(obj));
#endif
  } else if (obj->IsExternalString()) {
    if (obj->map() == ReadOnlyRoots(isolate_).native_source_string_map()) {
      // Natives source strings carry an encoded resource pointer that must be
      // decoded back into a live resource.
      ExternalOneByteString string = ExternalOneByteString::cast(obj);
      DCHECK(string->is_uncached());
      string->SetResource(
          isolate_, NativesExternalStringResource::DecodeForDeserialization(
                        string->resource()));
    } else {
      // Other external strings store an index into the API-provided external
      // reference array; translate it back to a real address.
      ExternalString string = ExternalString::cast(obj);
      uint32_t index = string->resource_as_uint32();
      Address address =
          static_cast<Address>(isolate_->api_external_references()[index]);
      string->set_address_as_resource(address);
      isolate_->heap()->UpdateExternalString(string, 0,
                                             string->ExternalPayloadSize());
    }
    isolate_->heap()->RegisterExternalString(String::cast(obj));
  } else if (obj->IsJSTypedArray()) {
    JSTypedArray typed_array = JSTypedArray::cast(obj);
    CHECK_LE(typed_array->byte_offset(), Smi::kMaxValue);
    int32_t byte_offset = static_cast<int32_t>(typed_array->byte_offset());
    if (byte_offset > 0) {
      FixedTypedArrayBase elements =
          FixedTypedArrayBase::cast(typed_array->elements());
      // Must be off-heap layout.
      DCHECK(!typed_array->is_on_heap());

      void* pointer_with_offset = reinterpret_cast<void*>(
          reinterpret_cast<intptr_t>(elements->external_pointer()) +
          byte_offset);
      elements->set_external_pointer(pointer_with_offset);
    }
  } else if (obj->IsJSArrayBuffer()) {
    JSArrayBuffer buffer = JSArrayBuffer::cast(obj);
    // Only fixup for the off-heap case. The serialized backing-store field
    // holds an index into off_heap_backing_stores_.
    if (buffer->backing_store() != nullptr) {
      Smi store_index(reinterpret_cast<Address>(buffer->backing_store()));
      void* backing_store = off_heap_backing_stores_[store_index->value()];

      buffer->set_backing_store(backing_store);
      isolate_->heap()->RegisterNewArrayBuffer(buffer);
    }
  } else if (obj->IsFixedTypedArrayBase()) {
    FixedTypedArrayBase fta = FixedTypedArrayBase::cast(obj);
    // Only fixup for the off-heap case (signalled by a null base pointer).
    if (fta->base_pointer() == Smi::kZero) {
      Smi store_index(reinterpret_cast<Address>(fta->external_pointer()));
      void* backing_store = off_heap_backing_stores_[store_index->value()];
      fta->set_external_pointer(backing_store);
    }
  } else if (obj->IsBytecodeArray()) {
    // TODO(mythria): Remove these once we store the default values for these
    // fields in the serializer.
    BytecodeArray bytecode_array = BytecodeArray::cast(obj);
    bytecode_array->set_interrupt_budget(
        interpreter::Interpreter::InterruptBudget());
    bytecode_array->set_osr_loop_nesting_level(0);
  }
#ifdef DEBUG
  if (obj->IsDescriptorArray()) {
    DescriptorArray descriptor_array = DescriptorArray::cast(obj);
    DCHECK_EQ(0, descriptor_array->raw_number_of_marked_descriptors());
  }
#endif

  // Check alignment.
  DCHECK_EQ(0, Heap::GetFillToAlign(obj->address(),
                                    HeapObject::RequiredAlignment(obj->map())));
  return obj;
}
316 :
// Resolves a back-reference to an already-deserialized object in |space|.
// The stream encoding is space specific: large-object index for LO_SPACE,
// map index for MAP_SPACE, and (chunk index, chunk offset) otherwise. For
// RO_SPACE, once deserialization is complete the reference is resolved
// against actual read-only pages instead of the allocator. The result is
// also added to the hot-objects ring buffer.
HeapObject Deserializer::GetBackReferencedObject(int space) {
  HeapObject obj;
  switch (space) {
    case LO_SPACE:
      obj = allocator()->GetLargeObject(source_.GetInt());
      break;
    case MAP_SPACE:
      obj = allocator()->GetMap(source_.GetInt());
      break;
    case RO_SPACE: {
      uint32_t chunk_index = source_.GetInt();
      uint32_t chunk_offset = source_.GetInt();
      if (isolate()->heap()->deserialization_complete()) {
        // Walk the page list to translate (chunk_index, chunk_offset) into a
        // live read-only-space address.
        PagedSpace* read_only_space = isolate()->heap()->read_only_space();
        Page* page = read_only_space->first_page();
        for (uint32_t i = 0; i < chunk_index; ++i) {
          page = page->next_page();
        }
        Address address = page->OffsetToAddress(chunk_offset);
        obj = HeapObject::FromAddress(address);
      } else {
        obj = allocator()->GetObject(static_cast<AllocationSpace>(space),
                                     chunk_index, chunk_offset);
      }
      break;
    }
    default: {
      uint32_t chunk_index = source_.GetInt();
      uint32_t chunk_offset = source_.GetInt();
      obj = allocator()->GetObject(static_cast<AllocationSpace>(space),
                                   chunk_index, chunk_offset);
      break;
    }
  }

  // User-code strings may have been forwarded to a canonical entry; follow
  // the forwarding through the ThinString.
  if (deserializing_user_code() && obj->IsThinString()) {
    obj = ThinString::cast(obj)->actual();
  }

  hot_objects_.Add(obj);
  DCHECK(!HasWeakHeapObjectTag(obj->ptr()));
  return obj;
}
360 :
// Reads a single object reference from the stream into a stack slot and
// returns the (strong) heap object it denotes.
HeapObject Deserializer::ReadObject() {
  MaybeObject object;
  // We are reading to a location outside of JS heap, so pass NEW_SPACE to
  // avoid triggering write barriers.
  bool filled =
      ReadData(FullMaybeObjectSlot(&object), FullMaybeObjectSlot(&object + 1),
               NEW_SPACE, kNullAddress);
  CHECK(filled);
  return object.GetHeapObjectAssumeStrong();
}
371 :
// Allocates a new object of the size encoded in the stream in space
// |space_number| and fills its body from the stream. Post-processing is
// skipped when the object's contents were deferred (ReadData returned
// false); such objects are completed later by DeserializeDeferredObjects.
HeapObject Deserializer::ReadObject(int space_number) {
  const int size = source_.GetInt() << kObjectAlignmentBits;

  Address address =
      allocator()->Allocate(static_cast<AllocationSpace>(space_number), size);
  HeapObject obj = HeapObject::FromAddress(address);

  isolate_->heap()->OnAllocationEvent(obj, size);
  MaybeObjectSlot current(address);
  MaybeObjectSlot limit(address + size);

  if (ReadData(current, limit, space_number, address)) {
    // Only post process if object content has not been deferred.
    obj = PostProcessNewObject(obj, space_number);
  }

#ifdef DEBUG
  if (obj->IsCode()) {
    DCHECK(space_number == CODE_SPACE || space_number == CODE_LO_SPACE);
  } else {
    DCHECK(space_number != CODE_SPACE && space_number != CODE_LO_SPACE);
  }
#endif  // DEBUG
  return obj;
}
397 :
// Completes a code object whose raw instruction stream was already copied:
// reads the remaining tagged header fields and then replays the serializer's
// RelocInfo walk, dispatching each entry back into this visitor.
void Deserializer::ReadCodeObjectBody(int space_number,
                                      Address code_object_address) {
  // At this point the code object is already allocated, its map field is
  // initialized and its raw data fields and code stream are also read.
  // Now we read the rest of code header's fields.
  MaybeObjectSlot current(code_object_address + HeapObject::kHeaderSize);
  MaybeObjectSlot limit(code_object_address + Code::kDataStart);
  bool filled = ReadData(current, limit, space_number, code_object_address);
  CHECK(filled);

  // Now iterate RelocInfos the same way it was done by the serialzier and
  // deserialize respective data into RelocInfos.
  Code code = Code::cast(HeapObject::FromAddress(code_object_address));
  RelocIterator it(code, Code::BodyDescriptor::kRelocModeMask);
  for (; !it.done(); it.next()) {
    RelocInfo rinfo = *it.rinfo();
    rinfo.Visit(this);
  }
}
417 :
// RelocInfo visitor: deserializes the target Code object and patches the
// call/jump target in |host| to its instruction start.
void Deserializer::VisitCodeTarget(Code host, RelocInfo* rinfo) {
  HeapObject object = ReadObject();
  rinfo->set_target_address(Code::cast(object)->raw_instruction_start());
}
422 :
// RelocInfo visitor: deserializes an object embedded in the code stream and
// writes it into the reloc slot.
void Deserializer::VisitEmbeddedPointer(Code host, RelocInfo* rinfo) {
  HeapObject object = ReadObject();
  // Embedded object reference must be a strong one.
  rinfo->set_target_object(isolate_->heap(), object);
}
428 :
// RelocInfo visitor: runtime entries are never serialized anymore, so
// encountering one is a snapshot corruption.
void Deserializer::VisitRuntimeEntry(Code host, RelocInfo* rinfo) {
  // We no longer serialize code that contains runtime entries.
  UNREACHABLE();
}
433 :
// RelocInfo visitor: reads an external-reference id from the stream,
// resolves it, and patches the reloc slot, using the platform-specific
// encoding when the target is coded specially.
void Deserializer::VisitExternalReference(Code host, RelocInfo* rinfo) {
  byte data = source_.Get();
  CHECK_EQ(data, kExternalReference);

  Address address = ReadExternalReferenceCase();

  if (rinfo->IsCodedSpecially()) {
    Address location_of_branch_data = rinfo->pc();
    Assembler::deserialization_set_special_target_at(location_of_branch_data,
                                                     host, address);
  } else {
    WriteUnalignedValue(rinfo->target_address_address(), address);
  }
}
448 :
// RelocInfo visitor: restores an internal (within-code) reference that was
// serialized as an offset from the code entry point.
void Deserializer::VisitInternalReference(Code host, RelocInfo* rinfo) {
  byte data = source_.Get();
  CHECK_EQ(data, kInternalReference);

  // Internal reference target is encoded as an offset from code entry.
  int target_offset = source_.GetInt();
  DCHECK_LT(static_cast<unsigned>(target_offset),
            static_cast<unsigned>(host->raw_instruction_size()));
  Address target = host->entry() + target_offset;
  Assembler::deserialization_set_target_internal_reference_at(
      rinfo->pc(), target, rinfo->rmode());
}
461 :
// RelocInfo visitor: resolves a reference to an embedded (off-heap) builtin
// by builtin id and patches the reloc slot with its instruction start.
void Deserializer::VisitOffHeapTarget(Code host, RelocInfo* rinfo) {
  DCHECK(FLAG_embedded_builtins);
  byte data = source_.Get();
  CHECK_EQ(data, kOffHeapTarget);

  int builtin_index = source_.GetInt();
  DCHECK(Builtins::IsBuiltinId(builtin_index));

  CHECK_NOT_NULL(isolate_->embedded_blob());
  EmbeddedData d = EmbeddedData::FromBlob();
  Address address = d.InstructionStartOfBuiltin(builtin_index);
  CHECK_NE(kNullAddress, address);

  // TODO(ishell): implement RelocInfo::set_target_off_heap_target()
  if (RelocInfo::OffHeapTargetIsCodedSpecially()) {
    Address location_of_branch_data = rinfo->pc();
    Assembler::deserialization_set_special_target_at(location_of_branch_data,
                                                     host, address);
  } else {
    WriteUnalignedValue(rinfo->target_address_address(), address);
  }
}
484 :
485 : template <typename TSlot>
486 86265756 : TSlot Deserializer::ReadRepeatedObject(TSlot current, int repeat_count) {
487 86265756 : CHECK_LE(2, repeat_count);
488 :
489 86265756 : HeapObject heap_object = ReadObject();
490 : DCHECK(!Heap::InYoungGeneration(heap_object));
491 986625452 : for (int i = 0; i < repeat_count; i++) {
492 : // Repeated values are not subject to the write barrier so we don't need
493 : // to trigger it.
494 : current = Write(current, MaybeObject::FromObject(heap_object));
495 : }
496 86266314 : return current;
497 : }
498 :
// Sentinel callback installed in place of missing API-provided external
// references; invoking it aborts with a descriptive message.
static void NoExternalReferencesCallback() {
  // The following check will trigger if a function or object template
  // with references to native functions have been deserialized from
  // snapshot, but no actual external references were provided when the
  // isolate was created.
  CHECK_WITH_MSG(false, "No external references provided via API");
}
506 :
// The core bytecode-dispatch loop: reads serialized bytecodes from source_
// and materializes data into the slot range [current, limit). |source_space|
// is the space of the enclosing object and |current_object_address| its
// address (kNullAddress when writing outside the JS heap, e.g. roots).
// Returns false iff the object's body was deferred (kDeferred), in which
// case the caller must not post-process the object yet.
template <typename TSlot>
bool Deserializer::ReadData(TSlot current, TSlot limit, int source_space,
                            Address current_object_address) {
  Isolate* const isolate = isolate_;
  // Write barrier support costs around 1% in startup time. In fact there
  // are no new space objects in current boot snapshots, so it's not needed,
  // but that may change.
  bool write_barrier_needed =
      (current_object_address != kNullAddress && source_space != NEW_SPACE &&
       source_space != CODE_SPACE);
  while (current < limit) {
    byte data = source_.Get();
    switch (data) {
#define CASE_STATEMENT(bytecode, space_number) \
  case bytecode + space_number:                \
    STATIC_ASSERT((space_number & ~kSpaceMask) == 0);

#define CASE_BODY(bytecode, space_number_if_any)                             \
  current = ReadDataCase<TSlot, bytecode, space_number_if_any>(              \
      isolate, current, current_object_address, data, write_barrier_needed); \
  break;

// This generates a case and a body for the new space (which has to do extra
// write barrier handling) and handles the other spaces with fall-through cases
// and one body.
#define ALL_SPACES(bytecode)           \
  CASE_STATEMENT(bytecode, NEW_SPACE)  \
  CASE_BODY(bytecode, NEW_SPACE)       \
  CASE_STATEMENT(bytecode, OLD_SPACE)  \
  V8_FALLTHROUGH;                      \
  CASE_STATEMENT(bytecode, CODE_SPACE) \
  V8_FALLTHROUGH;                      \
  CASE_STATEMENT(bytecode, MAP_SPACE)  \
  V8_FALLTHROUGH;                      \
  CASE_STATEMENT(bytecode, LO_SPACE)   \
  V8_FALLTHROUGH;                      \
  CASE_STATEMENT(bytecode, RO_SPACE)   \
  CASE_BODY(bytecode, kAnyOldSpace)

#define FOUR_CASES(byte_code) \
  case byte_code:             \
  case byte_code + 1:         \
  case byte_code + 2:         \
  case byte_code + 3:

#define SIXTEEN_CASES(byte_code) \
  FOUR_CASES(byte_code)          \
  FOUR_CASES(byte_code + 4)      \
  FOUR_CASES(byte_code + 8)      \
  FOUR_CASES(byte_code + 12)

#define SINGLE_CASE(bytecode, space) \
  CASE_STATEMENT(bytecode, space)    \
  CASE_BODY(bytecode, space)

      // Deserialize a new object and write a pointer to it to the current
      // object.
      ALL_SPACES(kNewObject)
      // Find a recently deserialized object using its offset from the current
      // allocation point and write a pointer to it to the current object.
      ALL_SPACES(kBackref)
      // Find an object in the roots array and write a pointer to it to the
      // current object.
      SINGLE_CASE(kRootArray, RO_SPACE)
      // Find an object in the partial snapshots cache and write a pointer to it
      // to the current object.
      SINGLE_CASE(kPartialSnapshotCache, RO_SPACE)
      // Find an object in the partial snapshots cache and write a pointer to it
      // to the current object.
      SINGLE_CASE(kReadOnlyObjectCache, RO_SPACE)
      // Find an object in the attached references and write a pointer to it to
      // the current object.
      SINGLE_CASE(kAttachedReference, RO_SPACE)

#undef CASE_STATEMENT
#undef CASE_BODY
#undef ALL_SPACES

      // Find an external reference and write a pointer to it to the current
      // object.
      case kExternalReference: {
        Address address = ReadExternalReferenceCase();
        current = WriteAddress(current, address);
        break;
      }

      case kInternalReference:
      case kOffHeapTarget: {
        // These bytecodes are expected only during RelocInfo iteration.
        UNREACHABLE();
        break;
      }

      // Padding byte; no effect.
      case kNop:
        break;

      case kNextChunk: {
        int space = source_.Get();
        allocator()->MoveToNextChunk(static_cast<AllocationSpace>(space));
        break;
      }

      case kDeferred: {
        // Deferred can only occur right after the heap object header.
        DCHECK_EQ(current.address(), current_object_address + kTaggedSize);
        HeapObject obj = HeapObject::FromAddress(current_object_address);
        // If the deferred object is a map, its instance type may be used
        // during deserialization. Initialize it with a temporary value.
        if (obj->IsMap()) Map::cast(obj)->set_instance_type(FILLER_TYPE);
        current = limit;
        return false;
      }

      case kSynchronize:
        // If we get here then that indicates that you have a mismatch between
        // the number of GC roots when serializing and deserializing.
        UNREACHABLE();

      // Deserialize raw data of variable length.
      case kVariableRawData: {
        int size_in_bytes = source_.GetInt();
        DCHECK(IsAligned(size_in_bytes, kTaggedSize));
        source_.CopyRaw(current.ToVoidPtr(), size_in_bytes);
        current = TSlot(current.address() + size_in_bytes);
        break;
      }

      // Deserialize raw code directly into the body of the code object.
      case kVariableRawCode: {
        // VariableRawCode can only occur right after the heap object header.
        DCHECK_EQ(current.address(), current_object_address + kTaggedSize);
        int size_in_bytes = source_.GetInt();
        DCHECK(IsAligned(size_in_bytes, kTaggedSize));
        source_.CopyRaw(
            reinterpret_cast<void*>(current_object_address + Code::kDataStart),
            size_in_bytes);
        // Deserialize tagged fields in the code object header and reloc infos.
        ReadCodeObjectBody(source_space, current_object_address);
        // Set current to the code object end.
        current = TSlot(current.address() + Code::kDataStart -
                        HeapObject::kHeaderSize + size_in_bytes);
        CHECK_EQ(current, limit);
        break;
      }

      case kVariableRepeat: {
        int repeats = DecodeVariableRepeatCount(source_.GetInt());
        current = ReadRepeatedObject(current, repeats);
        break;
      }

      case kOffHeapBackingStore: {
        int byte_length = source_.GetInt();
        byte* backing_store = static_cast<byte*>(
            isolate->array_buffer_allocator()->AllocateUninitialized(
                byte_length));
        CHECK_NOT_NULL(backing_store);
        source_.CopyRaw(backing_store, byte_length);
        off_heap_backing_stores_.push_back(backing_store);
        break;
      }

      case kApiReference: {
        uint32_t reference_id = static_cast<uint32_t>(source_.GetInt());
        Address address;
        if (isolate->api_external_references()) {
          DCHECK_WITH_MSG(
              reference_id < num_api_references_,
              "too few external references provided through the API");
          address = static_cast<Address>(
              isolate->api_external_references()[reference_id]);
        } else {
          address = reinterpret_cast<Address>(NoExternalReferencesCallback);
        }
        current = WriteAddress(current, address);
        break;
      }

      case kClearedWeakReference:
        current = Write(current, HeapObjectReference::ClearedValue(isolate_));
        break;

      // Marks the next reference as weak; consumed by the next write.
      case kWeakPrefix:
        DCHECK(!allocator()->next_reference_is_weak());
        allocator()->set_next_reference_is_weak(true);
        break;

      case kAlignmentPrefix:
      case kAlignmentPrefix + 1:
      case kAlignmentPrefix + 2: {
        int alignment = data - (SerializerDeserializer::kAlignmentPrefix - 1);
        allocator()->SetAlignment(static_cast<AllocationAlignment>(alignment));
        break;
      }

      // First kNumberOfRootArrayConstants roots are guaranteed to be in
      // the old space.
      STATIC_ASSERT(
          static_cast<int>(RootIndex::kFirstImmortalImmovableRoot) == 0);
      STATIC_ASSERT(kNumberOfRootArrayConstants <=
                    static_cast<int>(RootIndex::kLastImmortalImmovableRoot));
      STATIC_ASSERT(kNumberOfRootArrayConstants == 32);
      SIXTEEN_CASES(kRootArrayConstants)
      SIXTEEN_CASES(kRootArrayConstants + 16) {
        int id = data & kRootArrayConstantsMask;
        RootIndex root_index = static_cast<RootIndex>(id);
        MaybeObject object = MaybeObject::FromObject(isolate->root(root_index));
        DCHECK(!Heap::InYoungGeneration(object));
        current = Write(current, object);
        break;
      }

      STATIC_ASSERT(kNumberOfHotObjects == 8);
      FOUR_CASES(kHotObject)
      FOUR_CASES(kHotObject + 4) {
        int index = data & kHotObjectMask;
        Object hot_object = hot_objects_.Get(index);
        MaybeObject hot_maybe_object = MaybeObject::FromObject(hot_object);
        if (allocator()->GetAndClearNextReferenceIsWeak()) {
          hot_maybe_object = MaybeObject::MakeWeak(hot_maybe_object);
        }
        // Don't update current pointer here as it may be needed for write
        // barrier.
        Write(current, hot_maybe_object);
        if (write_barrier_needed && Heap::InYoungGeneration(hot_object)) {
          HeapObject current_object =
              HeapObject::FromAddress(current_object_address);
          GenerationalBarrier(current_object,
                              MaybeObjectSlot(current.address()),
                              hot_maybe_object);
        }
        ++current;
        break;
      }

      // Deserialize raw data of fixed length from 1 to 32 words.
      STATIC_ASSERT(kNumberOfFixedRawData == 32);
      SIXTEEN_CASES(kFixedRawData)
      SIXTEEN_CASES(kFixedRawData + 16) {
        int size_in_tagged = data - kFixedRawDataStart;
        source_.CopyRaw(current.ToVoidPtr(), size_in_tagged * kTaggedSize);
        current += size_in_tagged;
        break;
      }

      STATIC_ASSERT(kNumberOfFixedRepeat == 16);
      SIXTEEN_CASES(kFixedRepeat) {
        int repeats = DecodeFixedRepeatCount(data);
        current = ReadRepeatedObject(current, repeats);
        break;
      }

#ifdef DEBUG
#define UNUSED_CASE(byte_code) \
  case byte_code:              \
    UNREACHABLE();
      UNUSED_SERIALIZER_BYTE_CODES(UNUSED_CASE)
#endif
#undef UNUSED_CASE

#undef SIXTEEN_CASES
#undef FOUR_CASES
#undef SINGLE_CASE
    }
  }
  CHECK_EQ(limit, current);
  return true;
}
775 :
// Reads an external-reference id from the stream and resolves it via the
// isolate's external reference table.
Address Deserializer::ReadExternalReferenceCase() {
  uint32_t reference_id = static_cast<uint32_t>(source_.GetInt());
  return isolate_->external_reference_table()->address(reference_id);
}
780 :
// Handles one pointer-writing bytecode (kNewObject, kBackref, kRootArray,
// kReadOnlyObjectCache, kPartialSnapshotCache, kAttachedReference — selected
// at compile time via the |bytecode| template parameter): resolves the
// referenced heap object, writes a strong or weak reference into |current|,
// and emits a generational write barrier when both needed and requested.
// Returns the slot just past the written reference.
template <typename TSlot, SerializerDeserializer::Bytecode bytecode,
          int space_number_if_any>
TSlot Deserializer::ReadDataCase(Isolate* isolate, TSlot current,
                                 Address current_object_address, byte data,
                                 bool write_barrier_needed) {
  bool emit_write_barrier = false;
  // For kAnyOldSpace bytecodes the space is encoded in the low bits of |data|.
  int space_number = space_number_if_any == kAnyOldSpace ? (data & kSpaceMask)
                                                         : space_number_if_any;
  HeapObject heap_object;
  HeapObjectReferenceType reference_type =
      allocator()->GetAndClearNextReferenceIsWeak()
          ? HeapObjectReferenceType::WEAK
          : HeapObjectReferenceType::STRONG;

  if (bytecode == kNewObject) {
    heap_object = ReadObject(space_number);
    emit_write_barrier = (space_number == NEW_SPACE);
  } else if (bytecode == kBackref) {
    heap_object = GetBackReferencedObject(space_number);
    emit_write_barrier = (space_number == NEW_SPACE);
  } else if (bytecode == kRootArray) {
    int id = source_.GetInt();
    RootIndex root_index = static_cast<RootIndex>(id);
    heap_object = HeapObject::cast(isolate->root(root_index));
    emit_write_barrier = Heap::InYoungGeneration(heap_object);
    hot_objects_.Add(heap_object);
  } else if (bytecode == kReadOnlyObjectCache) {
    int cache_index = source_.GetInt();
    heap_object = HeapObject::cast(
        isolate->heap()->read_only_heap()->read_only_object_cache()->at(
            cache_index));
    DCHECK(!Heap::InYoungGeneration(heap_object));
    emit_write_barrier = false;
  } else if (bytecode == kPartialSnapshotCache) {
    int cache_index = source_.GetInt();
    heap_object =
        HeapObject::cast(isolate->partial_snapshot_cache()->at(cache_index));
    emit_write_barrier = Heap::InYoungGeneration(heap_object);
  } else {
    DCHECK_EQ(bytecode, kAttachedReference);
    int index = source_.GetInt();
    heap_object = *attached_objects_[index];
    emit_write_barrier = Heap::InYoungGeneration(heap_object);
  }
  HeapObjectReference heap_object_ref =
      reference_type == HeapObjectReferenceType::STRONG
          ? HeapObjectReference::Strong(heap_object)
          : HeapObjectReference::Weak(heap_object);
  // Don't update current pointer here as it may be needed for write barrier.
  Write(current, heap_object_ref);
  if (emit_write_barrier && write_barrier_needed) {
    HeapObject host_object = HeapObject::FromAddress(current_object_address);
    SLOW_DCHECK(isolate->heap()->Contains(host_object));
    GenerationalBarrier(host_object, MaybeObjectSlot(current.address()),
                        heap_object_ref);
  }
  return current + 1;
}
839 :
840 : } // namespace internal
841 120216 : } // namespace v8
|