Line data Source code
1 : // Copyright 2016 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/snapshot/serializer.h"
6 :
7 : #include "src/assembler-inl.h"
8 : #include "src/heap/heap-inl.h" // For Space::identity().
9 : #include "src/heap/read-only-heap.h"
10 : #include "src/interpreter/interpreter.h"
11 : #include "src/objects/code.h"
12 : #include "src/objects/js-array-buffer-inl.h"
13 : #include "src/objects/js-array-inl.h"
14 : #include "src/objects/map.h"
15 : #include "src/objects/slots-inl.h"
16 : #include "src/objects/smi.h"
17 : #include "src/snapshot/natives.h"
18 : #include "src/snapshot/snapshot.h"
19 :
20 : namespace v8 {
21 : namespace internal {
22 :
23 1137 : Serializer::Serializer(Isolate* isolate)
24 : : isolate_(isolate),
25 : external_reference_encoder_(isolate),
26 : root_index_map_(isolate),
27 4548 : allocator_(this) {
28 : #ifdef OBJECT_PRINT
29 : if (FLAG_serialization_statistics) {
30 : for (int space = 0; space < LAST_SPACE; ++space) {
31 : instance_type_count_[space] = NewArray<int>(kInstanceTypes);
32 : instance_type_size_[space] = NewArray<size_t>(kInstanceTypes);
33 : for (int i = 0; i < kInstanceTypes; i++) {
34 : instance_type_count_[space][i] = 0;
35 : instance_type_size_[space][i] = 0;
36 : }
37 : }
38 : } else {
39 : for (int space = 0; space < LAST_SPACE; ++space) {
40 : instance_type_count_[space] = nullptr;
41 : instance_type_size_[space] = nullptr;
42 : }
43 : }
44 : #endif // OBJECT_PRINT
45 1137 : }
46 :
47 4548 : Serializer::~Serializer() {
48 1137 : if (code_address_map_ != nullptr) delete code_address_map_;
49 : #ifdef OBJECT_PRINT
50 : for (int space = 0; space < LAST_SPACE; ++space) {
51 : if (instance_type_count_[space] != nullptr) {
52 : DeleteArray(instance_type_count_[space]);
53 : DeleteArray(instance_type_size_[space]);
54 : }
55 : }
56 : #endif // OBJECT_PRINT
57 1137 : }
58 :
59 : #ifdef OBJECT_PRINT
60 : void Serializer::CountInstanceType(Map map, int size, AllocationSpace space) {
61 : int instance_type = map->instance_type();
62 : instance_type_count_[space][instance_type]++;
63 : instance_type_size_[space][instance_type] += size;
64 : }
65 : #endif // OBJECT_PRINT
66 :
67 1137 : void Serializer::OutputStatistics(const char* name) {
68 1137 : if (!FLAG_serialization_statistics) return;
69 :
70 0 : PrintF("%s:\n", name);
71 0 : allocator()->OutputStatistics();
72 :
73 : #ifdef OBJECT_PRINT
74 : PrintF(" Instance types (count and bytes):\n");
75 : #define PRINT_INSTANCE_TYPE(Name) \
76 : for (int space = 0; space < LAST_SPACE; ++space) { \
77 : if (instance_type_count_[space][Name]) { \
78 : PrintF("%10d %10" PRIuS " %-10s %s\n", \
79 : instance_type_count_[space][Name], \
80 : instance_type_size_[space][Name], \
81 : Heap::GetSpaceName(static_cast<AllocationSpace>(space)), #Name); \
82 : } \
83 : }
84 : INSTANCE_TYPE_LIST(PRINT_INSTANCE_TYPE)
85 : #undef PRINT_INSTANCE_TYPE
86 :
87 : PrintF("\n");
88 : #endif // OBJECT_PRINT
89 : }
90 :
91 1137 : void Serializer::SerializeDeferredObjects() {
92 8989 : while (!deferred_objects_.empty()) {
93 3926 : HeapObject obj = deferred_objects_.back();
94 : deferred_objects_.pop_back();
95 3926 : ObjectSerializer obj_serializer(this, obj, &sink_);
96 3926 : obj_serializer.SerializeDeferred();
97 : }
98 : sink_.Put(kSynchronize, "Finished with deferred objects");
99 1137 : }
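A minimal standalone sketch of the deferral loop above, under assumed names (not V8 API): deferred objects are drained LIFO after the main pass, and serializing one deferred object may queue further ones.

    #include <vector>

    // Illustrative helper only: drain a LIFO queue of deferred items,
    // allowing the callback to push more items while draining.
    template <typename T, typename Fn>
    void DrainDeferred(std::vector<T>* deferred, Fn serialize_one) {
      while (!deferred->empty()) {
        T obj = deferred->back();
        deferred->pop_back();
        serialize_one(obj);  // may append new entries to *deferred
      }
    }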
100 :
101 1726696 : bool Serializer::MustBeDeferred(HeapObject object) { return false; }
102 :
103 402294 : void Serializer::VisitRootPointers(Root root, const char* description,
104 : FullObjectSlot start, FullObjectSlot end) {
105 1209753 : for (FullObjectSlot current = start; current < end; ++current) {
106 405165 : SerializeRootObject(*current);
107 : }
108 402294 : }
109 :
110 559155 : void Serializer::SerializeRootObject(Object object) {
111 559155 : if (object->IsSmi()) {
112 4173 : PutSmi(Smi::cast(object));
113 : } else {
114 554982 : SerializeObject(HeapObject::cast(object));
115 : }
116 559155 : }
117 :
118 : #ifdef DEBUG
119 : void Serializer::PrintStack() {
120 : for (const auto o : stack_) {
121 : o->Print();
122 : PrintF("\n");
123 : }
124 : }
125 : #endif // DEBUG
126 :
127 4918568 : bool Serializer::SerializeRoot(HeapObject obj) {
128 : RootIndex root_index;
129 : // Derived serializers are responsible for determining if the root has
130 : // actually been serialized before calling this.
131 4918568 : if (root_index_map()->Lookup(obj, &root_index)) {
132 3571465 : PutRoot(root_index, obj);
133 3571465 : return true;
134 : }
135 : return false;
136 : }
137 :
138 9096003 : bool Serializer::SerializeHotObject(HeapObject obj) {
139 : // Encode a reference to a hot object by its index in the working set.
140 : int index = hot_objects_.Find(obj);
141 9096003 : if (index == HotObjectsList::kNotFound) return false;
142 : DCHECK(index >= 0 && index < kNumberOfHotObjects);
143 2268148 : if (FLAG_trace_serializer) {
144 0 : PrintF(" Encoding hot object %d:", index);
145 0 : obj->ShortPrint();
146 0 : PrintF("\n");
147 : }
148 : // TODO(ishell): remove kHotObjectWithSkip
149 2268148 : sink_.Put(kHotObject + index, "HotObject");
150 2268148 : return true;
151 : }
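A hedged sketch of the "hot objects" working set consulted above (the real HotObjectsList lives elsewhere in V8; the capacity and types here are assumptions): a small fixed-size circular buffer of recently serialized objects, so repeat references can be encoded as a one-byte index.

    #include <array>
    #include <cstdint>

    // Illustrative only: fixed-size ring buffer keyed by object address.
    class HotList {
     public:
      static constexpr int kNotFound = -1;
      static constexpr int kSize = 8;  // assumed capacity for illustration
      void Add(uintptr_t obj) {
        slots_[next_] = obj;
        next_ = (next_ + 1) % kSize;
      }
      int Find(uintptr_t obj) const {
        for (int i = 0; i < kSize; i++) {
          if (slots_[i] == obj) return i;
        }
        return kNotFound;
      }
     private:
      std::array<uintptr_t, kSize> slots_{};
      int next_ = 0;
    };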
152 :
153 3142133 : bool Serializer::SerializeBackReference(HeapObject obj) {
154 : SerializerReference reference =
155 3142133 : reference_map_.LookupReference(reinterpret_cast<void*>(obj.ptr()));
156 3142133 : if (!reference.is_valid()) return false;
157 : // Encode the location of an already deserialized object in order to write
158 : // its location into a later object. We can encode the location as an
159 : // offset from the start of the deserialized objects or as an offset
160 : // backwards from the current allocation pointer.
161 706471 : if (reference.is_attached_reference()) {
162 1117 : if (FLAG_trace_serializer) {
163 : PrintF(" Encoding attached reference %d\n",
164 0 : reference.attached_reference_index());
165 : }
166 1117 : PutAttachedReference(reference);
167 : } else {
168 : DCHECK(reference.is_back_reference());
169 705354 : if (FLAG_trace_serializer) {
170 0 : PrintF(" Encoding back reference to: ");
171 0 : obj->ShortPrint();
172 0 : PrintF("\n");
173 : }
174 :
175 : PutAlignmentPrefix(obj);
176 : AllocationSpace space = reference.space();
177 705354 : sink_.Put(kBackref + space, "BackRef");
178 705354 : PutBackReference(obj, reference);
179 : }
180 : return true;
181 : }
182 :
183 0 : bool Serializer::ObjectIsBytecodeHandler(HeapObject obj) const {
184 0 : if (!obj->IsCode()) return false;
185 0 : return (Code::cast(obj)->kind() == Code::BYTECODE_HANDLER);
186 : }
187 :
188 3571465 : void Serializer::PutRoot(RootIndex root, HeapObject object) {
189 3571465 : int root_index = static_cast<int>(root);
190 3571465 : if (FLAG_trace_serializer) {
191 0 : PrintF(" Encoding root %d:", root_index);
192 0 : object->ShortPrint();
193 0 : PrintF("\n");
194 : }
195 :
196 : // Assert that the first 32 root array items are a conscious choice. They are
197 : // chosen so that the most common ones can be encoded more efficiently.
198 : STATIC_ASSERT(static_cast<int>(RootIndex::kArgumentsMarker) ==
199 : kNumberOfRootArrayConstants - 1);
200 :
201 : // TODO(ulan): Check that it works with young large objects.
202 6541296 : if (root_index < kNumberOfRootArrayConstants &&
203 : !Heap::InYoungGeneration(object)) {
204 2969831 : sink_.Put(kRootArrayConstants + root_index, "RootConstant");
205 : } else {
206 : sink_.Put(kRootArray, "RootSerialization");
207 601634 : sink_.PutInt(root_index, "root_index");
208 : hot_objects_.Add(object);
209 : }
210 3571465 : }
211 :
212 4173 : void Serializer::PutSmi(Smi smi) {
213 : sink_.Put(kOnePointerRawData, "Smi");
214 : Tagged_t raw_value = static_cast<Tagged_t>(smi.ptr());
215 : byte bytes[kTaggedSize];
216 : memcpy(bytes, &raw_value, kTaggedSize);
217 70941 : for (int i = 0; i < kTaggedSize; i++) sink_.Put(bytes[i], "Byte");
218 4173 : }
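The byte-wise Smi write above can be illustrated with a self-contained snippet (the tagged size and sink type are assumptions, not V8's definitions): memcpy splits the raw tagged value into bytes without violating aliasing rules, and each byte is emitted in order.

    #include <cstdint>
    #include <cstring>
    #include <vector>

    constexpr int kTaggedSizeSketch = sizeof(uintptr_t);  // assumed tagged size

    // Illustrative only: emit a raw tagged value one byte at a time.
    void PutRawTagged(std::vector<uint8_t>* sink, uintptr_t raw_value) {
      uint8_t bytes[kTaggedSizeSketch];
      std::memcpy(bytes, &raw_value, kTaggedSizeSketch);
      for (int i = 0; i < kTaggedSizeSketch; i++) sink->push_back(bytes[i]);
    }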
219 :
220 709280 : void Serializer::PutBackReference(HeapObject object,
221 : SerializerReference reference) {
222 : DCHECK(allocator()->BackReferenceIsAlreadyAllocated(reference));
223 709280 : switch (reference.space()) {
224 : case MAP_SPACE:
225 36737 : sink_.PutInt(reference.map_index(), "BackRefMapIndex");
226 36737 : break;
227 :
228 : case LO_SPACE:
229 10 : sink_.PutInt(reference.large_object_index(), "BackRefLargeObjectIndex");
230 10 : break;
231 :
232 : default:
233 672533 : sink_.PutInt(reference.chunk_index(), "BackRefChunkIndex");
234 672533 : sink_.PutInt(reference.chunk_offset(), "BackRefChunkOffset");
235 672533 : break;
236 : }
237 :
238 : hot_objects_.Add(object);
239 709280 : }
240 :
241 1117 : void Serializer::PutAttachedReference(SerializerReference reference) {
242 : DCHECK(reference.is_attached_reference());
243 : sink_.Put(kAttachedReference, "AttachedRef");
244 1117 : sink_.PutInt(reference.attached_reference_index(), "AttachedRefIndex");
245 1117 : }
246 :
247 0 : int Serializer::PutAlignmentPrefix(HeapObject object) {
248 : AllocationAlignment alignment = HeapObject::RequiredAlignment(object->map());
249 : if (alignment != kWordAligned) {
250 : DCHECK(1 <= alignment && alignment <= 3);
251 : byte prefix = (kAlignmentPrefix - 1) + alignment;
252 : sink_.Put(prefix, "Alignment");
253 : return Heap::GetMaximumFillToAlign(alignment);
254 : }
255 : return 0;
256 : }
257 :
258 10109 : void Serializer::PutNextChunk(int space) {
259 : sink_.Put(kNextChunk, "NextChunk");
260 : sink_.Put(space, "NextChunkSpace");
261 10109 : }
262 :
263 480381 : void Serializer::PutRepeat(int repeat_count) {
264 480381 : if (repeat_count <= kLastEncodableFixedRepeatCount) {
265 : sink_.Put(EncodeFixedRepeat(repeat_count), "FixedRepeat");
266 : } else {
267 : sink_.Put(kVariableRepeat, "VariableRepeat");
268 1869 : sink_.PutInt(EncodeVariableRepeatCount(repeat_count), "repeat count");
269 : }
270 480381 : }
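A hedged sketch of the repeat encoding split above (opcode values and the cutoff are placeholders, not V8's bytecode constants): short runs get a dedicated one-byte opcode, longer runs fall back to a generic opcode followed by the count.

    #include <cstdint>
    #include <vector>

    constexpr uint8_t kVariableRepeatOp = 0x1F;   // placeholder opcode
    constexpr uint8_t kFixedRepeatBaseOp = 0x20;  // placeholder opcode range
    constexpr int kLastFixedRepeat = 16;          // assumed cutoff

    // Illustrative only: choose between fixed and variable repeat encodings.
    void EmitRepeat(std::vector<uint8_t>* sink, int repeat_count) {
      if (repeat_count <= kLastFixedRepeat) {
        sink->push_back(static_cast<uint8_t>(kFixedRepeatBaseOp + repeat_count));
      } else {
        sink->push_back(kVariableRepeatOp);
        // Simplified: the real serializer writes the count with PutInt.
        sink->push_back(static_cast<uint8_t>(repeat_count));
      }
    }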
271 :
272 1137 : void Serializer::Pad(int padding_offset) {
273 : // The non-branching GetInt will read up to 3 bytes too far, so we need
274 : // to pad the snapshot to make sure we don't read over the end.
275 7959 : for (unsigned i = 0; i < sizeof(int32_t) - 1; i++) {
276 : sink_.Put(kNop, "Padding");
277 : }
278 : // Pad up to pointer size for checksum.
279 4937 : while (!IsAligned(sink_.Position() + padding_offset, kPointerAlignment)) {
280 : sink_.Put(kNop, "Padding");
281 : }
282 1137 : }
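The padding logic above can be mimicked with a plain byte vector (the filler value and alignment constant are assumptions): three filler bytes protect the over-reading 32-bit fetch, then more fillers bring the stream to pointer alignment for the checksum.

    #include <cstdint>
    #include <vector>

    constexpr uint8_t kNopByte = 0x00;                     // placeholder filler
    constexpr size_t kPointerAlignmentSketch = sizeof(void*);

    // Illustrative only: pad a byte stream the same way Pad() does.
    void PadSink(std::vector<uint8_t>* sink, size_t padding_offset) {
      for (unsigned i = 0; i < sizeof(int32_t) - 1; i++) sink->push_back(kNopByte);
      while ((sink->size() + padding_offset) % kPointerAlignmentSketch != 0) {
        sink->push_back(kNopByte);
      }
    }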
283 :
284 502 : void Serializer::InitializeCodeAddressMap() {
285 502 : isolate_->InitializeLoggingAndCounters();
286 502 : code_address_map_ = new CodeAddressMap(isolate_);
287 502 : }
288 :
289 398032 : Code Serializer::CopyCode(Code code) {
290 : code_buffer_.clear(); // Clear buffer without deleting backing store.
291 : int size = code->CodeSize();
292 : code_buffer_.insert(code_buffer_.end(),
293 : reinterpret_cast<byte*>(code->address()),
294 398032 : reinterpret_cast<byte*>(code->address() + size));
295 : // When pointer compression is enabled, the checked cast would try to
296 : // decompress the map field of the off-heap Code object.
297 : return Code::unchecked_cast(HeapObject::FromAddress(
298 796064 : reinterpret_cast<Address>(&code_buffer_.front())));
299 : }
300 :
301 1928219 : void Serializer::ObjectSerializer::SerializePrologue(AllocationSpace space,
302 : int size, Map map) {
303 1928219 : if (serializer_->code_address_map_) {
304 : const char* code_name =
305 : serializer_->code_address_map_->Lookup(object_->address());
306 1692610 : LOG(serializer_->isolate_,
307 : CodeNameEvent(object_->address(), sink_->Position(), code_name));
308 : }
309 :
310 : SerializerReference back_reference;
311 1928219 : if (space == LO_SPACE) {
312 60 : sink_->Put(kNewObject + space, "NewLargeObject");
313 60 : sink_->PutInt(size >> kObjectAlignmentBits, "ObjectSizeInWords");
314 60 : CHECK(!object_->IsCode());
315 120 : back_reference = serializer_->allocator()->AllocateLargeObject(size);
316 1928159 : } else if (space == MAP_SPACE) {
317 : DCHECK_EQ(Map::kSize, size);
318 121622 : back_reference = serializer_->allocator()->AllocateMap();
319 60811 : sink_->Put(kNewObject + space, "NewMap");
320 : // This is redundant, but we include it anyway.
321 60811 : sink_->PutInt(size >> kObjectAlignmentBits, "ObjectSizeInWords");
322 : } else {
323 : int fill = serializer_->PutAlignmentPrefix(object_);
324 3734696 : back_reference = serializer_->allocator()->Allocate(space, size + fill);
325 1867348 : sink_->Put(kNewObject + space, "NewObject");
326 1867348 : sink_->PutInt(size >> kObjectAlignmentBits, "ObjectSizeInWords");
327 : }
328 :
329 : #ifdef OBJECT_PRINT
330 : if (FLAG_serialization_statistics) {
331 : serializer_->CountInstanceType(map, size, space);
332 : }
333 : #endif // OBJECT_PRINT
334 :
335 : // Mark this object as already serialized.
336 1928219 : serializer_->reference_map()->Add(reinterpret_cast<void*>(object_.ptr()),
337 : back_reference);
338 :
339 : // Serialize the map (first word of the object).
340 1928219 : serializer_->SerializeObject(map);
341 1928219 : }
342 :
343 80 : int32_t Serializer::ObjectSerializer::SerializeBackingStore(
344 : void* backing_store, int32_t byte_length) {
345 : SerializerReference reference =
346 160 : serializer_->reference_map()->LookupReference(backing_store);
347 :
348 : // Serialize the off-heap backing store.
349 80 : if (!reference.is_valid()) {
350 35 : sink_->Put(kOffHeapBackingStore, "Off-heap backing store");
351 35 : sink_->PutInt(byte_length, "length");
352 35 : sink_->PutRaw(static_cast<byte*>(backing_store), byte_length,
353 35 : "BackingStore");
354 70 : reference = serializer_->allocator()->AllocateOffHeapBackingStore();
355 : // Mark this backing store as already serialized.
356 35 : serializer_->reference_map()->Add(backing_store, reference);
357 : }
358 :
359 80 : return static_cast<int32_t>(reference.off_heap_backing_store_index());
360 : }
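A standalone sketch of the backing-store deduplication above (the map and index scheme stand in for V8's reference map and allocator, and are assumptions): the raw payload is written only the first time a given pointer is seen, and later callers reuse the recorded index.

    #include <cstdint>
    #include <map>
    #include <vector>

    // Illustrative only: emit an off-heap buffer once and hand out its index.
    int32_t SerializeBackingStoreOnce(std::map<void*, int32_t>* seen,
                                      std::vector<uint8_t>* sink,
                                      void* backing_store, int32_t byte_length) {
      auto it = seen->find(backing_store);
      if (it != seen->end()) return it->second;  // already serialized
      const uint8_t* data = static_cast<const uint8_t*>(backing_store);
      sink->insert(sink->end(), data, data + byte_length);
      int32_t index = static_cast<int32_t>(seen->size());
      (*seen)[backing_store] = index;
      return index;
    }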
361 :
362 65 : void Serializer::ObjectSerializer::SerializeJSTypedArray() {
363 : JSTypedArray typed_array = JSTypedArray::cast(object_);
364 : FixedTypedArrayBase elements =
365 : FixedTypedArrayBase::cast(typed_array->elements());
366 :
367 65 : if (!typed_array->WasDetached()) {
368 60 : if (!typed_array->is_on_heap()) {
369 : // Explicitly serialize the backing store now.
370 : JSArrayBuffer buffer = JSArrayBuffer::cast(typed_array->buffer());
371 45 : CHECK_LE(buffer->byte_length(), Smi::kMaxValue);
372 45 : CHECK_LE(typed_array->byte_offset(), Smi::kMaxValue);
373 45 : int32_t byte_length = static_cast<int32_t>(buffer->byte_length());
374 45 : int32_t byte_offset = static_cast<int32_t>(typed_array->byte_offset());
375 :
376 : // We need to calculate the backing store from the external pointer
377 : // because the ArrayBuffer may already have been serialized.
378 : void* backing_store = reinterpret_cast<void*>(
379 45 : reinterpret_cast<intptr_t>(elements->external_pointer()) -
380 45 : byte_offset);
381 45 : int32_t ref = SerializeBackingStore(backing_store, byte_length);
382 :
383 : // The external_pointer is the backing_store + typed_array->byte_offset.
384 : // To properly share the buffer, we set the backing store ref here. On
385 : // deserialization we re-add the byte_offset to external_pointer.
386 : elements->set_external_pointer(
387 : reinterpret_cast<void*>(Smi::FromInt(ref).ptr()));
388 : }
389 : } else {
390 : // When a JSArrayBuffer is detached, the FixedTypedArray that points to the
391 : // same backing store does not know anything about it. This fixup step finds
392 : // detached TypedArrays and clears the values in the FixedTypedArray so that
393 : // we don't try to serialize the now-invalid backing store.
394 : elements->set_external_pointer(reinterpret_cast<void*>(Smi::kZero.ptr()));
395 : elements->set_length(0);
396 : }
397 65 : SerializeObject();
398 65 : }
399 :
400 55 : void Serializer::ObjectSerializer::SerializeJSArrayBuffer() {
401 : JSArrayBuffer buffer = JSArrayBuffer::cast(object_);
402 : void* backing_store = buffer->backing_store();
403 : // We cannot store a byte_length larger than the Smi range in the snapshot.
404 55 : CHECK_LE(buffer->byte_length(), Smi::kMaxValue);
405 55 : int32_t byte_length = static_cast<int32_t>(buffer->byte_length());
406 :
407 : // The embedder-allocated backing store only exists for the off-heap case.
408 55 : if (backing_store != nullptr) {
409 35 : int32_t ref = SerializeBackingStore(backing_store, byte_length);
410 : buffer->set_backing_store(reinterpret_cast<void*>(Smi::FromInt(ref).ptr()));
411 : }
412 55 : SerializeObject();
413 : buffer->set_backing_store(backing_store);
414 55 : }
415 :
416 236 : void Serializer::ObjectSerializer::SerializeExternalString() {
417 236 : Heap* heap = serializer_->isolate()->heap();
418 : // For external strings with known resources, we replace the resource field
419 : // with the encoded external reference, which we restore upon deserialization.
420 : // For native source code strings, we replace the resource field
421 : // with the native source id.
422 : // For the rest, we serialize them to look like ordinary sequential strings.
423 236 : if (object_->map() != ReadOnlyRoots(heap).native_source_string_map()) {
424 30 : ExternalString string = ExternalString::cast(object_);
425 : Address resource = string->resource_as_address();
426 : ExternalReferenceEncoder::Value reference;
427 60 : if (serializer_->external_reference_encoder_.TryEncode(resource).To(
428 : &reference)) {
429 : DCHECK(reference.is_from_api());
430 10 : string->set_uint32_as_resource(reference.index());
431 10 : SerializeObject();
432 10 : string->set_address_as_resource(resource);
433 : } else {
434 20 : SerializeExternalStringAsSequentialString();
435 : }
436 : } else {
437 206 : ExternalOneByteString string = ExternalOneByteString::cast(object_);
438 : DCHECK(string->is_uncached());
439 : const NativesExternalStringResource* resource =
440 : reinterpret_cast<const NativesExternalStringResource*>(
441 : string->resource());
442 : // Replace the resource field with the type and index of the native source.
443 : string->set_resource(resource->EncodeForSerialization());
444 206 : SerializeObject();
445 : // Restore the resource field.
446 : string->set_resource(resource);
447 : }
448 236 : }
449 :
450 20 : void Serializer::ObjectSerializer::SerializeExternalStringAsSequentialString() {
451 : // Instead of serializing this as an external string, we serialize
452 : // an imaginary sequential string with the same content.
453 20 : ReadOnlyRoots roots(serializer_->isolate());
454 : DCHECK(object_->IsExternalString());
455 : DCHECK(object_->map() != roots.native_source_string_map());
456 : ExternalString string = ExternalString::cast(object_);
457 : int length = string->length();
458 : Map map;
459 : int content_size;
460 : int allocation_size;
461 : const byte* resource;
462 : // Find the map and size for the imaginary sequential string.
463 : bool internalized = object_->IsInternalizedString();
464 20 : if (object_->IsExternalOneByteString()) {
465 : map = internalized ? roots.one_byte_internalized_string_map()
466 30 : : roots.one_byte_string_map();
467 : allocation_size = SeqOneByteString::SizeFor(length);
468 : content_size = length * kCharSize;
469 : resource = reinterpret_cast<const byte*>(
470 15 : ExternalOneByteString::cast(string)->resource()->data());
471 : } else {
472 10 : map = internalized ? roots.internalized_string_map() : roots.string_map();
473 : allocation_size = SeqTwoByteString::SizeFor(length);
474 5 : content_size = length * kShortSize;
475 : resource = reinterpret_cast<const byte*>(
476 5 : ExternalTwoByteString::cast(string)->resource()->data());
477 : }
478 :
479 : AllocationSpace space =
480 20 : (allocation_size > kMaxRegularHeapObjectSize) ? LO_SPACE : OLD_SPACE;
481 20 : SerializePrologue(space, allocation_size, map);
482 :
483 : // Output the rest of the imaginary string.
484 20 : int bytes_to_output = allocation_size - HeapObject::kHeaderSize;
485 : DCHECK(IsAligned(bytes_to_output, kTaggedSize));
486 :
487 : // Output raw data header. Do not bother with common raw length cases here.
488 20 : sink_->Put(kVariableRawData, "RawDataForString");
489 20 : sink_->PutInt(bytes_to_output, "length");
490 :
491 : // Serialize string header (except for map).
492 : uint8_t* string_start = reinterpret_cast<uint8_t*>(string->address());
493 340 : for (int i = HeapObject::kHeaderSize; i < SeqString::kHeaderSize; i++) {
494 160 : sink_->PutSection(string_start[i], "StringHeader");
495 : }
496 :
497 : // Serialize string content.
498 20 : sink_->PutRaw(resource, content_size, "StringContent");
499 :
500 : // Since the allocation size is rounded up to object alignment, there
501 : // may be left-over bytes that need to be padded.
502 20 : int padding_size = allocation_size - SeqString::kHeaderSize - content_size;
503 : DCHECK(0 <= padding_size && padding_size < kObjectAlignment);
504 80 : for (int i = 0; i < padding_size; i++) sink_->PutSection(0, "StringPadding");
505 20 : }
506 :
507 : // Clear and later restore the next link in the weak cell or allocation site.
508 : // TODO(all): replace this with proper iteration of weak slots in serializer.
509 : class UnlinkWeakNextScope {
510 : public:
511 1928199 : explicit UnlinkWeakNextScope(Heap* heap, HeapObject object) {
512 5784597 : if (object->IsAllocationSite() &&
513 1928199 : AllocationSite::cast(object)->HasWeakNext()) {
514 0 : object_ = object;
515 0 : next_ = AllocationSite::cast(object)->weak_next();
516 0 : AllocationSite::cast(object)->set_weak_next(
517 0 : ReadOnlyRoots(heap).undefined_value());
518 : }
519 1928199 : }
520 :
521 1928199 : ~UnlinkWeakNextScope() {
522 1928199 : if (!object_.is_null()) {
523 0 : AllocationSite::cast(object_)->set_weak_next(next_,
524 0 : UPDATE_WEAK_WRITE_BARRIER);
525 : }
526 : }
527 :
528 : private:
529 : HeapObject object_;
530 : Object next_;
531 : DISALLOW_HEAP_ALLOCATION(no_gc_)
532 : };
533 :
534 1928219 : void Serializer::ObjectSerializer::Serialize() {
535 1928219 : if (FLAG_trace_serializer) {
536 0 : PrintF(" Encoding heap object: ");
537 0 : object_->ShortPrint();
538 0 : PrintF("\n");
539 : }
540 :
541 1928219 : if (object_->IsExternalString()) {
542 236 : SerializeExternalString();
543 236 : return;
544 1927983 : } else if (!ReadOnlyHeap::Contains(object_)) {
545 : // Only clear padding for strings outside RO_SPACE. RO_SPACE should have
546 : // been cleared elsewhere.
547 1729623 : if (object_->IsSeqOneByteString()) {
548 : // Clear padding bytes at the end. Done here to avoid having to do this
549 : // at allocation sites in generated code.
550 104511 : SeqOneByteString::cast(object_)->clear_padding();
551 1625112 : } else if (object_->IsSeqTwoByteString()) {
552 0 : SeqTwoByteString::cast(object_)->clear_padding();
553 : }
554 : }
555 1927983 : if (object_->IsJSTypedArray()) {
556 65 : SerializeJSTypedArray();
557 65 : return;
558 : }
559 1927918 : if (object_->IsJSArrayBuffer()) {
560 55 : SerializeJSArrayBuffer();
561 55 : return;
562 : }
563 :
564 : // We don't expect fillers.
565 : DCHECK(!object_->IsFiller());
566 :
567 1927863 : if (object_->IsScript()) {
568 : // Clear cached line ends.
569 2324 : Object undefined = ReadOnlyRoots(serializer_->isolate()).undefined_value();
570 1162 : Script::cast(object_)->set_line_ends(undefined);
571 : }
572 :
573 1927863 : SerializeObject();
574 : }
575 :
576 1928199 : void Serializer::ObjectSerializer::SerializeObject() {
577 1928199 : int size = object_->Size();
578 1928199 : Map map = object_->map();
579 : AllocationSpace space =
580 : MemoryChunk::FromHeapObject(object_)->owner()->identity();
581 : // Young generation large objects are tenured.
582 1928199 : if (space == NEW_LO_SPACE) {
583 : space = LO_SPACE;
584 : }
585 1928199 : SerializePrologue(space, size, map);
586 :
587 : // Serialize the rest of the object.
588 1928199 : CHECK_EQ(0, bytes_processed_so_far_);
589 1928199 : bytes_processed_so_far_ = kTaggedSize;
590 :
591 1928199 : RecursionScope recursion(serializer_);
592 : // Objects that are immediately post-processed during deserialization
593 : // cannot be deferred, since post-processing requires the object content.
594 3853255 : if ((recursion.ExceedsMaximum() && CanBeDeferred(object_)) ||
595 1925056 : serializer_->MustBeDeferred(object_)) {
596 3926 : serializer_->QueueDeferredObject(object_);
597 3926 : sink_->Put(kDeferred, "Deferring object content");
598 : return;
599 : }
600 :
601 1924273 : SerializeContent(map, size);
602 : }
603 :
604 3926 : void Serializer::ObjectSerializer::SerializeDeferred() {
605 3926 : if (FLAG_trace_serializer) {
606 0 : PrintF(" Encoding deferred heap object: ");
607 0 : object_->ShortPrint();
608 0 : PrintF("\n");
609 : }
610 :
611 3926 : int size = object_->Size();
612 3926 : Map map = object_->map();
613 : SerializerReference back_reference =
614 3926 : serializer_->reference_map()->LookupReference(
615 7852 : reinterpret_cast<void*>(object_.ptr()));
616 : DCHECK(back_reference.is_back_reference());
617 :
618 : // Serialize the rest of the object.
619 3926 : CHECK_EQ(0, bytes_processed_so_far_);
620 3926 : bytes_processed_so_far_ = kTaggedSize;
621 :
622 : serializer_->PutAlignmentPrefix(object_);
623 3926 : sink_->Put(kNewObject + back_reference.space(), "deferred object");
624 3926 : serializer_->PutBackReference(object_, back_reference);
625 3926 : sink_->PutInt(size >> kTaggedSizeLog2, "deferred object size");
626 :
627 3926 : SerializeContent(map, size);
628 3926 : }
629 :
630 1928199 : void Serializer::ObjectSerializer::SerializeContent(Map map, int size) {
631 3856398 : UnlinkWeakNextScope unlink_weak_next(serializer_->isolate()->heap(), object_);
632 1928199 : if (object_->IsCode()) {
633 : // For code objects, output raw bytes first.
634 398032 : OutputCode(size);
635 : // Then iterate references via reloc info.
636 398032 : object_->IterateBody(map, size, this);
637 : } else {
638 : // For other objects, iterate references first.
639 1530167 : object_->IterateBody(map, size, this);
640 : // Then output data payload, if any.
641 1530167 : OutputRawData(object_->address() + size);
642 : }
643 1928199 : }
644 :
645 2313918 : void Serializer::ObjectSerializer::VisitPointers(HeapObject host,
646 : ObjectSlot start,
647 : ObjectSlot end) {
648 4627836 : VisitPointers(host, MaybeObjectSlot(start), MaybeObjectSlot(end));
649 2313918 : }
650 :
651 2476453 : void Serializer::ObjectSerializer::VisitPointers(HeapObject host,
652 : MaybeObjectSlot start,
653 : MaybeObjectSlot end) {
654 2476453 : MaybeObjectSlot current = start;
655 7831865 : while (current < end) {
656 9508714 : while (current < end && (*current)->IsSmi()) {
657 : ++current;
658 : }
659 2677706 : if (current < end) {
660 2323612 : OutputRawData(current.address());
661 : }
662 : // TODO(ishell): Revisit this change once we stick to 32-bit compressed
663 : // tagged values.
664 5083412 : while (current < end && (*current)->IsCleared()) {
665 41082 : sink_->Put(kClearedWeakReference, "ClearedWeakReference");
666 41082 : bytes_processed_so_far_ += kTaggedSize;
667 : ++current;
668 : }
669 2677706 : HeapObject current_contents;
670 : HeapObjectReferenceType reference_type;
671 21855037 : while (current < end &&
672 : (*current)->GetHeapObject(¤t_contents, &reference_type)) {
673 : RootIndex root_index;
674 : // Compute repeat count and write repeat prefix if applicable.
675 : // Repeats are not subject to the write barrier so we can only use
676 : // immortal immovable root members. They are never in new space.
677 : MaybeObjectSlot repeat_end = current + 1;
678 10774697 : if (repeat_end < end &&
679 9164954 : serializer_->root_index_map()->Lookup(current_contents,
680 2777006 : &root_index) &&
681 11746232 : RootsTable::IsImmortalImmovable(root_index) &&
682 : *current == *repeat_end) {
683 : DCHECK_EQ(reference_type, HeapObjectReferenceType::STRONG);
684 : DCHECK(!Heap::InYoungGeneration(current_contents));
685 5467454 : while (repeat_end < end && *repeat_end == *current) {
686 : repeat_end++;
687 : }
688 480381 : int repeat_count = static_cast<int>(repeat_end - current);
689 : current = repeat_end;
690 480381 : bytes_processed_so_far_ += repeat_count * kTaggedSize;
691 480381 : serializer_->PutRepeat(repeat_count);
692 : } else {
693 5711839 : bytes_processed_so_far_ += kTaggedSize;
694 : ++current;
695 : }
696 : // Now write the object itself.
697 6192220 : if (reference_type == HeapObjectReferenceType::WEAK) {
698 188739 : sink_->Put(kWeakPrefix, "WeakReference");
699 : }
700 6192220 : serializer_->SerializeObject(current_contents);
701 : }
702 : }
703 2476453 : }
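The run-length detection inside VisitPointers reduces to the following standalone scan (slot and value types are simplified to plain words): starting at a slot, count how many consecutive slots hold the same value so a single repeat record can stand in for the run.

    #include <cstddef>
    #include <cstdint>

    // Illustrative only: length of the run of equal values starting at current.
    size_t CountRun(const uintptr_t* current, const uintptr_t* end) {
      const uintptr_t* repeat_end = current + 1;
      while (repeat_end < end && *repeat_end == *current) ++repeat_end;
      return static_cast<size_t>(repeat_end - current);
    }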
704 :
705 20 : void Serializer::ObjectSerializer::VisitEmbeddedPointer(Code host,
706 : RelocInfo* rinfo) {
707 : Object object = rinfo->target_object();
708 20 : serializer_->SerializeObject(HeapObject::cast(object));
709 20 : bytes_processed_so_far_ += rinfo->target_address_size();
710 20 : }
711 :
712 8728 : void Serializer::ObjectSerializer::VisitExternalReference(Foreign host,
713 : Address* p) {
714 : auto encoded_reference =
715 8728 : serializer_->EncodeExternalReference(host->foreign_address());
716 8728 : if (encoded_reference.is_from_api()) {
717 110 : sink_->Put(kApiReference, "ApiRef");
718 : } else {
719 8618 : sink_->Put(kExternalReference, "ExternalRef");
720 : }
721 8728 : sink_->PutInt(encoded_reference.index(), "reference index");
722 8728 : bytes_processed_so_far_ += kSystemPointerSize;
723 8728 : }
724 :
725 98 : void Serializer::ObjectSerializer::VisitExternalReference(Code host,
726 : RelocInfo* rinfo) {
727 : Address target = rinfo->target_external_reference();
728 98 : auto encoded_reference = serializer_->EncodeExternalReference(target);
729 98 : if (encoded_reference.is_from_api()) {
730 : DCHECK(!rinfo->IsCodedSpecially());
731 0 : sink_->Put(kApiReference, "ApiRef");
732 : } else {
733 98 : sink_->Put(kExternalReference, "ExternalRef");
734 : }
735 : DCHECK_NE(target, kNullAddress); // Code does not reference null.
736 98 : sink_->PutInt(encoded_reference.index(), "reference index");
737 98 : bytes_processed_so_far_ += rinfo->target_address_size();
738 98 : }
739 :
740 0 : void Serializer::ObjectSerializer::VisitInternalReference(Code host,
741 : RelocInfo* rinfo) {
742 : Address entry = Code::cast(object_)->entry();
743 : DCHECK_GE(rinfo->target_internal_reference(), entry);
744 0 : uintptr_t target_offset = rinfo->target_internal_reference() - entry;
745 : DCHECK_LE(target_offset, Code::cast(object_)->raw_instruction_size());
746 0 : sink_->Put(kInternalReference, "InternalRef");
747 0 : sink_->PutInt(target_offset, "internal ref value");
748 0 : }
749 :
750 0 : void Serializer::ObjectSerializer::VisitRuntimeEntry(Code host,
751 : RelocInfo* rinfo) {
752 : // We no longer serialize code that contains runtime entries.
753 0 : UNREACHABLE();
754 : }
755 :
756 397242 : void Serializer::ObjectSerializer::VisitOffHeapTarget(Code host,
757 : RelocInfo* rinfo) {
758 : DCHECK(FLAG_embedded_builtins);
759 : STATIC_ASSERT(EmbeddedData::kTableSize == Builtins::builtin_count);
760 :
761 : Address addr = rinfo->target_off_heap_target();
762 397242 : CHECK_NE(kNullAddress, addr);
763 :
764 397242 : Code target = InstructionStream::TryLookupCode(serializer_->isolate(), addr);
765 397242 : CHECK(Builtins::IsIsolateIndependentBuiltin(target));
766 :
767 397242 : sink_->Put(kOffHeapTarget, "OffHeapTarget");
768 397242 : sink_->PutInt(target->builtin_index(), "builtin index");
769 397242 : bytes_processed_so_far_ += rinfo->target_address_size();
770 397242 : }
771 :
772 1827 : void Serializer::ObjectSerializer::VisitCodeTarget(Code host,
773 : RelocInfo* rinfo) {
774 : #ifdef V8_TARGET_ARCH_ARM
775 : DCHECK(!RelocInfo::IsRelativeCodeTarget(rinfo->rmode()));
776 : #endif
777 1827 : Code object = Code::GetCodeFromTargetAddress(rinfo->target_address());
778 1827 : serializer_->SerializeObject(object);
779 1827 : bytes_processed_so_far_ += rinfo->target_address_size();
780 1827 : }
781 :
782 : namespace {
783 :
784 : // Similar to OutputRawData, but substitutes the given field with the given
785 : // value instead of reading it from the object.
786 298366 : void OutputRawWithCustomField(SnapshotByteSink* sink, Address object_start,
787 : int written_so_far, int bytes_to_write,
788 : int field_offset, int field_size,
789 : const byte* field_value) {
790 298366 : int offset = field_offset - written_so_far;
791 298366 : if (0 <= offset && offset < bytes_to_write) {
792 : DCHECK_GE(bytes_to_write, offset + field_size);
793 45650 : sink->PutRaw(reinterpret_cast<byte*>(object_start + written_so_far), offset,
794 45650 : "Bytes");
795 45650 : sink->PutRaw(field_value, field_size, "Bytes");
796 45650 : written_so_far += offset + field_size;
797 45650 : bytes_to_write -= offset + field_size;
798 45650 : sink->PutRaw(reinterpret_cast<byte*>(object_start + written_so_far),
799 45650 : bytes_to_write, "Bytes");
800 : } else {
801 252716 : sink->PutRaw(reinterpret_cast<byte*>(object_start + written_so_far),
802 252716 : bytes_to_write, "Bytes");
803 : }
804 298366 : }
805 : } // anonymous namespace
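The field-substitution pattern in OutputRawWithCustomField can be shown with a self-contained variant (a byte vector replaces SnapshotByteSink, and the offset is relative to the range being written): the bytes before the field, the caller-supplied field value, and the bytes after it are emitted in three pieces, so mutable fields serialize deterministically.

    #include <cstdint>
    #include <vector>

    // Illustrative only: copy a byte range, overriding one field with a fixed value.
    void WriteWithOverride(std::vector<uint8_t>* out, const uint8_t* start,
                           int bytes_to_write, int field_offset, int field_size,
                           const uint8_t* field_value) {
      if (0 <= field_offset && field_offset + field_size <= bytes_to_write) {
        out->insert(out->end(), start, start + field_offset);
        out->insert(out->end(), field_value, field_value + field_size);
        out->insert(out->end(), start + field_offset + field_size,
                    start + bytes_to_write);
      } else {
        out->insert(out->end(), start, start + bytes_to_write);
      }
    }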
806 :
807 3853779 : void Serializer::ObjectSerializer::OutputRawData(Address up_to) {
808 : Address object_start = object_->address();
809 3853779 : int base = bytes_processed_so_far_;
810 3853779 : int up_to_offset = static_cast<int>(up_to - object_start);
811 3853779 : int to_skip = up_to_offset - bytes_processed_so_far_;
812 : int bytes_to_output = to_skip;
813 3853779 : bytes_processed_so_far_ += to_skip;
814 : DCHECK_GE(to_skip, 0);
815 3853779 : if (bytes_to_output != 0) {
816 : DCHECK(to_skip == bytes_to_output);
817 2039818 : if (IsAligned(bytes_to_output, kObjectAlignment) &&
818 : bytes_to_output <= kNumberOfFixedRawData * kTaggedSize) {
819 2038659 : int size_in_words = bytes_to_output >> kTaggedSizeLog2;
820 2038659 : sink_->PutSection(kFixedRawDataStart + size_in_words, "FixedRawData");
821 : } else {
822 1159 : sink_->Put(kVariableRawData, "VariableRawData");
823 1159 : sink_->PutInt(bytes_to_output, "length");
824 : }
825 : #ifdef MEMORY_SANITIZER
826 : // Check that we do not serialize uninitialized memory.
827 : __msan_check_mem_is_initialized(
828 : reinterpret_cast<void*>(object_start + base), bytes_to_output);
829 : #endif // MEMORY_SANITIZER
830 2039818 : if (object_->IsBytecodeArray()) {
831 : // The bytecode age field can be changed by GC concurrently.
832 3158 : byte field_value = BytecodeArray::kNoAgeBytecodeAge;
833 3158 : OutputRawWithCustomField(sink_, object_start, base, bytes_to_output,
834 : BytecodeArray::kBytecodeAgeOffset,
835 3158 : sizeof(field_value), &field_value);
836 2036660 : } else if (object_->IsDescriptorArray()) {
837 : // The number of marked descriptors field can be changed by GC
838 : // concurrently.
839 : byte field_value[2];
840 295208 : field_value[0] = 0;
841 295208 : field_value[1] = 0;
842 295208 : OutputRawWithCustomField(
843 : sink_, object_start, base, bytes_to_output,
844 : DescriptorArray::kRawNumberOfMarkedDescriptorsOffset,
845 295208 : sizeof(field_value), field_value);
846 : } else {
847 1741452 : sink_->PutRaw(reinterpret_cast<byte*>(object_start + base),
848 1741452 : bytes_to_output, "Bytes");
849 : }
850 : }
851 3853779 : }
852 :
853 398032 : void Serializer::ObjectSerializer::OutputCode(int size) {
854 : DCHECK_EQ(kTaggedSize, bytes_processed_so_far_);
855 : Code on_heap_code = Code::cast(object_);
856 : // To make snapshots reproducible, we make a copy of the code object
857 : // and wipe all pointers in the copy, which we then serialize.
858 398032 : Code off_heap_code = serializer_->CopyCode(on_heap_code);
859 : int mode_mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
860 : RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
861 : RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
862 : RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) |
863 : RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED) |
864 : RelocInfo::ModeMask(RelocInfo::OFF_HEAP_TARGET) |
865 : RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
866 : // With enabled pointer compression normal accessors no longer work for
867 : // off-heap objects, so we have to get the relocation info data via the
868 : // on-heap code object.
869 398032 : ByteArray relocation_info = on_heap_code->unchecked_relocation_info();
870 797219 : for (RelocIterator it(off_heap_code, relocation_info, mode_mask); !it.done();
871 399187 : it.next()) {
872 : RelocInfo* rinfo = it.rinfo();
873 : rinfo->WipeOut();
874 : }
875 : // We need to wipe out the header fields *after* wiping out the
876 : // relocations, because some of these fields are needed for the latter.
877 398032 : off_heap_code->WipeOutHeader();
878 :
879 398032 : Address start = off_heap_code->address() + Code::kDataStart;
880 398032 : int bytes_to_output = size - Code::kDataStart;
881 : DCHECK(IsAligned(bytes_to_output, kTaggedSize));
882 :
883 398032 : sink_->Put(kVariableRawCode, "VariableRawCode");
884 398032 : sink_->PutInt(bytes_to_output, "length");
885 :
886 : #ifdef MEMORY_SANITIZER
887 : // Check that we do not serialize uninitialized memory.
888 : __msan_check_mem_is_initialized(reinterpret_cast<void*>(start),
889 : bytes_to_output);
890 : #endif // MEMORY_SANITIZER
891 398032 : sink_->PutRaw(reinterpret_cast<byte*>(start), bytes_to_output, "Code");
892 398032 : }
893 :
894 : } // namespace internal
895 122036 : } // namespace v8
|