Line data Source code
1 : // Copyright 2016 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/snapshot/serializer.h"
6 :
7 : #include "src/assembler-inl.h"
8 : #include "src/heap/heap.h"
9 : #include "src/interpreter/interpreter.h"
10 : #include "src/objects/code.h"
11 : #include "src/objects/js-array-buffer-inl.h"
12 : #include "src/objects/js-array-inl.h"
13 : #include "src/objects/map.h"
14 : #include "src/objects/slots-inl.h"
15 : #include "src/objects/smi.h"
16 : #include "src/snapshot/natives.h"
17 : #include "src/snapshot/snapshot.h"
18 :
19 : namespace v8 {
20 : namespace internal {
21 :
22 1112 : Serializer::Serializer(Isolate* isolate)
23 : : isolate_(isolate),
24 : external_reference_encoder_(isolate),
25 : root_index_map_(isolate),
26 4448 : allocator_(this) {
27 : #ifdef OBJECT_PRINT
28 : if (FLAG_serialization_statistics) {
29 : for (int space = 0; space < LAST_SPACE; ++space) {
30 : instance_type_count_[space] = NewArray<int>(kInstanceTypes);
31 : instance_type_size_[space] = NewArray<size_t>(kInstanceTypes);
32 : for (int i = 0; i < kInstanceTypes; i++) {
33 : instance_type_count_[space][i] = 0;
34 : instance_type_size_[space][i] = 0;
35 : }
36 : }
37 : } else {
38 : for (int space = 0; space < LAST_SPACE; ++space) {
39 : instance_type_count_[space] = nullptr;
40 : instance_type_size_[space] = nullptr;
41 : }
42 : }
43 : #endif // OBJECT_PRINT
44 1112 : }
45 :
46 3336 : Serializer::~Serializer() {
47 1112 : if (code_address_map_ != nullptr) delete code_address_map_;
48 : #ifdef OBJECT_PRINT
49 : for (int space = 0; space < LAST_SPACE; ++space) {
50 : if (instance_type_count_[space] != nullptr) {
51 : DeleteArray(instance_type_count_[space]);
52 : DeleteArray(instance_type_size_[space]);
53 : }
54 : }
55 : #endif // OBJECT_PRINT
56 1112 : }
57 :
58 : #ifdef OBJECT_PRINT
59 : void Serializer::CountInstanceType(Map map, int size, AllocationSpace space) {
60 : int instance_type = map->instance_type();
61 : instance_type_count_[space][instance_type]++;
62 : instance_type_size_[space][instance_type] += size;
63 : }
64 : #endif // OBJECT_PRINT
65 :
66 1112 : void Serializer::OutputStatistics(const char* name) {
67 2224 : if (!FLAG_serialization_statistics) return;
68 :
69 0 : PrintF("%s:\n", name);
70 0 : allocator()->OutputStatistics();
71 :
72 : #ifdef OBJECT_PRINT
73 : PrintF(" Instance types (count and bytes):\n");
74 : #define PRINT_INSTANCE_TYPE(Name) \
75 : for (int space = 0; space < LAST_SPACE; ++space) { \
76 : if (instance_type_count_[space][Name]) { \
77 : PrintF("%10d %10" PRIuS " %-10s %s\n", \
78 : instance_type_count_[space][Name], \
79 : instance_type_size_[space][Name], \
80 : AllocationSpaceName(static_cast<AllocationSpace>(space)), #Name); \
81 : } \
82 : }
83 : INSTANCE_TYPE_LIST(PRINT_INSTANCE_TYPE)
84 : #undef PRINT_INSTANCE_TYPE
85 :
86 : PrintF("\n");
87 : #endif // OBJECT_PRINT
88 : }
89 :
90 1112 : void Serializer::SerializeDeferredObjects() {
91 6120 : while (!deferred_objects_.empty()) {
92 3896 : HeapObject obj = deferred_objects_.back();
93 : deferred_objects_.pop_back();
94 3896 : ObjectSerializer obj_serializer(this, obj, &sink_, kPlain, kStartOfObject);
95 3896 : obj_serializer.SerializeDeferred();
96 : }
97 : sink_.Put(kSynchronize, "Finished with deferred objects");
98 1112 : }
99 :
100 1646767 : bool Serializer::MustBeDeferred(HeapObject object) { return false; }
101 :
102 384138 : void Serializer::VisitRootPointers(Root root, const char* description,
103 : FullObjectSlot start, FullObjectSlot end) {
104 1155175 : for (FullObjectSlot current = start; current < end; ++current) {
105 386899 : SerializeRootObject(*current);
106 : }
107 384138 : }
108 :
109 532730 : void Serializer::SerializeRootObject(Object object) {
110 532730 : if (object->IsSmi()) {
111 4013 : PutSmi(Smi::cast(object));
112 : } else {
113 1057434 : SerializeObject(HeapObject::cast(object), kPlain, kStartOfObject, 0);
114 : }
115 532730 : }
116 :
117 : #ifdef DEBUG
118 : void Serializer::PrintStack() {
119 : for (const auto o : stack_) {
120 : o->Print();
121 : PrintF("\n");
122 : }
123 : }
124 : #endif // DEBUG
125 :
126 4694928 : bool Serializer::SerializeRoot(HeapObject obj, HowToCode how_to_code,
127 : WhereToPoint where_to_point, int skip) {
128 : RootIndex root_index;
129 : // Derived serializers are responsible for determining if the root has
130 : // actually been serialized before calling this.
131 4694928 : if (root_index_map()->Lookup(obj, &root_index)) {
132 3407065 : PutRoot(root_index, obj, how_to_code, where_to_point, skip);
133 3407065 : return true;
134 : }
135 : return false;
136 : }
137 :
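: // A "hot" object is one of the most recently serialized objects. The
: // deserializer keeps an identical working set in sync (via the
: // hot_objects_.Add() calls in PutRoot and PutBackReference below), so a
: // single kHotObject + index bytecode suffices to reproduce the reference.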
138 8687221 : bool Serializer::SerializeHotObject(HeapObject obj, HowToCode how_to_code,
139 : WhereToPoint where_to_point, int skip) {
140 8687221 : if (how_to_code != kPlain || where_to_point != kStartOfObject) return false;
141 : // Encode a reference to a hot object by its index in the working set.
142 : int index = hot_objects_.Find(obj);
143 8687221 : if (index == HotObjectsList::kNotFound) return false;
144 : DCHECK(index >= 0 && index < kNumberOfHotObjects);
145 2171271 : if (FLAG_trace_serializer) {
146 0 : PrintF(" Encoding hot object %d:", index);
147 0 : obj->ShortPrint();
148 0 : PrintF("\n");
149 : }
150 2171271 : if (skip != 0) {
151 15 : sink_.Put(kHotObjectWithSkip + index, "HotObjectWithSkip");
152 15 : sink_.PutInt(skip, "HotObjectSkipDistance");
153 : } else {
154 2171256 : sink_.Put(kHotObject + index, "HotObject");
155 : }
156 : return true;
157 : }
158 :
159 3000597 : bool Serializer::SerializeBackReference(HeapObject obj, HowToCode how_to_code,
160 : WhereToPoint where_to_point, int skip) {
161 : SerializerReference reference =
162 3000597 : reference_map_.LookupReference(reinterpret_cast<void*>(obj.ptr()));
163 3000597 : if (!reference.is_valid()) return false;
164 : // Encode the location of an already deserialized object in order to write
165 : // its location into a later object. We can encode the location as an
166 : // offset from the start of the deserialized objects or as an offset
167 : // backwards from the current allocation pointer.
168 679389 : if (reference.is_attached_reference()) {
169 : FlushSkip(skip);
170 1092 : if (FLAG_trace_serializer) {
171 : PrintF(" Encoding attached reference %d\n",
172 0 : reference.attached_reference_index());
173 : }
174 1092 : PutAttachedReference(reference, how_to_code, where_to_point);
175 : } else {
176 : DCHECK(reference.is_back_reference());
177 678297 : if (FLAG_trace_serializer) {
178 0 : PrintF(" Encoding back reference to: ");
179 0 : obj->ShortPrint();
180 0 : PrintF("\n");
181 : }
182 :
183 : PutAlignmentPrefix(obj);
184 : AllocationSpace space = reference.space();
185 678297 : if (skip == 0) {
186 678292 : sink_.Put(kBackref + how_to_code + where_to_point + space, "BackRef");
187 : } else {
188 : sink_.Put(kBackrefWithSkip + how_to_code + where_to_point + space,
189 5 : "BackRefWithSkip");
190 5 : sink_.PutInt(skip, "BackRefSkipDistance");
191 : }
192 678297 : PutBackReference(obj, reference);
193 : }
194 : return true;
195 : }
196 :
197 0 : bool Serializer::ObjectIsBytecodeHandler(HeapObject obj) const {
198 0 : if (!obj->IsCode()) return false;
199 0 : return (Code::cast(obj)->kind() == Code::BYTECODE_HANDLER);
200 : }
201 :
202 3407065 : void Serializer::PutRoot(RootIndex root, HeapObject object,
203 : SerializerDeserializer::HowToCode how_to_code,
204 : SerializerDeserializer::WhereToPoint where_to_point,
205 : int skip) {
206 3407065 : int root_index = static_cast<int>(root);
207 3407065 : if (FLAG_trace_serializer) {
208 0 : PrintF(" Encoding root %d:", root_index);
209 0 : object->ShortPrint();
210 0 : PrintF("\n");
211 : }
212 :
213 : // Assert that the first 32 root array items are a conscious choice. They are
214 : // chosen so that the most common ones can be encoded more efficiently.
215 : STATIC_ASSERT(static_cast<int>(RootIndex::kArgumentsMarker) ==
216 : kNumberOfRootArrayConstants - 1);
217 :
218 10221195 : if (how_to_code == kPlain && where_to_point == kStartOfObject &&
219 6263621 : root_index < kNumberOfRootArrayConstants && !Heap::InNewSpace(object)) {
220 2856556 : if (skip == 0) {
221 2856556 : sink_.Put(kRootArrayConstants + root_index, "RootConstant");
222 : } else {
223 0 : sink_.Put(kRootArrayConstantsWithSkip + root_index, "RootConstant");
224 0 : sink_.PutInt(skip, "SkipInPutRoot");
225 : }
226 : } else {
227 : FlushSkip(skip);
228 550509 : sink_.Put(kRootArray + how_to_code + where_to_point, "RootSerialization");
229 550509 : sink_.PutInt(root_index, "root_index");
230 : hot_objects_.Add(object);
231 : }
232 3407065 : }
233 :
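: // Smis are immediate values rather than heap objects, so they are emitted
: // inline: a kOnePointerRawData bytecode followed by the kTaggedSize raw
: // bytes of the tagged value, which the deserializer copies back verbatim.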
234 4013 : void Serializer::PutSmi(Smi smi) {
235 : sink_.Put(kOnePointerRawData, "Smi");
236 : Tagged_t raw_value = static_cast<Tagged_t>(smi.ptr());
237 : byte bytes[kTaggedSize];
238 : memcpy(bytes, &raw_value, kTaggedSize);
239 36117 : for (int i = 0; i < kTaggedSize; i++) sink_.Put(bytes[i], "Byte");
240 4013 : }
241 :
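: // A back reference identifies an already serialized object by its
: // allocation coordinates: a map index in MAP_SPACE, an object index in
: // LO_SPACE, or a chunk index plus chunk offset in the paged spaces. The
: // referenced object is also added to the hot-object working set.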
242 682193 : void Serializer::PutBackReference(HeapObject object,
243 : SerializerReference reference) {
244 : DCHECK(allocator()->BackReferenceIsAlreadyAllocated(reference));
245 682193 : switch (reference.space()) {
246 : case MAP_SPACE:
247 33320 : sink_.PutInt(reference.map_index(), "BackRefMapIndex");
248 33320 : break;
249 :
250 : case LO_SPACE:
251 10 : sink_.PutInt(reference.large_object_index(), "BackRefLargeObjectIndex");
252 10 : break;
253 :
254 : default:
255 648863 : sink_.PutInt(reference.chunk_index(), "BackRefChunkIndex");
256 648863 : sink_.PutInt(reference.chunk_offset(), "BackRefChunkOffset");
257 648863 : break;
258 : }
259 :
260 : hot_objects_.Add(object);
261 682193 : }
262 :
263 1092 : void Serializer::PutAttachedReference(SerializerReference reference,
264 : HowToCode how_to_code,
265 : WhereToPoint where_to_point) {
266 : DCHECK(reference.is_attached_reference());
267 : DCHECK((how_to_code == kPlain && where_to_point == kStartOfObject) ||
268 : (how_to_code == kFromCode && where_to_point == kStartOfObject) ||
269 : (how_to_code == kFromCode && where_to_point == kInnerPointer));
270 1092 : sink_.Put(kAttachedReference + how_to_code + where_to_point, "AttachedRef");
271 1092 : sink_.PutInt(reference.attached_reference_index(), "AttachedRefIndex");
272 1092 : }
273 :
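: // If the object requires stronger-than-word alignment, emit an alignment
: // prefix bytecode and return the maximum number of filler bytes the
: // deserializer may need, so that callers can reserve them when allocating.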
274 0 : int Serializer::PutAlignmentPrefix(HeapObject object) {
275 : AllocationAlignment alignment = HeapObject::RequiredAlignment(object->map());
276 : if (alignment != kWordAligned) {
277 : DCHECK(1 <= alignment && alignment <= 3);
278 : byte prefix = (kAlignmentPrefix - 1) + alignment;
279 : sink_.Put(prefix, "Alignment");
280 : return Heap::GetMaximumFillToAlign(alignment);
281 : }
282 : return 0;
283 : }
284 :
285 9350 : void Serializer::PutNextChunk(int space) {
286 : sink_.Put(kNextChunk, "NextChunk");
287 9350 : sink_.Put(space, "NextChunkSpace");
288 9350 : }
289 :
290 1112 : void Serializer::Pad(int padding_offset) {
291 : // The non-branching GetInt will read up to 3 bytes too far, so we need
292 : // to pad the snapshot to make sure we don't read over the end.
293 4448 : for (unsigned i = 0; i < sizeof(int32_t) - 1; i++) {
294 : sink_.Put(kNop, "Padding");
295 : }
296 : // Pad up to pointer size for checksum.
297 11594 : while (!IsAligned(sink_.Position() + padding_offset, kPointerAlignment)) {
298 : sink_.Put(kNop, "Padding");
299 : }
300 1112 : }
301 :
302 482 : void Serializer::InitializeCodeAddressMap() {
303 482 : isolate_->InitializeLoggingAndCounters();
304 482 : code_address_map_ = new CodeAddressMap(isolate_);
305 482 : }
306 :
307 379269 : Code Serializer::CopyCode(Code code) {
308 : code_buffer_.clear(); // Clear buffer without deleting backing store.
309 : int size = code->CodeSize();
310 : code_buffer_.insert(code_buffer_.end(),
311 : reinterpret_cast<byte*>(code->address()),
312 379269 : reinterpret_cast<byte*>(code->address() + size));
313 : // When pointer compression is enabled, the checked cast would try to
314 : // decompress the map field of the off-heap Code object.
315 : return Code::unchecked_cast(HeapObject::FromAddress(
316 758538 : reinterpret_cast<Address>(&code_buffer_.front())));
317 : }
318 :
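: // The prologue emits kNewObject for the target space together with the
: // object size in words, reserves the matching back reference with the
: // allocator, records the object in the reference map so that later
: // occurrences become back references, and finally serializes the map word.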
319 1837929 : void Serializer::ObjectSerializer::SerializePrologue(AllocationSpace space,
320 : int size, Map map) {
321 1837929 : if (serializer_->code_address_map_) {
322 : const char* code_name =
323 : serializer_->code_address_map_->Lookup(object_->address());
324 1613347 : LOG(serializer_->isolate_,
325 : CodeNameEvent(object_->address(), sink_->Position(), code_name));
326 : }
327 :
328 : SerializerReference back_reference;
329 1837929 : if (space == LO_SPACE) {
330 : sink_->Put(kNewObject + reference_representation_ + space,
331 45 : "NewLargeObject");
332 45 : sink_->PutInt(size >> kObjectAlignmentBits, "ObjectSizeInWords");
333 90 : CHECK(!object_->IsCode());
334 45 : back_reference = serializer_->allocator()->AllocateLargeObject(size);
335 1837884 : } else if (space == MAP_SPACE) {
336 : DCHECK_EQ(Map::kSize, size);
337 56895 : back_reference = serializer_->allocator()->AllocateMap();
338 56895 : sink_->Put(kNewObject + reference_representation_ + space, "NewMap");
339 : // This is redundant, but we include it anyway.
340 56895 : sink_->PutInt(size >> kObjectAlignmentBits, "ObjectSizeInWords");
341 : } else {
342 : int fill = serializer_->PutAlignmentPrefix(object_);
343 1780989 : back_reference = serializer_->allocator()->Allocate(space, size + fill);
344 1780989 : sink_->Put(kNewObject + reference_representation_ + space, "NewObject");
345 1780989 : sink_->PutInt(size >> kObjectAlignmentBits, "ObjectSizeInWords");
346 : }
347 :
348 : #ifdef OBJECT_PRINT
349 : if (FLAG_serialization_statistics) {
350 : serializer_->CountInstanceType(map, size, space);
351 : }
352 : #endif // OBJECT_PRINT
353 :
354 : // Mark this object as already serialized.
355 : serializer_->reference_map()->Add(reinterpret_cast<void*>(object_.ptr()),
356 1837929 : back_reference);
357 :
358 : // Serialize the map (first word of the object).
359 1837929 : serializer_->SerializeObject(map, kPlain, kStartOfObject, 0);
360 1837929 : }
361 :
362 80 : int32_t Serializer::ObjectSerializer::SerializeBackingStore(
363 : void* backing_store, int32_t byte_length) {
364 : SerializerReference reference =
365 80 : serializer_->reference_map()->LookupReference(backing_store);
366 :
367 : // Serialize the off-heap backing store.
368 80 : if (!reference.is_valid()) {
369 35 : sink_->Put(kOffHeapBackingStore, "Off-heap backing store");
370 35 : sink_->PutInt(byte_length, "length");
371 : sink_->PutRaw(static_cast<byte*>(backing_store), byte_length,
372 35 : "BackingStore");
373 35 : reference = serializer_->allocator()->AllocateOffHeapBackingStore();
374 : // Mark this backing store as already serialized.
375 35 : serializer_->reference_map()->Add(backing_store, reference);
376 : }
377 :
378 80 : return static_cast<int32_t>(reference.off_heap_backing_store_index());
379 : }
380 :
381 60 : void Serializer::ObjectSerializer::SerializeJSTypedArray() {
382 60 : JSTypedArray typed_array = JSTypedArray::cast(object_);
383 : FixedTypedArrayBase elements =
384 120 : FixedTypedArrayBase::cast(typed_array->elements());
385 :
386 60 : if (!typed_array->WasDetached()) {
387 55 : if (!typed_array->is_on_heap()) {
388 : // Explicitly serialize the backing store now.
389 45 : JSArrayBuffer buffer = JSArrayBuffer::cast(typed_array->buffer());
390 45 : CHECK_LE(buffer->byte_length(), Smi::kMaxValue);
391 45 : CHECK_LE(typed_array->byte_offset(), Smi::kMaxValue);
392 45 : int32_t byte_length = static_cast<int32_t>(buffer->byte_length());
393 45 : int32_t byte_offset = static_cast<int32_t>(typed_array->byte_offset());
394 :
395 : // We need to calculate the backing store from the external pointer
396 : // because the ArrayBuffer may already have been serialized.
397 : void* backing_store = reinterpret_cast<void*>(
398 45 : reinterpret_cast<intptr_t>(elements->external_pointer()) -
399 45 : byte_offset);
400 45 : int32_t ref = SerializeBackingStore(backing_store, byte_length);
401 :
402 : // The external_pointer is the backing_store + typed_array->byte_offset.
403 : // To properly share the buffer, we set the backing store ref here. On
404 : // deserialization we re-add the byte_offset to external_pointer.
405 : elements->set_external_pointer(
406 : reinterpret_cast<void*>(Smi::FromInt(ref).ptr()));
407 : }
408 : } else {
409 : // When a JSArrayBuffer is detached, the FixedTypedArray that points to the
410 : // same backing store does not know anything about it. This fixup step finds
411 : // detached TypedArrays and clears the values in the FixedTypedArray so that
412 : // we don't try to serialize the now invalid backing store.
413 : elements->set_external_pointer(reinterpret_cast<void*>(Smi::kZero.ptr()));
414 : elements->set_length(0);
415 : }
416 60 : SerializeObject();
417 60 : }
418 :
419 50 : void Serializer::ObjectSerializer::SerializeJSArrayBuffer() {
420 : JSArrayBuffer buffer = JSArrayBuffer::cast(object_);
421 : void* backing_store = buffer->backing_store();
422 : // We cannot store byte_length larger than Smi range in the snapshot.
423 50 : CHECK_LE(buffer->byte_length(), Smi::kMaxValue);
424 50 : int32_t byte_length = static_cast<int32_t>(buffer->byte_length());
425 :
426 : // The embedder-allocated backing store only exists for the off-heap case.
427 50 : if (backing_store != nullptr) {
428 35 : int32_t ref = SerializeBackingStore(backing_store, byte_length);
429 : buffer->set_backing_store(reinterpret_cast<void*>(Smi::FromInt(ref).ptr()));
430 : }
431 50 : SerializeObject();
432 : buffer->set_backing_store(backing_store);
433 50 : }
434 :
435 226 : void Serializer::ObjectSerializer::SerializeExternalString() {
436 226 : Heap* heap = serializer_->isolate()->heap();
437 : // For external strings with known resources, we replace the resource field
438 : // with the encoded external reference, which we restore upon deserialization.
439 : // For native source code strings, we replace the resource field
440 : // with the native source id.
441 : // For the rest we serialize them to look like ordinary sequential strings.
442 226 : if (object_->map() != ReadOnlyRoots(heap).native_source_string_map()) {
443 30 : ExternalString string = ExternalString::cast(object_);
444 : Address resource = string->resource_as_address();
445 : ExternalReferenceEncoder::Value reference;
446 60 : if (serializer_->external_reference_encoder_.TryEncode(resource).To(
447 60 : &reference)) {
448 : DCHECK(reference.is_from_api());
449 10 : string->set_uint32_as_resource(reference.index());
450 10 : SerializeObject();
451 10 : string->set_address_as_resource(resource);
452 : } else {
453 20 : SerializeExternalStringAsSequentialString();
454 : }
455 : } else {
456 196 : ExternalOneByteString string = ExternalOneByteString::cast(object_);
457 : DCHECK(string->is_uncached());
458 196 : const NativesExternalStringResource* resource =
459 : reinterpret_cast<const NativesExternalStringResource*>(
460 : string->resource());
461 : // Replace the resource field with the type and index of the native source.
462 : string->set_resource(resource->EncodeForSerialization());
463 196 : SerializeObject();
464 : // Restore the resource field.
465 : string->set_resource(resource);
466 : }
467 226 : }
468 :
469 20 : void Serializer::ObjectSerializer::SerializeExternalStringAsSequentialString() {
470 : // Instead of serializing this as an external string, we serialize
471 : // an imaginary sequential string with the same content.
472 20 : ReadOnlyRoots roots(serializer_->isolate());
473 : DCHECK(object_->IsExternalString());
474 : DCHECK(object_->map() != roots.native_source_string_map());
475 : ExternalString string = ExternalString::cast(object_);
476 : int length = string->length();
477 : Map map;
478 : int content_size;
479 : int allocation_size;
480 : const byte* resource;
481 : // Find the map and size for the imaginary sequential string.
482 20 : bool internalized = object_->IsInternalizedString();
483 20 : if (object_->IsExternalOneByteString()) {
484 : map = internalized ? roots.one_byte_internalized_string_map()
485 30 : : roots.one_byte_string_map();
486 : allocation_size = SeqOneByteString::SizeFor(length);
487 : content_size = length * kCharSize;
488 : resource = reinterpret_cast<const byte*>(
489 15 : ExternalOneByteString::cast(string)->resource()->data());
490 : } else {
491 10 : map = internalized ? roots.internalized_string_map() : roots.string_map();
492 : allocation_size = SeqTwoByteString::SizeFor(length);
493 5 : content_size = length * kShortSize;
494 : resource = reinterpret_cast<const byte*>(
495 5 : ExternalTwoByteString::cast(string)->resource()->data());
496 : }
497 :
498 : AllocationSpace space =
499 20 : (allocation_size > kMaxRegularHeapObjectSize) ? LO_SPACE : OLD_SPACE;
500 20 : SerializePrologue(space, allocation_size, map);
501 :
502 : // Output the rest of the imaginary string.
503 20 : int bytes_to_output = allocation_size - HeapObject::kHeaderSize;
504 :
505 : // Output raw data header. Do not bother with common raw length cases here.
506 20 : sink_->Put(kVariableRawData, "RawDataForString");
507 20 : sink_->PutInt(bytes_to_output, "length");
508 :
509 : // Serialize string header (except for map).
510 20 : uint8_t* string_start = reinterpret_cast<uint8_t*>(string->address());
511 180 : for (int i = HeapObject::kHeaderSize; i < SeqString::kHeaderSize; i++) {
512 160 : sink_->PutSection(string_start[i], "StringHeader");
513 : }
514 :
515 : // Serialize string content.
516 20 : sink_->PutRaw(resource, content_size, "StringContent");
517 :
518 : // Since the allocation size is rounded up to object alignment, there
519 : // may be left-over bytes that need to be padded.
520 20 : int padding_size = allocation_size - SeqString::kHeaderSize - content_size;
521 : DCHECK(0 <= padding_size && padding_size < kObjectAlignment);
522 50 : for (int i = 0; i < padding_size; i++) sink_->PutSection(0, "StringPadding");
523 20 : }
524 :
525 : // Clear and later restore the next link in the weak cell or allocation site.
526 : // TODO(all): replace this with proper iteration of weak slots in serializer.
527 : class UnlinkWeakNextScope {
528 : public:
529 1837909 : explicit UnlinkWeakNextScope(Heap* heap, HeapObject object) {
530 5513727 : if (object->IsAllocationSite() &&
531 1837909 : AllocationSite::cast(object)->HasWeakNext()) {
532 0 : object_ = object;
533 0 : next_ = AllocationSite::cast(object)->weak_next();
534 : AllocationSite::cast(object)->set_weak_next(
535 0 : ReadOnlyRoots(heap).undefined_value());
536 : }
537 1837909 : }
538 :
539 1837909 : ~UnlinkWeakNextScope() {
540 1837909 : if (!object_.is_null()) {
541 : AllocationSite::cast(object_)->set_weak_next(next_,
542 0 : UPDATE_WEAK_WRITE_BARRIER);
543 : }
544 1837909 : }
545 :
546 : private:
547 : HeapObject object_;
548 : Object next_;
549 : DISALLOW_HEAP_ALLOCATION(no_gc_);
550 : };
551 :
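: // Serialize() dispatches on the object type: external strings, typed
: // arrays and array buffers have their off-heap state rewritten by the
: // specialized paths above before the generic SerializeObject() path runs.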
552 1837929 : void Serializer::ObjectSerializer::Serialize() {
553 1837929 : if (FLAG_trace_serializer) {
554 0 : PrintF(" Encoding heap object: ");
555 0 : object_->ShortPrint();
556 0 : PrintF("\n");
557 : }
558 :
559 3675858 : if (object_->IsExternalString()) {
560 226 : SerializeExternalString();
561 226 : return;
562 3676538 : } else if (!serializer_->isolate()->heap()->InReadOnlySpace(object_)) {
563 : // Only clear padding for strings outside RO_SPACE. RO_SPACE should have
564 : // been cleared elsewhere.
565 1649704 : if (object_->IsSeqOneByteString()) {
566 : // Clear padding bytes at the end. Done here to avoid having to do this
567 : // at allocation sites in generated code.
568 101283 : SeqOneByteString::cast(object_)->clear_padding();
569 1548421 : } else if (object_->IsSeqTwoByteString()) {
570 0 : SeqTwoByteString::cast(object_)->clear_padding();
571 : }
572 : }
573 1837703 : if (object_->IsJSTypedArray()) {
574 60 : SerializeJSTypedArray();
575 60 : return;
576 : }
577 1837643 : if (object_->IsJSArrayBuffer()) {
578 50 : SerializeJSArrayBuffer();
579 50 : return;
580 : }
581 :
582 : // We don't expect fillers.
583 : DCHECK(!object_->IsFiller());
584 :
585 1837593 : if (object_->IsScript()) {
586 : // Clear cached line ends.
587 2264 : Object undefined = ReadOnlyRoots(serializer_->isolate()).undefined_value();
588 1132 : Script::cast(object_)->set_line_ends(undefined);
589 : }
590 :
591 1837593 : SerializeObject();
592 : }
593 :
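: // Emits the prologue and then either serializes the object content right
: // away or, if the recursion limit is reached and the object can be
: // deferred, queues it and emits kDeferred so that the content follows
: // later via SerializeDeferredObjects().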
594 1837909 : void Serializer::ObjectSerializer::SerializeObject() {
595 1837909 : int size = object_->Size();
596 1837909 : Map map = object_->map();
597 : AllocationSpace space =
598 1837909 : MemoryChunk::FromHeapObject(object_)->owner()->identity();
599 : // Young generation large objects are tenured.
600 1837909 : if (space == NEW_LO_SPACE) {
601 : space = LO_SPACE;
602 : }
603 1837909 : SerializePrologue(space, size, map);
604 :
605 : // Serialize the rest of the object.
606 1837909 : CHECK_EQ(0, bytes_processed_so_far_);
607 1837909 : bytes_processed_so_far_ = kTaggedSize;
608 :
609 1837909 : RecursionScope recursion(serializer_);
610 : // Objects that are immediately post-processed during deserialization
611 : // cannot be deferred, since post-processing requires the object content.
612 3672675 : if ((recursion.ExceedsMaximum() && CanBeDeferred(object_)) ||
613 1834766 : serializer_->MustBeDeferred(object_)) {
614 3896 : serializer_->QueueDeferredObject(object_);
615 3896 : sink_->Put(kDeferred, "Deferring object content");
616 1837909 : return;
617 : }
618 :
619 1834013 : SerializeContent(map, size);
620 : }
621 :
622 3896 : void Serializer::ObjectSerializer::SerializeDeferred() {
623 3896 : if (FLAG_trace_serializer) {
624 0 : PrintF(" Encoding deferred heap object: ");
625 0 : object_->ShortPrint();
626 0 : PrintF("\n");
627 : }
628 :
629 3896 : int size = object_->Size();
630 3896 : Map map = object_->map();
631 : SerializerReference back_reference =
632 : serializer_->reference_map()->LookupReference(
633 3896 : reinterpret_cast<void*>(object_.ptr()));
634 : DCHECK(back_reference.is_back_reference());
635 :
636 : // Serialize the rest of the object.
637 3896 : CHECK_EQ(0, bytes_processed_so_far_);
638 3896 : bytes_processed_so_far_ = kTaggedSize;
639 :
640 : serializer_->PutAlignmentPrefix(object_);
641 7792 : sink_->Put(kNewObject + back_reference.space(), "deferred object");
642 3896 : serializer_->PutBackReference(object_, back_reference);
643 3896 : sink_->PutInt(size >> kTaggedSizeLog2, "deferred object size");
644 :
645 3896 : SerializeContent(map, size);
646 3896 : }
647 :
648 1837909 : void Serializer::ObjectSerializer::SerializeContent(Map map, int size) {
649 1837909 : UnlinkWeakNextScope unlink_weak_next(serializer_->isolate()->heap(), object_);
650 3675818 : if (object_->IsCode()) {
651 : // For code objects, output raw bytes first.
652 379269 : OutputCode(size);
653 : // Then iterate references via reloc info.
654 379269 : object_->IterateBody(map, size, this);
655 : // Finally skip to the end.
656 758538 : serializer_->FlushSkip(SkipTo(object_->address() + size));
657 : } else {
658 : // For other objects, iterate references first.
659 1458640 : object_->IterateBody(map, size, this);
660 : // Then output data payload, if any.
661 1458640 : OutputRawData(object_->address() + size);
662 1837909 : }
663 1837909 : }
664 :
665 2392017 : void Serializer::ObjectSerializer::VisitPointers(HeapObject host,
666 : ObjectSlot start,
667 : ObjectSlot end) {
668 4784034 : VisitPointers(host, MaybeObjectSlot(start), MaybeObjectSlot(end));
669 2392017 : }
670 :
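: // Walks a range of tagged slots: Smi slots and any bytes not yet output
: // are flushed as raw data, cleared weak slots become
: // kClearedWeakReference, runs of identical immortal immovable roots are
: // collapsed into repeat bytecodes, and every other heap reference goes
: // through SerializeObject, prefixed with kWeakPrefix when it is weak.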
671 2545165 : void Serializer::ObjectSerializer::VisitPointers(HeapObject host,
672 : MaybeObjectSlot start,
673 : MaybeObjectSlot end) {
674 2545165 : MaybeObjectSlot current = start;
675 7630304 : while (current < end) {
676 8881674 : while (current < end && (*current)->IsSmi()) {
677 : ++current;
678 : }
679 2539974 : if (current < end) {
680 2217696 : OutputRawData(current.address());
681 : }
682 : // TODO(ishell): Revisit this change once we stick to 32-bit compressed
683 : // tagged values.
684 4837646 : while (current < end && (*current)->IsCleared()) {
685 40023 : sink_->Put(kClearedWeakReference, "ClearedWeakReference");
686 40023 : bytes_processed_so_far_ += kTaggedSize;
687 : ++current;
688 : }
689 2539974 : HeapObject current_contents;
690 : HeapObjectReferenceType reference_type;
691 29575206 : while (current < end &&
692 23079142 : (*current)->GetHeapObject(¤t_contents, &reference_type)) {
693 : RootIndex root_index;
694 : // Repeats are not subject to the write barrier so we can only use
695 : // immortal immovable root members. They are never in new space.
696 11249963 : if (current != start &&
697 : serializer_->root_index_map()->Lookup(current_contents,
698 7400052 : &root_index) &&
699 11715488 : RootsTable::IsImmortalImmovable(root_index) &&
700 : *current == *(current - 1)) {
701 : DCHECK_EQ(reference_type, HeapObjectReferenceType::STRONG);
702 : DCHECK(!Heap::InNewSpace(current_contents));
703 : int repeat_count = 1;
704 4391850 : while (current + repeat_count < end - 1 &&
705 : *(current + repeat_count) == *current) {
706 1739768 : repeat_count++;
707 : }
708 : current += repeat_count;
709 548922 : bytes_processed_so_far_ += repeat_count * kTaggedSize;
710 548922 : if (repeat_count > kNumberOfFixedRepeat) {
711 1814 : sink_->Put(kVariableRepeat, "VariableRepeat");
712 1814 : sink_->PutInt(repeat_count, "repeat count");
713 : } else {
714 547108 : sink_->Put(kFixedRepeatStart + repeat_count, "FixedRepeat");
715 : }
716 : } else {
717 5922848 : if (reference_type == HeapObjectReferenceType::WEAK) {
718 179084 : sink_->Put(kWeakPrefix, "WeakReference");
719 : }
720 : serializer_->SerializeObject(current_contents, kPlain, kStartOfObject,
721 5922848 : 0);
722 5922848 : bytes_processed_so_far_ += kTaggedSize;
723 : ++current;
724 : }
725 : }
726 : }
727 2545165 : }
728 :
729 25 : void Serializer::ObjectSerializer::VisitEmbeddedPointer(Code host,
730 50 : RelocInfo* rinfo) {
731 : int skip = SkipTo(rinfo->target_address_address());
732 25 : HowToCode how_to_code = rinfo->IsCodedSpecially() ? kFromCode : kPlain;
733 : Object object = rinfo->target_object();
734 : serializer_->SerializeObject(HeapObject::cast(object), how_to_code,
735 50 : kStartOfObject, skip);
736 25 : bytes_processed_so_far_ += rinfo->target_address_size();
737 25 : }
738 :
739 8398 : void Serializer::ObjectSerializer::VisitExternalReference(Foreign host,
740 : Address* p) {
741 8398 : int skip = SkipTo(reinterpret_cast<Address>(p));
742 8398 : Address target = *p;
743 8398 : auto encoded_reference = serializer_->EncodeExternalReference(target);
744 8398 : if (encoded_reference.is_from_api()) {
745 110 : sink_->Put(kApiReference, "ApiRef");
746 : } else {
747 8288 : sink_->Put(kExternalReference + kPlain + kStartOfObject, "ExternalRef");
748 : }
749 8398 : sink_->PutInt(skip, "SkipB4ExternalRef");
750 8398 : sink_->PutInt(encoded_reference.index(), "reference index");
751 8398 : bytes_processed_so_far_ += kSystemPointerSize;
752 8398 : }
753 :
754 118 : void Serializer::ObjectSerializer::VisitExternalReference(Code host,
755 236 : RelocInfo* rinfo) {
756 : int skip = SkipTo(rinfo->target_address_address());
757 : Address target = rinfo->target_external_reference();
758 118 : auto encoded_reference = serializer_->EncodeExternalReference(target);
759 118 : if (encoded_reference.is_from_api()) {
760 : DCHECK(!rinfo->IsCodedSpecially());
761 0 : sink_->Put(kApiReference, "ApiRef");
762 : } else {
763 118 : HowToCode how_to_code = rinfo->IsCodedSpecially() ? kFromCode : kPlain;
764 : sink_->Put(kExternalReference + how_to_code + kStartOfObject,
765 118 : "ExternalRef");
766 : }
767 118 : sink_->PutInt(skip, "SkipB4ExternalRef");
768 : DCHECK_NE(target, kNullAddress); // Code does not reference null.
769 118 : sink_->PutInt(encoded_reference.index(), "reference index");
770 118 : bytes_processed_so_far_ += rinfo->target_address_size();
771 118 : }
772 :
773 0 : void Serializer::ObjectSerializer::VisitInternalReference(Code host,
774 0 : RelocInfo* rinfo) {
775 : // We do not use skip from last patched pc to find the pc to patch, since
776 : // target_address_address may not return addresses in ascending order when
777 : // used for internal references. External references may be stored at the
778 : // end of the code in the constant pool, whereas internal references are
779 : // inline. That would cause the skip to be negative. Instead, we store the
780 : // offset from code entry.
781 : Address entry = Code::cast(object_)->entry();
782 : DCHECK_GE(rinfo->target_internal_reference_address(), entry);
783 0 : uintptr_t pc_offset = rinfo->target_internal_reference_address() - entry;
784 : DCHECK_LE(pc_offset, Code::cast(object_)->raw_instruction_size());
785 : DCHECK_GE(rinfo->target_internal_reference(), entry);
786 0 : uintptr_t target_offset = rinfo->target_internal_reference() - entry;
787 : DCHECK_LE(target_offset, Code::cast(object_)->raw_instruction_size());
788 : sink_->Put(rinfo->rmode() == RelocInfo::INTERNAL_REFERENCE
789 : ? kInternalReference
790 : : kInternalReferenceEncoded,
791 0 : "InternalRef");
792 0 : sink_->PutInt(pc_offset, "internal ref address");
793 0 : sink_->PutInt(target_offset, "internal ref value");
794 0 : }
795 :
796 0 : void Serializer::ObjectSerializer::VisitRuntimeEntry(Code host,
797 0 : RelocInfo* rinfo) {
798 : int skip = SkipTo(rinfo->target_address_address());
799 0 : HowToCode how_to_code = rinfo->IsCodedSpecially() ? kFromCode : kPlain;
800 : Address target = rinfo->target_address();
801 0 : auto encoded_reference = serializer_->EncodeExternalReference(target);
802 : DCHECK(!encoded_reference.is_from_api());
803 0 : sink_->Put(kExternalReference + how_to_code + kStartOfObject, "ExternalRef");
804 0 : sink_->PutInt(skip, "SkipB4ExternalRef");
805 0 : sink_->PutInt(encoded_reference.index(), "reference index");
806 0 : bytes_processed_so_far_ += rinfo->target_address_size();
807 0 : }
808 :
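: // Off-heap targets (embedded builtins) are encoded by builtin index; the
: // deserializer is expected to recompute the actual address from the
: // embedded blob it runs against.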
809 379261 : void Serializer::ObjectSerializer::VisitOffHeapTarget(Code host,
810 758522 : RelocInfo* rinfo) {
811 : DCHECK(FLAG_embedded_builtins);
812 : STATIC_ASSERT(EmbeddedData::kTableSize == Builtins::builtin_count);
813 :
814 : Address addr = rinfo->target_off_heap_target();
815 379261 : CHECK_NE(kNullAddress, addr);
816 :
817 379261 : Code target = InstructionStream::TryLookupCode(serializer_->isolate(), addr);
818 379261 : CHECK(Builtins::IsIsolateIndependentBuiltin(target));
819 :
820 : int skip = SkipTo(rinfo->target_address_address());
821 379261 : sink_->Put(kOffHeapTarget, "OffHeapTarget");
822 379261 : sink_->PutInt(skip, "SkipB4OffHeapTarget");
823 379261 : sink_->PutInt(target->builtin_index(), "builtin index");
824 379261 : bytes_processed_so_far_ += rinfo->target_address_size();
825 379261 : }
826 :
827 : namespace {
828 :
829 : class CompareRelocInfo {
830 : public:
831 : bool operator()(RelocInfo x, RelocInfo y) {
832 : // Everything that does not use target_address_address will compare equal.
833 : Address x_num = 0;
834 : Address y_num = 0;
835 589 : if (x.HasTargetAddressAddress()) x_num = x.target_address_address();
836 589 : if (y.HasTargetAddressAddress()) y_num = y.target_address_address();
837 : return x_num > y_num;
838 : }
839 : };
840 :
841 : } // namespace
842 :
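: // CompareRelocInfo inverts the comparison, so the priority queue pops
: // relocation entries in ascending target_address_address order; SkipTo()
: // depends on visiting target addresses in ascending order.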
843 1137942 : void Serializer::ObjectSerializer::VisitRelocInfo(RelocIterator* it) {
844 : std::priority_queue<RelocInfo, std::vector<RelocInfo>, CompareRelocInfo>
845 758538 : reloc_queue;
846 1517346 : for (; !it->done(); it->next()) {
847 379404 : reloc_queue.push(*it->rinfo());
848 : }
849 758673 : while (!reloc_queue.empty()) {
850 379404 : RelocInfo rinfo = reloc_queue.top();
851 379404 : reloc_queue.pop();
852 379404 : rinfo.Visit(this);
853 : }
854 379269 : }
855 :
856 0 : void Serializer::ObjectSerializer::VisitCodeTarget(Code host,
857 0 : RelocInfo* rinfo) {
858 : int skip = SkipTo(rinfo->target_address_address());
859 0 : Code object = Code::GetCodeFromTargetAddress(rinfo->target_address());
860 0 : serializer_->SerializeObject(object, kFromCode, kInnerPointer, skip);
861 0 : bytes_processed_so_far_ += rinfo->target_address_size();
862 0 : }
863 :
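: // Emits the raw bytes between the last processed offset and up_to, using
: // the compact kFixedRawData encoding for small tagged-size-aligned chunks
: // and kVariableRawData otherwise. For bytecode arrays the age byte is
: // replaced with kNoAgeBytecodeAge so that concurrent updates by the GC do
: // not leak into the snapshot.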
864 3676336 : void Serializer::ObjectSerializer::OutputRawData(Address up_to) {
865 : Address object_start = object_->address();
866 3676336 : int base = bytes_processed_so_far_;
867 3676336 : int up_to_offset = static_cast<int>(up_to - object_start);
868 3676336 : int to_skip = up_to_offset - bytes_processed_so_far_;
869 : int bytes_to_output = to_skip;
870 3676336 : bytes_processed_so_far_ += to_skip;
871 : DCHECK_GE(to_skip, 0);
872 3676336 : if (bytes_to_output != 0) {
873 : DCHECK(to_skip == bytes_to_output);
874 1892397 : if (IsAligned(bytes_to_output, kObjectAlignment) &&
875 : bytes_to_output <= kNumberOfFixedRawData * kTaggedSize) {
876 1891242 : int size_in_words = bytes_to_output >> kTaggedSizeLog2;
877 1891242 : sink_->PutSection(kFixedRawDataStart + size_in_words, "FixedRawData");
878 : } else {
879 1155 : sink_->Put(kVariableRawData, "VariableRawData");
880 1155 : sink_->PutInt(bytes_to_output, "length");
881 : }
882 : #ifdef MEMORY_SANITIZER
883 : // Check that we do not serialize uninitialized memory.
884 : __msan_check_mem_is_initialized(
885 : reinterpret_cast<void*>(object_start + base), bytes_to_output);
886 : #endif // MEMORY_SANITIZER
887 3784794 : if (object_->IsBytecodeArray()) {
888 : // The code age byte can be changed concurrently by GC.
889 3060 : const int bytes_to_age_byte = BytecodeArray::kBytecodeAgeOffset - base;
890 3060 : if (0 <= bytes_to_age_byte && bytes_to_age_byte < bytes_to_output) {
891 1530 : sink_->PutRaw(reinterpret_cast<byte*>(object_start + base),
892 1530 : bytes_to_age_byte, "Bytes");
893 1530 : byte bytecode_age = BytecodeArray::kNoAgeBytecodeAge;
894 1530 : sink_->PutRaw(&bytecode_age, 1, "Bytes");
895 1530 : const int bytes_written = bytes_to_age_byte + 1;
896 : sink_->PutRaw(
897 1530 : reinterpret_cast<byte*>(object_start + base + bytes_written),
898 3060 : bytes_to_output - bytes_written, "Bytes");
899 : } else {
900 1530 : sink_->PutRaw(reinterpret_cast<byte*>(object_start + base),
901 1530 : bytes_to_output, "Bytes");
902 : }
903 : } else {
904 1889337 : sink_->PutRaw(reinterpret_cast<byte*>(object_start + base),
905 1889337 : bytes_to_output, "Bytes");
906 : }
907 : }
908 3676336 : }
909 :
910 0 : int Serializer::ObjectSerializer::SkipTo(Address to) {
911 : Address object_start = object_->address();
912 767071 : int up_to_offset = static_cast<int>(to - object_start);
913 767071 : int to_skip = up_to_offset - bytes_processed_so_far_;
914 767071 : bytes_processed_so_far_ += to_skip;
915 : // This assert will fail if the reloc info gives us the target_address_address
916 : // locations in a non-ascending order. We make sure this doesn't happen by
917 : // sorting the relocation info.
918 : DCHECK_GE(to_skip, 0);
919 0 : return to_skip;
920 : }
921 :
922 379269 : void Serializer::ObjectSerializer::OutputCode(int size) {
923 : DCHECK_EQ(kTaggedSize, bytes_processed_so_far_);
924 : Code on_heap_code = Code::cast(object_);
925 : // To make snapshots reproducible, we make a copy of the code object
926 : // and wipe all pointers in the copy, which we then serialize.
927 379269 : Code off_heap_code = serializer_->CopyCode(on_heap_code);
928 : int mode_mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
929 : RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
930 : RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
931 : RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) |
932 : RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED) |
933 : RelocInfo::ModeMask(RelocInfo::OFF_HEAP_TARGET) |
934 : RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
935 : // With pointer compression enabled, normal accessors no longer work for
936 : // off-heap objects, so we have to get the relocation info data via the
937 : // on-heap code object.
938 379269 : ByteArray relocation_info = on_heap_code->unchecked_relocation_info();
939 1137942 : for (RelocIterator it(off_heap_code, relocation_info, mode_mask); !it.done();
940 379404 : it.next()) {
941 : RelocInfo* rinfo = it.rinfo();
942 : rinfo->WipeOut();
943 : }
944 : // We need to wipe out the header fields *after* wiping out the
945 : // relocations, because some of these fields are needed for the latter.
946 379269 : off_heap_code->WipeOutHeader();
947 :
948 379269 : Address start = off_heap_code->address() + Code::kDataStart;
949 379269 : int bytes_to_output = size - Code::kDataStart;
950 :
951 379269 : sink_->Put(kVariableRawCode, "VariableRawCode");
952 379269 : sink_->PutInt(bytes_to_output, "length");
953 :
954 : #ifdef MEMORY_SANITIZER
955 : // Check that we do not serialize uninitialized memory.
956 : __msan_check_mem_is_initialized(reinterpret_cast<void*>(start),
957 : bytes_to_output);
958 : #endif // MEMORY_SANITIZER
959 379269 : sink_->PutRaw(reinterpret_cast<byte*>(start), bytes_to_output, "Code");
960 379269 : }
961 :
962 : } // namespace internal
963 183867 : } // namespace v8
|