Line data Source code
1 : // Copyright 2016 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/snapshot/serializer.h"
6 :
7 : #include "src/assembler-inl.h"
8 : #include "src/objects/map.h"
9 : #include "src/snapshot/builtin-serializer-allocator.h"
10 : #include "src/snapshot/natives.h"
11 :
12 : namespace v8 {
13 : namespace internal {
14 :
15 : template <class AllocatorT>
16 891 : Serializer<AllocatorT>::Serializer(Isolate* isolate)
17 : : isolate_(isolate),
18 : external_reference_encoder_(isolate),
19 : root_index_map_(isolate),
20 3564 : allocator_(this) {
21 : #ifdef OBJECT_PRINT
22 : if (FLAG_serialization_statistics) {
23 : instance_type_count_ = NewArray<int>(kInstanceTypes);
24 : instance_type_size_ = NewArray<size_t>(kInstanceTypes);
25 : for (int i = 0; i < kInstanceTypes; i++) {
26 : instance_type_count_[i] = 0;
27 : instance_type_size_[i] = 0;
28 : }
29 : } else {
30 : instance_type_count_ = nullptr;
31 : instance_type_size_ = nullptr;
32 : }
33 : #endif // OBJECT_PRINT
34 891 : }
35 :
36 : template <class AllocatorT>
37 891 : Serializer<AllocatorT>::~Serializer() {
38 891 : if (code_address_map_ != nullptr) delete code_address_map_;
39 : #ifdef OBJECT_PRINT
40 : if (instance_type_count_ != nullptr) {
41 : DeleteArray(instance_type_count_);
42 : DeleteArray(instance_type_size_);
43 : }
44 : #endif // OBJECT_PRINT
45 3403 : }
46 :
47 : #ifdef OBJECT_PRINT
48 : template <class AllocatorT>
49 : void Serializer<AllocatorT>::CountInstanceType(Map* map, int size) {
50 : int instance_type = map->instance_type();
51 : instance_type_count_[instance_type]++;
52 : instance_type_size_[instance_type] += size;
53 : }
54 : #endif // OBJECT_PRINT
55 :
56 : template <class AllocatorT>
57 891 : void Serializer<AllocatorT>::OutputStatistics(const char* name) {
58 1782 : if (!FLAG_serialization_statistics) return;
59 :
60 0 : PrintF("%s:\n", name);
61 0 : allocator()->OutputStatistics();
62 :
63 : #ifdef OBJECT_PRINT
64 : PrintF(" Instance types (count and bytes):\n");
65 : #define PRINT_INSTANCE_TYPE(Name) \
66 : if (instance_type_count_[Name]) { \
67 : PrintF("%10d %10" PRIuS " %s\n", instance_type_count_[Name], \
68 : instance_type_size_[Name], #Name); \
69 : }
70 : INSTANCE_TYPE_LIST(PRINT_INSTANCE_TYPE)
71 : #undef PRINT_INSTANCE_TYPE
72 : PrintF("\n");
73 : #endif // OBJECT_PRINT
74 : }
75 :
76 : template <class AllocatorT>
77 730 : void Serializer<AllocatorT>::SerializeDeferredObjects() {
78 5088 : while (!deferred_objects_.empty()) {
79 3628 : HeapObject* obj = deferred_objects_.back();
80 : deferred_objects_.pop_back();
81 3628 : ObjectSerializer obj_serializer(this, obj, &sink_, kPlain, kStartOfObject);
82 3628 : obj_serializer.SerializeDeferred();
83 : }
84 : sink_.Put(kSynchronize, "Finished with deferred objects");
85 730 : }
86 :
87 : template <class AllocatorT>
88 645563 : bool Serializer<AllocatorT>::MustBeDeferred(HeapObject* object) {
89 645563 : return false;
90 : }
91 :
92 : template <class AllocatorT>
93 489598 : void Serializer<AllocatorT>::VisitRootPointers(Root root, Object** start,
94 : Object** end) {
95 : // Builtins are serialized in a separate pass by the BuiltinSerializer.
96 979196 : if (root == Root::kBuiltins) return;
97 :
98 492387 : for (Object** current = start; current < end; current++) {
99 984774 : if ((*current)->IsSmi()) {
100 2576 : PutSmi(Smi::cast(*current));
101 : } else {
102 489811 : SerializeObject(HeapObject::cast(*current), kPlain, kStartOfObject, 0);
103 : }
104 : }
105 : }
106 :
107 : #ifdef DEBUG
108 : template <class AllocatorT>
109 : void Serializer<AllocatorT>::PrintStack() {
110 : for (const auto o : stack_) {
111 : o->Print();
112 : PrintF("\n");
113 : }
114 : }
115 : #endif // DEBUG
116 :
117 : template <class AllocatorT>
118 6740124 : bool Serializer<AllocatorT>::SerializeHotObject(HeapObject* obj,
119 : HowToCode how_to_code,
120 : WhereToPoint where_to_point,
121 : int skip) {
122 6740124 : if (how_to_code != kPlain || where_to_point != kStartOfObject) return false;
123 : // Encode a reference to a hot object by its index in the working set.
124 : int index = hot_objects_.Find(obj);
125 6617144 : if (index == HotObjectsList::kNotFound) return false;
126 : DCHECK(index >= 0 && index < kNumberOfHotObjects);
127 1142248 : if (FLAG_trace_serializer) {
128 0 : PrintF(" Encoding hot object %d:", index);
129 0 : obj->ShortPrint();
130 0 : PrintF("\n");
131 : }
132 1142248 : if (skip != 0) {
133 0 : sink_.Put(kHotObjectWithSkip + index, "HotObjectWithSkip");
134 0 : sink_.PutInt(skip, "HotObjectSkipDistance");
135 : } else {
136 1142248 : sink_.Put(kHotObject + index, "HotObject");
137 : }
138 : return true;
139 : }
140 :
141 : template <class AllocatorT>
142 2427089 : bool Serializer<AllocatorT>::SerializeBackReference(HeapObject* obj,
143 : HowToCode how_to_code,
144 : WhereToPoint where_to_point,
145 : int skip) {
146 2427089 : SerializerReference reference = reference_map_.Lookup(obj);
147 2427089 : if (!reference.is_valid()) return false;
148 : // Encode the location of an already deserialized object in order to write
149 : // its location into a later object. We can encode the location as an
150 : // offset from the start of the deserialized objects or as an offset
151 : // backwards from the current allocation pointer.
152 726185 : if (reference.is_attached_reference()) {
153 1009 : FlushSkip(skip);
154 1009 : if (FLAG_trace_serializer) {
155 0 : PrintF(" Encoding attached reference %d\n",
156 : reference.attached_reference_index());
157 : }
158 1009 : PutAttachedReference(reference, how_to_code, where_to_point);
159 : } else {
160 : DCHECK(reference.is_back_reference());
161 725176 : if (FLAG_trace_serializer) {
162 0 : PrintF(" Encoding back reference to: ");
163 0 : obj->ShortPrint();
164 0 : PrintF("\n");
165 : }
166 :
167 725176 : PutAlignmentPrefix(obj);
168 : AllocationSpace space = reference.space();
169 725176 : if (skip == 0) {
170 601223 : sink_.Put(kBackref + how_to_code + where_to_point + space, "BackRef");
171 : } else {
172 : sink_.Put(kBackrefWithSkip + how_to_code + where_to_point + space,
173 123953 : "BackRefWithSkip");
174 123953 : sink_.PutInt(skip, "BackRefSkipDistance");
175 : }
176 725176 : PutBackReference(obj, reference);
177 : }
178 : return true;
179 : }
180 :
181 : template <class AllocatorT>
182 7770771 : bool Serializer<AllocatorT>::SerializeBuiltinReference(
183 : HeapObject* obj, HowToCode how_to_code, WhereToPoint where_to_point,
184 : int skip, BuiltinReferenceSerializationMode mode) {
185 7770771 : if (!obj->IsCode()) return false;
186 :
187 : Code* code = Code::cast(obj);
188 : int builtin_index = code->builtin_index();
189 1477454 : if (builtin_index < 0) return false;
190 :
191 : DCHECK((how_to_code == kPlain && where_to_point == kStartOfObject) ||
192 : (how_to_code == kFromCode));
193 : DCHECK_LT(builtin_index, Builtins::builtin_count);
194 : DCHECK_LE(0, builtin_index);
195 :
196 1395209 : if (mode == kCanonicalizeCompileLazy &&
197 : code->is_interpreter_trampoline_builtin()) {
198 : builtin_index = static_cast<int>(Builtins::kCompileLazy);
199 : }
200 :
201 864862 : if (FLAG_trace_serializer) {
202 0 : PrintF(" Encoding builtin reference: %s\n",
203 0 : isolate()->builtins()->name(builtin_index));
204 : }
205 :
206 864862 : FlushSkip(skip);
207 864862 : sink_.Put(kBuiltin + how_to_code + where_to_point, "Builtin");
208 864862 : sink_.PutInt(builtin_index, "builtin_index");
209 :
210 864862 : return true;
211 : }
212 :
213 : template <class AllocatorT>
214 3622231 : void Serializer<AllocatorT>::PutRoot(
215 : int root_index, HeapObject* object,
216 : SerializerDeserializer::HowToCode how_to_code,
217 : SerializerDeserializer::WhereToPoint where_to_point, int skip) {
218 3622231 : if (FLAG_trace_serializer) {
219 0 : PrintF(" Encoding root %d:", root_index);
220 0 : object->ShortPrint();
221 0 : PrintF("\n");
222 : }
223 :
224 : // Assert that the first 32 root array items are a conscious choice. They are
225 : // chosen so that the most common ones can be encoded more efficiently.
226 : STATIC_ASSERT(Heap::kEmptyDescriptorArrayRootIndex ==
227 : kNumberOfRootArrayConstants - 1);
228 :
229 6897793 : if (how_to_code == kPlain && where_to_point == kStartOfObject &&
230 : root_index < kNumberOfRootArrayConstants &&
231 : !isolate()->heap()->InNewSpace(object)) {
232 3275562 : if (skip == 0) {
233 3275401 : sink_.Put(kRootArrayConstants + root_index, "RootConstant");
234 : } else {
235 161 : sink_.Put(kRootArrayConstantsWithSkip + root_index, "RootConstant");
236 161 : sink_.PutInt(skip, "SkipInPutRoot");
237 : }
238 : } else {
239 346669 : FlushSkip(skip);
240 346669 : sink_.Put(kRootArray + how_to_code + where_to_point, "RootSerialization");
241 346669 : sink_.PutInt(root_index, "root_index");
242 : hot_objects_.Add(object);
243 : }
244 3622231 : }
245 :
246 : template <class AllocatorT>
247 2737 : void Serializer<AllocatorT>::PutSmi(Smi* smi) {
248 : sink_.Put(kOnePointerRawData, "Smi");
249 : byte* bytes = reinterpret_cast<byte*>(&smi);
250 24633 : for (int i = 0; i < kPointerSize; i++) sink_.Put(bytes[i], "Byte");
251 2737 : }
252 :
253 : template <class AllocatorT>
254 729019 : void Serializer<AllocatorT>::PutBackReference(HeapObject* object,
255 : SerializerReference reference) {
256 : DCHECK(allocator()->BackReferenceIsAlreadyAllocated(reference));
257 729019 : sink_.PutInt(reference.back_reference(), "BackRefValue");
258 : hot_objects_.Add(object);
259 729019 : }
260 :
261 : template <class AllocatorT>
262 1102 : void Serializer<AllocatorT>::PutAttachedReference(SerializerReference reference,
263 : HowToCode how_to_code,
264 : WhereToPoint where_to_point) {
265 : DCHECK(reference.is_attached_reference());
266 : DCHECK((how_to_code == kPlain && where_to_point == kStartOfObject) ||
267 : (how_to_code == kFromCode && where_to_point == kStartOfObject) ||
268 : (how_to_code == kFromCode && where_to_point == kInnerPointer));
269 1102 : sink_.Put(kAttachedReference + how_to_code + where_to_point, "AttachedRef");
270 1102 : sink_.PutInt(reference.attached_reference_index(), "AttachedRefIndex");
271 1102 : }
272 :
273 : template <class AllocatorT>
274 2180464 : int Serializer<AllocatorT>::PutAlignmentPrefix(HeapObject* object) {
275 : AllocationAlignment alignment = object->RequiredAlignment();
276 : if (alignment != kWordAligned) {
277 : DCHECK(1 <= alignment && alignment <= 3);
278 : byte prefix = (kAlignmentPrefix - 1) + alignment;
279 : sink_.Put(prefix, "Alignment");
280 : return Heap::GetMaximumFillToAlign(alignment);
281 : }
282 : return 0;
283 : }
284 :
285 : template <class AllocatorT>
286 55 : void Serializer<AllocatorT>::PutNextChunk(int space) {
287 : sink_.Put(kNextChunk, "NextChunk");
288 55 : sink_.Put(space, "NextChunkSpace");
289 55 : }
290 :
291 : template <class AllocatorT>
292 891 : void Serializer<AllocatorT>::Pad() {
293 : // The non-branching GetInt will read up to 3 bytes too far, so we need
294 : // to pad the snapshot to make sure we don't read over the end.
295 3564 : for (unsigned i = 0; i < sizeof(int32_t) - 1; i++) {
296 : sink_.Put(kNop, "Padding");
297 : }
298 : // Pad up to pointer size for checksum.
299 4144 : while (!IsAligned(sink_.Position(), kPointerAlignment)) {
300 : sink_.Put(kNop, "Padding");
301 : }
302 891 : }
303 :
304 : template <class AllocatorT>
305 312 : void Serializer<AllocatorT>::InitializeCodeAddressMap() {
306 312 : isolate_->InitializeLoggingAndCounters();
307 312 : code_address_map_ = new CodeAddressMap(isolate_);
308 312 : }
309 :
310 : template <class AllocatorT>
311 1206 : Code* Serializer<AllocatorT>::CopyCode(Code* code) {
312 : code_buffer_.clear(); // Clear buffer without deleting backing store.
313 : int size = code->CodeSize();
314 : code_buffer_.insert(code_buffer_.end(), code->address(),
315 1206 : code->address() + size);
316 1206 : return Code::cast(HeapObject::FromAddress(&code_buffer_.front()));
317 : }
318 :
319 : template <class AllocatorT>
320 1511447 : void Serializer<AllocatorT>::ObjectSerializer::SerializePrologue(
321 : AllocationSpace space, int size, Map* map) {
322 1511447 : if (serializer_->code_address_map_) {
323 : const char* code_name =
324 1348056 : serializer_->code_address_map_->Lookup(object_->address());
325 1348056 : LOG(serializer_->isolate_,
326 : CodeNameEvent(object_->address(), sink_->Position(), code_name));
327 : }
328 :
329 : SerializerReference back_reference;
330 1511447 : if (space == LO_SPACE) {
331 : sink_->Put(kNewObject + reference_representation_ + space,
332 40 : "NewLargeObject");
333 40 : sink_->PutInt(size >> kObjectAlignmentBits, "ObjectSizeInWords");
334 80 : if (object_->IsCode()) {
335 0 : sink_->Put(EXECUTABLE, "executable large object");
336 : } else {
337 40 : sink_->Put(NOT_EXECUTABLE, "not executable large object");
338 : }
339 40 : back_reference = serializer_->allocator()->AllocateLargeObject(size);
340 1511407 : } else if (space == MAP_SPACE) {
341 : DCHECK_EQ(Map::kSize, size);
342 59747 : back_reference = serializer_->allocator()->AllocateMap();
343 59747 : sink_->Put(kNewObject + reference_representation_ + space, "NewMap");
344 : // This is redundant, but we include it anyways.
345 59747 : sink_->PutInt(size >> kObjectAlignmentBits, "ObjectSizeInWords");
346 : } else {
347 1451660 : int fill = serializer_->PutAlignmentPrefix(object_);
348 1451660 : back_reference = serializer_->allocator()->Allocate(space, size + fill);
349 1451660 : sink_->Put(kNewObject + reference_representation_ + space, "NewObject");
350 1451660 : sink_->PutInt(size >> kObjectAlignmentBits, "ObjectSizeInWords");
351 : }
352 :
353 : #ifdef OBJECT_PRINT
354 : if (FLAG_serialization_statistics) {
355 : serializer_->CountInstanceType(map, size);
356 : }
357 : #endif // OBJECT_PRINT
358 :
359 : // Mark this object as already serialized.
360 1511447 : serializer_->reference_map()->Add(object_, back_reference);
361 :
362 : // Serialize the map (first word of the object).
363 1511447 : serializer_->SerializeObject(map, kPlain, kStartOfObject, 0);
364 1511447 : }
365 :
366 : template <class AllocatorT>
367 65 : int32_t Serializer<AllocatorT>::ObjectSerializer::SerializeBackingStore(
368 : void* backing_store, int32_t byte_length) {
369 : SerializerReference reference =
370 65 : serializer_->reference_map()->Lookup(backing_store);
371 :
372 : // Serialize the off-heap backing store.
373 65 : if (!reference.is_valid()) {
374 30 : sink_->Put(kOffHeapBackingStore, "Off-heap backing store");
375 30 : sink_->PutInt(byte_length, "length");
376 30 : sink_->PutRaw(static_cast<byte*>(backing_store), byte_length,
377 30 : "BackingStore");
378 30 : reference = serializer_->allocator()->AllocateOffHeapBackingStore();
379 : // Mark this backing store as already serialized.
380 30 : serializer_->reference_map()->Add(backing_store, reference);
381 : }
382 :
383 65 : return static_cast<int32_t>(reference.off_heap_backing_store_index());
384 : }
385 :
386 : // When a JSArrayBuffer is neutered, the FixedTypedArray that points to the
387 : // same backing store does not know anything about it. This fixup step finds
388 : // neutered TypedArrays and clears the values in the FixedTypedArray so that
389 : // we don't try to serialize the now invalid backing store.
390 : template <class AllocatorT>
391 50 : void Serializer<AllocatorT>::ObjectSerializer::FixupIfNeutered() {
392 50 : JSTypedArray* array = JSTypedArray::cast(object_);
393 100 : if (!array->WasNeutered()) return;
394 :
395 : FixedTypedArrayBase* fta = FixedTypedArrayBase::cast(array->elements());
396 : DCHECK_NULL(fta->base_pointer());
397 : fta->set_external_pointer(Smi::kZero);
398 : fta->set_length(0);
399 : }
400 :
401 : template <class AllocatorT>
402 45 : void Serializer<AllocatorT>::ObjectSerializer::SerializeJSArrayBuffer() {
403 45 : JSArrayBuffer* buffer = JSArrayBuffer::cast(object_);
404 : void* backing_store = buffer->backing_store();
405 : // We cannot store byte_length larger than Smi range in the snapshot.
406 : // Attempt to make sure that NumberToInt32 produces something sensible.
407 45 : CHECK(buffer->byte_length()->IsSmi());
408 45 : int32_t byte_length = NumberToInt32(buffer->byte_length());
409 :
410 : // The embedder-allocated backing store only exists for the off-heap case.
411 45 : if (backing_store != nullptr) {
412 30 : int32_t ref = SerializeBackingStore(backing_store, byte_length);
413 : buffer->set_backing_store(Smi::FromInt(ref));
414 : }
415 45 : SerializeObject();
416 45 : }
417 :
418 : template <class AllocatorT>
419 1499 : void Serializer<AllocatorT>::ObjectSerializer::SerializeFixedTypedArray() {
420 1499 : FixedTypedArrayBase* fta = FixedTypedArrayBase::cast(object_);
421 : void* backing_store = fta->DataPtr();
422 : // We cannot store byte_length larger than Smi range in the snapshot.
423 2998 : CHECK_LT(fta->ByteLength(), Smi::kMaxValue);
424 1499 : int32_t byte_length = static_cast<int32_t>(fta->ByteLength());
425 :
426 : // The heap contains empty FixedTypedArrays for each type, with a byte_length
427 : // of 0 (e.g. empty_fixed_uint8_array). These look like they are 'on-heap'
428 : // but have no data to copy, so we skip the backing store here.
429 :
430 : // The embedder-allocated backing store only exists for the off-heap case.
431 1544 : if (byte_length > 0 && fta->base_pointer() == nullptr) {
432 35 : int32_t ref = SerializeBackingStore(backing_store, byte_length);
433 : fta->set_external_pointer(Smi::FromInt(ref));
434 : }
435 1499 : SerializeObject();
436 1499 : }
437 :
438 : template <class AllocatorT>
439 1335 : void Serializer<AllocatorT>::ObjectSerializer::SerializeExternalString() {
440 2670 : Heap* heap = serializer_->isolate()->heap();
441 2670 : if (object_->map() != heap->native_source_string_map()) {
442 : // Usually we cannot recreate resources for external strings. To work
443 : // around this, external strings are serialized to look like ordinary
444 : // sequential strings.
445 : // The exception are native source code strings, since we can recreate
446 : // their resources.
447 25 : SerializeExternalStringAsSequentialString();
448 : } else {
449 1310 : ExternalOneByteString* string = ExternalOneByteString::cast(object_);
450 : DCHECK(string->is_short());
451 1310 : const NativesExternalStringResource* resource =
452 : reinterpret_cast<const NativesExternalStringResource*>(
453 : string->resource());
454 : // Replace the resource field with the type and index of the native source.
455 : string->set_resource(resource->EncodeForSerialization());
456 1310 : SerializeObject();
457 : // Restore the resource field.
458 : string->set_resource(resource);
459 : }
460 1335 : }
461 :
462 : template <class AllocatorT>
463 25 : void Serializer<
464 : AllocatorT>::ObjectSerializer::SerializeExternalStringAsSequentialString() {
465 : // Instead of serializing this as an external string, we serialize
466 : // an imaginary sequential string with the same content.
467 25 : Isolate* isolate = serializer_->isolate();
468 : DCHECK(object_->IsExternalString());
469 : DCHECK(object_->map() != isolate->heap()->native_source_string_map());
470 25 : ExternalString* string = ExternalString::cast(object_);
471 : int length = string->length();
472 : Map* map;
473 : int content_size;
474 : int allocation_size;
475 : const byte* resource;
476 : // Find the map and size for the imaginary sequential string.
477 : bool internalized = object_->IsInternalizedString();
478 50 : if (object_->IsExternalOneByteString()) {
479 20 : map = internalized ? isolate->heap()->one_byte_internalized_string_map()
480 10 : : isolate->heap()->one_byte_string_map();
481 : allocation_size = SeqOneByteString::SizeFor(length);
482 : content_size = length * kCharSize;
483 20 : resource = reinterpret_cast<const byte*>(
484 20 : ExternalOneByteString::cast(string)->resource()->data());
485 : } else {
486 5 : map = internalized ? isolate->heap()->internalized_string_map()
487 0 : : isolate->heap()->string_map();
488 : allocation_size = SeqTwoByteString::SizeFor(length);
489 5 : content_size = length * kShortSize;
490 5 : resource = reinterpret_cast<const byte*>(
491 5 : ExternalTwoByteString::cast(string)->resource()->data());
492 : }
493 :
494 : AllocationSpace space =
495 25 : (allocation_size > kMaxRegularHeapObjectSize) ? LO_SPACE : OLD_SPACE;
496 25 : SerializePrologue(space, allocation_size, map);
497 :
498 : // Output the rest of the imaginary string.
499 25 : int bytes_to_output = allocation_size - HeapObject::kHeaderSize;
500 :
501 : // Output raw data header. Do not bother with common raw length cases here.
502 25 : sink_->Put(kVariableRawData, "RawDataForString");
503 25 : sink_->PutInt(bytes_to_output, "length");
504 :
505 : // Serialize string header (except for map).
506 25 : Address string_start = string->address();
507 425 : for (int i = HeapObject::kHeaderSize; i < SeqString::kHeaderSize; i++) {
508 400 : sink_->PutSection(string_start[i], "StringHeader");
509 : }
510 :
511 : // Serialize string content.
512 25 : sink_->PutRaw(resource, content_size, "StringContent");
513 :
514 : // Since the allocation size is rounded up to object alignment, there
515 : // may be left-over bytes that need to be padded.
516 25 : int padding_size = allocation_size - SeqString::kHeaderSize - content_size;
517 : DCHECK(0 <= padding_size && padding_size < kObjectAlignment);
518 70 : for (int i = 0; i < padding_size; i++) sink_->PutSection(0, "StringPadding");
519 25 : }
520 :
521 : // Clear and later restore the next link in the weak cell or allocation site.
522 : // TODO(all): replace this with proper iteration of weak slots in serializer.
523 : class UnlinkWeakNextScope {
524 : public:
525 1511422 : explicit UnlinkWeakNextScope(HeapObject* object) : object_(nullptr) {
526 1511422 : if (object->IsAllocationSite()) {
527 0 : object_ = object;
528 0 : next_ = AllocationSite::cast(object)->weak_next();
529 : AllocationSite::cast(object)->set_weak_next(
530 0 : object->GetHeap()->undefined_value());
531 : }
532 1511422 : }
533 :
534 : ~UnlinkWeakNextScope() {
535 1511422 : if (object_ != nullptr) {
536 : AllocationSite::cast(object_)->set_weak_next(next_,
537 0 : UPDATE_WEAK_WRITE_BARRIER);
538 : }
539 : }
540 :
541 : private:
542 : HeapObject* object_;
543 : Object* next_;
544 : DisallowHeapAllocation no_gc_;
545 : };
546 :
547 : template <class AllocatorT>
548 1511447 : void Serializer<AllocatorT>::ObjectSerializer::Serialize() {
549 1511447 : if (FLAG_trace_serializer) {
550 0 : PrintF(" Encoding heap object: ");
551 0 : object_->ShortPrint();
552 0 : PrintF("\n");
553 : }
554 :
555 3022894 : if (object_->IsExternalString()) {
556 1335 : SerializeExternalString();
557 1335 : return;
558 3020224 : } else if (object_->IsSeqOneByteString()) {
559 : // Clear padding bytes at the end. Done here to avoid having to do this
560 : // at allocation sites in generated code.
561 177406 : SeqOneByteString::cast(object_)->clear_padding();
562 2665412 : } else if (object_->IsSeqTwoByteString()) {
563 0 : SeqTwoByteString::cast(object_)->clear_padding();
564 : }
565 3020224 : if (object_->IsJSTypedArray()) {
566 50 : FixupIfNeutered();
567 : }
568 3020224 : if (object_->IsJSArrayBuffer()) {
569 45 : SerializeJSArrayBuffer();
570 45 : return;
571 : }
572 3020134 : if (object_->IsFixedTypedArrayBase()) {
573 1499 : SerializeFixedTypedArray();
574 1499 : return;
575 : }
576 :
577 : // We don't expect fillers.
578 : DCHECK(!object_->IsFiller());
579 :
580 3017136 : if (object_->IsScript()) {
581 : // Clear cached line ends.
582 4170 : Object* undefined = serializer_->isolate()->heap()->undefined_value();
583 2085 : Script::cast(object_)->set_line_ends(undefined);
584 : }
585 :
586 1508568 : SerializeObject();
587 : }
588 :
589 : template <class AllocatorT>
590 1511422 : void Serializer<AllocatorT>::ObjectSerializer::SerializeObject() {
591 1511422 : int size = object_->Size();
592 1511422 : Map* map = object_->map();
593 : AllocationSpace space =
594 3022844 : MemoryChunk::FromAddress(object_->address())->owner()->identity();
595 1511422 : SerializePrologue(space, size, map);
596 :
597 : // Serialize the rest of the object.
598 1511422 : CHECK_EQ(0, bytes_processed_so_far_);
599 1511422 : bytes_processed_so_far_ = kPointerSize;
600 :
601 1511422 : RecursionScope recursion(serializer_);
602 : // Objects that are immediately post processed during deserialization
603 : // cannot be deferred, since post processing requires the object content.
604 1511422 : if ((recursion.ExceedsMaximum() && CanBeDeferred(object_)) ||
605 : serializer_->MustBeDeferred(object_)) {
606 3628 : serializer_->QueueDeferredObject(object_);
607 3628 : sink_->Put(kDeferred, "Deferring object content");
608 1511422 : return;
609 : }
610 :
611 1507794 : SerializeContent(map, size);
612 : }
613 :
614 : template <class AllocatorT>
615 3628 : void Serializer<AllocatorT>::ObjectSerializer::SerializeDeferred() {
616 3628 : if (FLAG_trace_serializer) {
617 0 : PrintF(" Encoding deferred heap object: ");
618 0 : object_->ShortPrint();
619 0 : PrintF("\n");
620 : }
621 :
622 3628 : int size = object_->Size();
623 3628 : Map* map = object_->map();
624 : SerializerReference back_reference =
625 3628 : serializer_->reference_map()->Lookup(object_);
626 : DCHECK(back_reference.is_back_reference());
627 :
628 : // Serialize the rest of the object.
629 3628 : CHECK_EQ(0, bytes_processed_so_far_);
630 3628 : bytes_processed_so_far_ = kPointerSize;
631 :
632 3628 : serializer_->PutAlignmentPrefix(object_);
633 3628 : sink_->Put(kNewObject + back_reference.space(), "deferred object");
634 3628 : serializer_->PutBackReference(object_, back_reference);
635 3628 : sink_->PutInt(size >> kPointerSizeLog2, "deferred object size");
636 :
637 3628 : SerializeContent(map, size);
638 3628 : }
639 :
640 : template <class AllocatorT>
641 1511422 : void Serializer<AllocatorT>::ObjectSerializer::SerializeContent(Map* map,
642 : int size) {
643 1511422 : UnlinkWeakNextScope unlink_weak_next(object_);
644 3022844 : if (object_->IsCode()) {
645 : // For code objects, output raw bytes first.
646 194478 : OutputCode(size);
647 : // Then iterate references via reloc info.
648 388956 : object_->IterateBody(map->instance_type(), size, this);
649 : // Finally skip to the end.
650 194478 : serializer_->FlushSkip(SkipTo(object_->address() + size));
651 : } else {
652 : // For other objects, iterate references first.
653 2633888 : object_->IterateBody(map->instance_type(), size, this);
654 : // Then output data payload, if any.
655 1316944 : OutputRawData(object_->address() + size);
656 : }
657 1511422 : }
658 :
659 : template <class AllocatorT>
660 1482433 : void Serializer<AllocatorT>::ObjectSerializer::VisitPointers(HeapObject* host,
661 : Object** start,
662 : Object** end) {
663 : Object** current = start;
664 5190491 : while (current < end) {
665 6919475 : while (current < end && (*current)->IsSmi()) current++;
666 2225625 : if (current < end) OutputRawData(reinterpret_cast<Address>(current));
667 :
668 15725408 : while (current < end && !(*current)->IsSmi()) {
669 6297769 : HeapObject* current_contents = HeapObject::cast(*current);
670 6297769 : int root_index = serializer_->root_index_map()->Lookup(current_contents);
671 : // Repeats are not subject to the write barrier so we can only use
672 : // immortal immovable root members. They are never in new space.
673 12450591 : if (current != start && root_index != RootIndexMap::kInvalidRootIndex &&
674 3095492 : Heap::RootIsImmortalImmovable(root_index) &&
675 3057330 : current_contents == current[-1]) {
676 : DCHECK(!serializer_->isolate()->heap()->InNewSpace(current_contents));
677 : int repeat_count = 1;
678 4829898 : while (¤t[repeat_count] < end - 1 &&
679 2346233 : current[repeat_count] == current_contents) {
680 1759554 : repeat_count++;
681 : }
682 : current += repeat_count;
683 724111 : bytes_processed_so_far_ += repeat_count * kPointerSize;
684 724111 : if (repeat_count > kNumberOfFixedRepeat) {
685 5186 : sink_->Put(kVariableRepeat, "VariableRepeat");
686 5186 : sink_->PutInt(repeat_count, "repeat count");
687 : } else {
688 718925 : sink_->Put(kFixedRepeatStart + repeat_count, "FixedRepeat");
689 : }
690 : } else {
691 5573658 : serializer_->SerializeObject(current_contents, kPlain, kStartOfObject,
692 : 0);
693 5573658 : bytes_processed_so_far_ += kPointerSize;
694 5573658 : current++;
695 : }
696 : }
697 : }
698 1482433 : }
699 :
700 : template <class AllocatorT>
701 44437 : void Serializer<AllocatorT>::ObjectSerializer::VisitEmbeddedPointer(
702 88874 : Code* host, RelocInfo* rinfo) {
703 44437 : int skip = SkipTo(rinfo->target_address_address());
704 44437 : HowToCode how_to_code = rinfo->IsCodedSpecially() ? kFromCode : kPlain;
705 : Object* object = rinfo->target_object();
706 44437 : serializer_->SerializeObject(HeapObject::cast(object), how_to_code,
707 : kStartOfObject, skip);
708 44437 : bytes_processed_so_far_ += rinfo->target_address_size();
709 44437 : }
710 :
711 : template <class AllocatorT>
712 27968 : void Serializer<AllocatorT>::ObjectSerializer::VisitExternalReference(
713 : Foreign* host, Address* p) {
714 27968 : int skip = SkipTo(reinterpret_cast<Address>(p));
715 27968 : Address target = *p;
716 27968 : auto encoded_reference = serializer_->EncodeExternalReference(target);
717 27968 : if (encoded_reference.is_from_api()) {
718 60 : sink_->Put(kApiReference, "ApiRef");
719 : } else {
720 27908 : sink_->Put(kExternalReference + kPlain + kStartOfObject, "ExternalRef");
721 : }
722 27968 : sink_->PutInt(skip, "SkipB4ExternalRef");
723 27968 : sink_->PutInt(encoded_reference.index(), "reference index");
724 27968 : bytes_processed_so_far_ += kPointerSize;
725 27968 : }
726 :
727 : template <class AllocatorT>
728 817925 : void Serializer<AllocatorT>::ObjectSerializer::VisitExternalReference(
729 1635850 : Code* host, RelocInfo* rinfo) {
730 817925 : int skip = SkipTo(rinfo->target_address_address());
731 : Address target = rinfo->target_external_reference();
732 817925 : auto encoded_reference = serializer_->EncodeExternalReference(target);
733 817925 : if (encoded_reference.is_from_api()) {
734 : DCHECK(!rinfo->IsCodedSpecially());
735 0 : sink_->Put(kApiReference, "ApiRef");
736 : } else {
737 817925 : HowToCode how_to_code = rinfo->IsCodedSpecially() ? kFromCode : kPlain;
738 : sink_->Put(kExternalReference + how_to_code + kStartOfObject,
739 817925 : "ExternalRef");
740 : }
741 817925 : sink_->PutInt(skip, "SkipB4ExternalRef");
742 : DCHECK_NOT_NULL(target); // Code does not reference null.
743 817925 : sink_->PutInt(encoded_reference.index(), "reference index");
744 817925 : bytes_processed_so_far_ += rinfo->target_address_size();
745 817925 : }
746 :
747 : template <class AllocatorT>
748 172753 : void Serializer<AllocatorT>::ObjectSerializer::VisitInternalReference(
749 345506 : Code* host, RelocInfo* rinfo) {
750 : // We do not use skip from last patched pc to find the pc to patch, since
751 : // target_address_address may not return addresses in ascending order when
752 : // used for internal references. External references may be stored at the
753 : // end of the code in the constant pool, whereas internal references are
754 : // inline. That would cause the skip to be negative. Instead, we store the
755 : // offset from code entry.
756 172753 : Address entry = Code::cast(object_)->entry();
757 172753 : intptr_t pc_offset = rinfo->target_internal_reference_address() - entry;
758 172753 : intptr_t target_offset = rinfo->target_internal_reference() - entry;
759 : DCHECK(0 <= pc_offset &&
760 : pc_offset <= Code::cast(object_)->instruction_size());
761 : DCHECK(0 <= target_offset &&
762 : target_offset <= Code::cast(object_)->instruction_size());
763 : sink_->Put(rinfo->rmode() == RelocInfo::INTERNAL_REFERENCE
764 : ? kInternalReference
765 : : kInternalReferenceEncoded,
766 172753 : "InternalRef");
767 172753 : sink_->PutInt(static_cast<uintptr_t>(pc_offset), "internal ref address");
768 172753 : sink_->PutInt(static_cast<uintptr_t>(target_offset), "internal ref value");
769 172753 : }
770 :
771 : template <class AllocatorT>
772 0 : void Serializer<AllocatorT>::ObjectSerializer::VisitRuntimeEntry(
773 0 : Code* host, RelocInfo* rinfo) {
774 0 : int skip = SkipTo(rinfo->target_address_address());
775 0 : HowToCode how_to_code = rinfo->IsCodedSpecially() ? kFromCode : kPlain;
776 : Address target = rinfo->target_address();
777 0 : auto encoded_reference = serializer_->EncodeExternalReference(target);
778 : DCHECK(!encoded_reference.is_from_api());
779 0 : sink_->Put(kExternalReference + how_to_code + kStartOfObject, "ExternalRef");
780 0 : sink_->PutInt(skip, "SkipB4ExternalRef");
781 0 : sink_->PutInt(encoded_reference.index(), "reference index");
782 0 : bytes_processed_so_far_ += rinfo->target_address_size();
783 0 : }
784 :
785 : template <class AllocatorT>
786 856818 : void Serializer<AllocatorT>::ObjectSerializer::VisitCodeTarget(
787 1713636 : Code* host, RelocInfo* rinfo) {
788 856818 : int skip = SkipTo(rinfo->target_address_address());
789 856818 : Code* object = Code::GetCodeFromTargetAddress(rinfo->target_address());
790 856818 : serializer_->SerializeObject(object, kFromCode, kInnerPointer, skip);
791 856818 : bytes_processed_so_far_ += rinfo->target_address_size();
792 856818 : }
793 :
794 : template <class AllocatorT>
795 3163345 : void Serializer<AllocatorT>::ObjectSerializer::OutputRawData(Address up_to) {
796 3163345 : Address object_start = object_->address();
797 3163345 : int base = bytes_processed_so_far_;
798 3163345 : int up_to_offset = static_cast<int>(up_to - object_start);
799 3163345 : int to_skip = up_to_offset - bytes_processed_so_far_;
800 : int bytes_to_output = to_skip;
801 3163345 : bytes_processed_so_far_ += to_skip;
802 : DCHECK_GE(to_skip, 0);
803 3163345 : if (bytes_to_output != 0) {
804 : DCHECK(to_skip == bytes_to_output);
805 1704323 : if (IsAligned(bytes_to_output, kPointerAlignment) &&
806 : bytes_to_output <= kNumberOfFixedRawData * kPointerSize) {
807 1700736 : int size_in_words = bytes_to_output >> kPointerSizeLog2;
808 1700736 : sink_->PutSection(kFixedRawDataStart + size_in_words, "FixedRawData");
809 : } else {
810 3587 : sink_->Put(kVariableRawData, "VariableRawData");
811 3587 : sink_->PutInt(bytes_to_output, "length");
812 : }
813 : #ifdef MEMORY_SANITIZER
814 : // Check that we do not serialize uninitialized memory.
815 : __msan_check_mem_is_initialized(object_start + base, bytes_to_output);
816 : #endif // MEMORY_SANITIZER
817 1704323 : sink_->PutRaw(object_start + base, bytes_to_output, "Bytes");
818 : }
819 3163345 : }
820 :
821 : template <class AllocatorT>
822 1941626 : int Serializer<AllocatorT>::ObjectSerializer::SkipTo(Address to) {
823 1941626 : Address object_start = object_->address();
824 1941626 : int up_to_offset = static_cast<int>(to - object_start);
825 1941626 : int to_skip = up_to_offset - bytes_processed_so_far_;
826 1941626 : bytes_processed_so_far_ += to_skip;
827 : // This assert will fail if the reloc info gives us the target_address_address
828 : // locations in a non-ascending order. Luckily that doesn't happen.
829 : DCHECK_GE(to_skip, 0);
830 1941626 : return to_skip;
831 : }
832 :
833 : template <class AllocatorT>
834 194478 : void Serializer<AllocatorT>::ObjectSerializer::OutputCode(int size) {
835 : DCHECK_EQ(kPointerSize, bytes_processed_so_far_);
836 194478 : Code* code = Code::cast(object_);
837 194478 : if (FLAG_predictable) {
838 : // To make snapshots reproducible, we make a copy of the code object
839 : // and wipe all pointers in the copy, which we then serialize.
840 1206 : code = serializer_->CopyCode(code);
841 : int mode_mask = RelocInfo::kCodeTargetMask |
842 : RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
843 : RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
844 : RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY) |
845 : RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) |
846 : RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED);
847 12954 : for (RelocIterator it(code, mode_mask); !it.done(); it.next()) {
848 : RelocInfo* rinfo = it.rinfo();
849 11748 : rinfo->WipeOut(serializer_->isolate());
850 : }
851 : // We need to wipe out the header fields *after* wiping out the
852 : // relocations, because some of these fields are needed for the latter.
853 1206 : code->WipeOutHeader();
854 : }
855 :
856 194478 : Address start = code->address() + Code::kDataStart;
857 194478 : int bytes_to_output = size - Code::kDataStart;
858 :
859 194478 : sink_->Put(kVariableRawCode, "VariableRawCode");
860 194478 : sink_->PutInt(bytes_to_output, "length");
861 :
862 : #ifdef MEMORY_SANITIZER
863 : // Check that we do not serialize uninitialized memory.
864 : __msan_check_mem_is_initialized(start, bytes_to_output);
865 : #endif // MEMORY_SANITIZER
866 194478 : sink_->PutRaw(start, bytes_to_output, "Code");
867 194478 : }
868 :
// Explicit instantiation of the Serializer template for the two
// allocator policies used by the snapshot machinery; keeps all other
// instantiations out of client translation units.
template class Serializer<BuiltinSerializerAllocator>;
template class Serializer<DefaultSerializerAllocator>;

}  // namespace internal
}  // namespace v8
|