Line data Source code
1 : // Copyright 2017 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #ifndef V8_OBJECTS_CODE_H_
6 : #define V8_OBJECTS_CODE_H_
7 :
8 : #include "src/contexts.h"
9 : #include "src/handler-table.h"
10 : #include "src/objects.h"
11 : #include "src/objects/fixed-array.h"
12 : #include "src/objects/heap-object.h"
13 : #include "src/objects/struct.h"
14 :
15 : // Has to be the last include (doesn't have include guards):
16 : #include "src/objects/object-macros.h"
17 :
18 : namespace v8 {
19 : namespace internal {
20 :
21 : class ByteArray;
22 : class BytecodeArray;
23 : class CodeDataContainer;
24 : class CodeDesc;
25 : class MaybeObject;
26 :
27 : namespace interpreter {
28 : class Register;
29 : }
30 :
31 : // Code describes objects with on-the-fly generated machine code.
// Code describes objects with on-the-fly generated machine code.
class Code : public HeapObject {
 public:
  NEVER_READ_ONLY_SPACE
  // Opaque data type for encapsulating code flags like kind, inline
  // cache state, and arguments count.
  typedef uint32_t Flags;

  // X-macro list of all code kinds; expanded below into the Kind enum and
  // reused by AbstractCode::Kind.
#define CODE_KIND_LIST(V)   \
  V(OPTIMIZED_FUNCTION)     \
  V(BYTECODE_HANDLER)       \
  V(STUB)                   \
  V(BUILTIN)                \
  V(REGEXP)                 \
  V(WASM_FUNCTION)          \
  V(WASM_TO_JS_FUNCTION)    \
  V(JS_TO_WASM_FUNCTION)    \
  V(WASM_INTERPRETER_ENTRY) \
  V(C_WASM_ENTRY)

  enum Kind {
#define DEFINE_CODE_KIND_ENUM(name) name,
    CODE_KIND_LIST(DEFINE_CODE_KIND_ENUM)
#undef DEFINE_CODE_KIND_ENUM
    NUMBER_OF_KINDS
  };

  // Returns a human-readable name for the given kind (for logging/debugging).
  static const char* Kind2String(Kind kind);

#ifdef ENABLE_DISASSEMBLER
  const char* GetName(Isolate* isolate) const;
  void Disassemble(const char* name, std::ostream& os,
                   Address current_pc = kNullAddress);
#endif

  // [instruction_size]: Size of the native instructions, including embedded
  // data such as the safepoints table.
  inline int raw_instruction_size() const;
  inline void set_raw_instruction_size(int value);

  // Returns the size of the native instructions, including embedded
  // data such as the safepoints table. For off-heap code objects
  // this may differ from instruction_size in that this will return the size of
  // the off-heap instruction stream rather than the on-heap trampoline located
  // at instruction_start.
  inline int InstructionSize() const;
  int OffHeapInstructionSize() const;

  // [relocation_info]: Code relocation information
  DECL_ACCESSORS(relocation_info, ByteArray)

  // This function should be called only from GC.
  void ClearEmbeddedObjects(Heap* heap);

  // [deoptimization_data]: Array containing data for deopt.
  DECL_ACCESSORS(deoptimization_data, FixedArray)

  // [source_position_table]: ByteArray for the source positions table or
  // SourcePositionTableWithFrameCache.
  DECL_ACCESSORS(source_position_table, Object)
  inline ByteArray SourcePositionTable() const;

  // [code_data_container]: A container indirection for all mutable fields.
  DECL_ACCESSORS(code_data_container, CodeDataContainer)

  // [next_code_link]: Link for lists of optimized or deoptimized code.
  // Note that this field is stored in the {CodeDataContainer} to be mutable.
  inline Object next_code_link() const;
  inline void set_next_code_link(Object value);

  // Unchecked accessors to be used during GC.
  inline ByteArray unchecked_relocation_info() const;

  inline int relocation_size() const;

  // [kind]: Access to specific code kind.
  inline Kind kind() const;

  inline bool is_optimized_code() const;
  inline bool is_wasm_code() const;

  // Testers for interpreter builtins.
  inline bool is_interpreter_trampoline_builtin() const;

  // Tells whether the code checks the optimization marker in the function's
  // feedback vector.
  inline bool checks_optimization_marker() const;

  // Tells whether the outgoing parameters of this code are tagged pointers.
  inline bool has_tagged_params() const;

  // [is_turbofanned]: For kind STUB or OPTIMIZED_FUNCTION, tells whether the
  // code object was generated by the TurboFan optimizing compiler.
  inline bool is_turbofanned() const;

  // [can_have_weak_objects]: For kind OPTIMIZED_FUNCTION, tells whether the
  // embedded objects in code should be treated weakly.
  inline bool can_have_weak_objects() const;
  inline void set_can_have_weak_objects(bool value);

  // [builtin_index]: For builtins, tells which builtin index the code object
  // has. The builtin index is a non-negative integer for builtins, and -1
  // otherwise.
  inline int builtin_index() const;
  inline void set_builtin_index(int id);
  inline bool is_builtin() const;

  inline bool has_safepoint_info() const;

  // [stack_slots]: If {has_safepoint_info()}, the number of stack slots
  // reserved in the code prologue.
  inline int stack_slots() const;

  // [safepoint_table_offset]: If {has_safepoint_info()}, the offset in the
  // instruction stream where the safepoint table starts.
  inline int safepoint_table_offset() const;
  inline void set_safepoint_table_offset(int offset);
  int safepoint_table_size() const;
  bool has_safepoint_table() const;

  // [handler_table_offset]: The offset in the instruction stream where the
  // exception handler table starts.
  inline int handler_table_offset() const;
  inline void set_handler_table_offset(int offset);
  int handler_table_size() const;
  bool has_handler_table() const;

  // [constant_pool offset]: Offset of the constant pool.
  // Valid for FLAG_enable_embedded_constant_pool only
  inline int constant_pool_offset() const;
  inline void set_constant_pool_offset(int offset);
  int constant_pool_size() const;
  bool has_constant_pool() const;

  // [code_comments_offset]: Offset of the code comment section.
  inline int code_comments_offset() const;
  inline void set_code_comments_offset(int offset);
  inline Address code_comments() const;
  int code_comments_size() const;
  bool has_code_comments() const;

  // The size of the executable instruction area, without embedded metadata.
  int ExecutableInstructionSize() const;

  // [marked_for_deoptimization]: For kind OPTIMIZED_FUNCTION tells whether
  // the code is going to be deoptimized.
  inline bool marked_for_deoptimization() const;
  inline void set_marked_for_deoptimization(bool flag);

  // [embedded_objects_cleared]: For kind OPTIMIZED_FUNCTION tells whether
  // the embedded objects in the code marked for deoptimization were cleared.
  // Note that embedded_objects_cleared() implies marked_for_deoptimization().
  inline bool embedded_objects_cleared() const;
  inline void set_embedded_objects_cleared(bool flag);

  // [deopt_already_counted]: For kind OPTIMIZED_FUNCTION tells whether
  // the code was already deoptimized.
  inline bool deopt_already_counted() const;
  inline void set_deopt_already_counted(bool flag);

  // [is_promise_rejection]: For kind BUILTIN tells whether the
  // exception thrown by the code will lead to promise rejection or
  // uncaught if both this and is_exception_caught is set.
  // Use GetBuiltinCatchPrediction to access this.
  inline void set_is_promise_rejection(bool flag);

  // [is_exception_caught]: For kind BUILTIN tells whether the
  // exception thrown by the code will be caught internally or
  // uncaught if both this and is_promise_rejection is set.
  // Use GetBuiltinCatchPrediction to access this.
  inline void set_is_exception_caught(bool flag);

  // [is_off_heap_trampoline]: For kind BUILTIN tells whether
  // this is a trampoline to an off-heap builtin.
  inline bool is_off_heap_trampoline() const;

  // [constant_pool]: The constant pool for this function.
  inline Address constant_pool() const;

  // Get the safepoint entry for the given pc.
  SafepointEntry GetSafepointEntry(Address pc);

  // The entire code object including its header is copied verbatim to the
  // snapshot so that it can be written in one, fast, memcpy during
  // deserialization. The deserializer will overwrite some pointers, rather
  // like a runtime linker, but the random allocation addresses used in the
  // mksnapshot process would still be present in the unlinked snapshot data,
  // which would make snapshot production non-reproducible. This method wipes
  // out the to-be-overwritten header data for reproducible snapshots.
  inline void WipeOutHeader();

  // Clear uninitialized padding space. This ensures that the snapshot content
  // is deterministic. Depending on the V8 build mode there could be no padding.
  inline void clear_padding();
  // Initialize the flags field. Similar to clear_padding above this ensures
  // that the snapshot content is deterministic.
  inline void initialize_flags(Kind kind, bool has_unwinding_info,
                               bool is_turbofanned, int stack_slots,
                               bool is_off_heap_trampoline);

  // Convert a target address into a code object.
  static inline Code GetCodeFromTargetAddress(Address address);

  // Convert an entry address into an object.
  static inline Code GetObjectFromEntryAddress(Address location_of_address);

  // Returns the address of the first instruction.
  inline Address raw_instruction_start() const;

  // Returns the address of the first instruction. For off-heap code objects
  // this differs from instruction_start (which would point to the off-heap
  // trampoline instead).
  inline Address InstructionStart() const;
  Address OffHeapInstructionStart() const;

  // Returns the address right after the last instruction.
  inline Address raw_instruction_end() const;

  // Returns the address right after the last instruction. For off-heap code
  // objects this differs from instruction_end (which would point to the
  // off-heap trampoline instead).
  inline Address InstructionEnd() const;
  Address OffHeapInstructionEnd() const;

  // Returns the size of the instructions, padding, relocation and unwinding
  // information.
  inline int body_size() const;

  // Returns the size of code and its metadata. This includes the size of code
  // relocation information, deoptimization data and handler table.
  inline int SizeIncludingMetadata() const;

  // Returns the address of the first relocation info (read backwards!).
  inline byte* relocation_start() const;

  // Returns the address right after the relocation info (read backwards!).
  inline byte* relocation_end() const;

  // [has_unwinding_info]: Whether this code object has unwinding information.
  // If it doesn't, unwinding_information_start() will point to invalid data.
  //
  // The body of all code objects has the following layout.
  //
  //  +--------------------------+  <-- raw_instruction_start()
  //  |       instructions       |
  //  |           ...            |
  //  +--------------------------+
  //  |     embedded metadata    |  <-- safepoint_table_offset()
  //  |           ...            |  <-- handler_table_offset()
  //  |                          |  <-- constant_pool_offset()
  //  |                          |  <-- code_comments_offset()
  //  |                          |
  //  +--------------------------+  <-- raw_instruction_end()
  //
  // If has_unwinding_info() is false, raw_instruction_end() points to the
  // first memory location after the end of the code object. Otherwise, the
  // body continues as follows:
  //
  //  +--------------------------+
  //  |    padding to the next   |
  //  |  8-byte aligned address  |
  //  +--------------------------+  <-- raw_instruction_end()
  //  |   [unwinding_info_size]  |
  //  |        as uint64_t       |
  //  +--------------------------+  <-- unwinding_info_start()
  //  |       unwinding info     |
  //  |           ...            |
  //  +--------------------------+  <-- unwinding_info_end()
  //
  // and unwinding_info_end() points to the first memory location after the end
  // of the code object.
  //
  inline bool has_unwinding_info() const;

  // [unwinding_info_size]: Size of the unwinding information.
  inline int unwinding_info_size() const;
  inline void set_unwinding_info_size(int value);

  // Returns the address of the unwinding information, if any.
  inline Address unwinding_info_start() const;

  // Returns the address right after the end of the unwinding information.
  inline Address unwinding_info_end() const;

  // Code entry point.
  inline Address entry() const;

  // Returns true if pc is inside this object's instructions.
  inline bool contains(Address pc);

  // Relocate the code by delta bytes. Called to signal that this code
  // object has been moved by delta bytes.
  void Relocate(intptr_t delta);

  // Migrate code from desc without flushing the instruction cache.
  void CopyFromNoFlush(Heap* heap, const CodeDesc& desc);

  // Copy the RelocInfo portion of |desc| to |dest|. The ByteArray must be
  // exactly the same size as the RelocInfo in |desc|.
  static inline void CopyRelocInfoToByteArray(ByteArray dest,
                                              const CodeDesc& desc);

  // Flushes the instruction cache for the executable instructions of this code
  // object. Make sure to call this while the code is still writable.
  void FlushICache() const;

  // Returns the object size for a given body (used for allocation).
  // The total size is the header plus the body, rounded up to the required
  // code alignment.
  static int SizeFor(int body_size) {
    DCHECK_SIZE_TAG_ALIGNED(body_size);
    return RoundUp(kHeaderSize + body_size, kCodeAlignment);
  }

  // Calculate the size of the code object to report for log events. This takes
  // the layout of the code object into account.
  inline int ExecutableSize() const;

  DECL_CAST(Code)

  // Dispatched behavior.
  inline int CodeSize() const;

  DECL_PRINTER(Code)
  DECL_VERIFIER(Code)

  void PrintDeoptLocation(FILE* out, const char* str, Address pc);
  bool CanDeoptAt(Address pc);

  void SetMarkedForDeoptimization(const char* reason);

  inline HandlerTable::CatchPrediction GetBuiltinCatchPrediction();

  bool IsIsolateIndependent(Isolate* isolate);

  inline bool CanContainWeakObjects();

  inline bool IsWeakObject(HeapObject object);

  static inline bool IsWeakObjectInOptimizedCode(HeapObject object);

  // Return true if the function is inlined in the code.
  bool Inlines(SharedFunctionInfo sfi);

  class OptimizedCodeIterator;

  // Layout description.
#define CODE_FIELDS(V)                                                    \
  V(kRelocationInfoOffset, kTaggedSize)                                   \
  V(kDeoptimizationDataOffset, kTaggedSize)                               \
  V(kSourcePositionTableOffset, kTaggedSize)                              \
  V(kCodeDataContainerOffset, kTaggedSize)                                \
  /* Data or code not directly visited by GC directly starts here. */     \
  /* The serializer needs to copy bytes starting from here verbatim. */   \
  /* Objects embedded into code is visited via reloc info. */             \
  V(kDataStart, 0)                                                        \
  V(kInstructionSizeOffset, kIntSize)                                     \
  V(kFlagsOffset, kIntSize)                                               \
  V(kSafepointTableOffsetOffset, kIntSize)                                \
  V(kHandlerTableOffsetOffset, kIntSize)                                  \
  V(kConstantPoolOffsetOffset,                                            \
    FLAG_enable_embedded_constant_pool ? kIntSize : 0)                    \
  V(kCodeCommentsOffsetOffset, kIntSize)                                  \
  V(kBuiltinIndexOffset, kIntSize)                                        \
  V(kUnalignedHeaderSize, 0)                                              \
  /* Add padding to align the instruction start following right after */  \
  /* the Code object header. */                                           \
  V(kOptionalPaddingOffset, CODE_POINTER_PADDING(kOptionalPaddingOffset)) \
  V(kHeaderSize, 0)

  DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, CODE_FIELDS)
#undef CODE_FIELDS

  // This documents the amount of free space we have in each Code object header
  // due to padding for code alignment.
#if V8_TARGET_ARCH_ARM64
  static constexpr int kHeaderPaddingSize = COMPRESS_POINTERS_BOOL ? 20 : 0;
#elif V8_TARGET_ARCH_MIPS64
  static constexpr int kHeaderPaddingSize = 0;
#elif V8_TARGET_ARCH_X64
  static constexpr int kHeaderPaddingSize = COMPRESS_POINTERS_BOOL ? 20 : 0;
#elif V8_TARGET_ARCH_ARM
  static constexpr int kHeaderPaddingSize = 20;
#elif V8_TARGET_ARCH_IA32
  static constexpr int kHeaderPaddingSize = 20;
#elif V8_TARGET_ARCH_MIPS
  static constexpr int kHeaderPaddingSize = 20;
#elif V8_TARGET_ARCH_PPC64
  static constexpr int kHeaderPaddingSize =
      FLAG_enable_embedded_constant_pool ? 28 : 0;
#elif V8_TARGET_ARCH_S390X
  static constexpr int kHeaderPaddingSize = 0;
#else
#error Unknown architecture.
#endif
  // The per-architecture constants above must agree with the actual padding
  // the layout macro produced.
  STATIC_ASSERT(FIELD_SIZE(kOptionalPaddingOffset) == kHeaderPaddingSize);

  inline int GetUnwindingInfoSizeOffset() const;

  class BodyDescriptor;

  // Flags layout.  BitField<type, shift, size>.
#define CODE_FLAGS_BIT_FIELDS(V, _)    \
  V(HasUnwindingInfoField, bool, 1, _) \
  V(KindField, Kind, 5, _)             \
  V(IsTurbofannedField, bool, 1, _)    \
  V(StackSlotsField, int, 24, _)       \
  V(IsOffHeapTrampoline, bool, 1, _)
  DEFINE_BIT_FIELDS(CODE_FLAGS_BIT_FIELDS)
#undef CODE_FLAGS_BIT_FIELDS
  // Guard against a new code kind overflowing the 5-bit KindField, and
  // against the flags exceeding the 32-bit flags word.
  static_assert(NUMBER_OF_KINDS <= KindField::kMax, "Code::KindField size");
  static_assert(IsOffHeapTrampoline::kNext <= 32,
                "Code::flags field exhausted");

  // KindSpecificFlags layout (STUB, BUILTIN and OPTIMIZED_FUNCTION)
#define CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS(V, _) \
  V(MarkedForDeoptimizationField, bool, 1, _)     \
  V(EmbeddedObjectsClearedField, bool, 1, _)      \
  V(DeoptAlreadyCountedField, bool, 1, _)         \
  V(CanHaveWeakObjectsField, bool, 1, _)          \
  V(IsPromiseRejectionField, bool, 1, _)          \
  V(IsExceptionCaughtField, bool, 1, _)
  DEFINE_BIT_FIELDS(CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS)
#undef CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS
  static_assert(IsExceptionCaughtField::kNext <= 32, "KindSpecificFlags full");

  // The {marked_for_deoptimization} field is accessed from generated code.
  static const int kMarkedForDeoptimizationBit =
      MarkedForDeoptimizationField::kShift;

  static const int kArgumentsBits = 16;
  // Reserve one argument count value as the "don't adapt arguments" sentinel.
  static const int kMaxArguments = (1 << kArgumentsBits) - 2;

 private:
  friend class RelocIterator;

  bool is_promise_rejection() const;
  bool is_exception_caught() const;

  OBJECT_CONSTRUCTORS(Code, HeapObject);
};
471 :
// Iterates over optimized {Code} objects. Non-copyable, and heap allocation
// is disallowed while an iterator is live (see DISALLOW_HEAP_ALLOCATION),
// since a GC could move the code objects being walked.
class Code::OptimizedCodeIterator {
 public:
  explicit OptimizedCodeIterator(Isolate* isolate);
  // Advances the iteration and returns the next optimized Code object.
  Code Next();

 private:
  Context next_context_;  // Next context whose optimized-code list to walk.
  Code current_code_;     // Current position within the list.
  Isolate* isolate_;

  DISALLOW_HEAP_ALLOCATION(no_gc)
  DISALLOW_COPY_AND_ASSIGN(OptimizedCodeIterator);
};
485 :
486 : // CodeDataContainer is a container for all mutable fields associated with its
487 : // referencing {Code} object. Since {Code} objects reside on write-protected
488 : // pages within the heap, its header fields need to be immutable. There always
489 : // is a 1-to-1 relation between {Code} and {CodeDataContainer}, the referencing
490 : // field {Code::code_data_container} itself is immutable.
// CodeDataContainer is a container for all mutable fields associated with its
// referencing {Code} object. Since {Code} objects reside on write-protected
// pages within the heap, its header fields need to be immutable. There always
// is a 1-to-1 relation between {Code} and {CodeDataContainer}, the referencing
// field {Code::code_data_container} itself is immutable.
class CodeDataContainer : public HeapObject {
 public:
  NEVER_READ_ONLY_SPACE
  // [next_code_link]: chain of optimized/deoptimized code, exposed through
  // Code::next_code_link().
  DECL_ACCESSORS(next_code_link, Object)
  // [kind_specific_flags]: backing store for Code's KindSpecificFlags bits.
  DECL_INT_ACCESSORS(kind_specific_flags)

  // Clear uninitialized padding space. This ensures that the snapshot content
  // is deterministic.
  inline void clear_padding();

  DECL_CAST(CodeDataContainer)

  // Dispatched behavior.
  DECL_PRINTER(CodeDataContainer)
  DECL_VERIFIER(CodeDataContainer)

  // Layout description.
#define CODE_DATA_FIELDS(V)                                 \
  /* Weak pointer fields. */                                \
  V(kPointerFieldsStrongEndOffset, 0)                       \
  V(kNextCodeLinkOffset, kTaggedSize)                       \
  V(kPointerFieldsWeakEndOffset, 0)                         \
  /* Raw data fields. */                                    \
  V(kKindSpecificFlagsOffset, kIntSize)                     \
  V(kUnalignedSize, OBJECT_POINTER_PADDING(kUnalignedSize)) \
  /* Total size. */                                         \
  V(kSize, 0)

  DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, CODE_DATA_FIELDS)
#undef CODE_DATA_FIELDS

  class BodyDescriptor;

  OBJECT_CONSTRUCTORS(CodeDataContainer, HeapObject);
};
526 :
// AbstractCode is a dispatching wrapper that is either a Code object or a
// BytecodeArray, providing a common interface over both (see GetCode /
// GetBytecodeArray below).
class AbstractCode : public HeapObject {
 public:
  NEVER_READ_ONLY_SPACE
  // All code kinds and INTERPRETED_FUNCTION.
  enum Kind {
#define DEFINE_CODE_KIND_ENUM(name) name,
    CODE_KIND_LIST(DEFINE_CODE_KIND_ENUM)
#undef DEFINE_CODE_KIND_ENUM
    INTERPRETED_FUNCTION,
    NUMBER_OF_KINDS
  };

  static const char* Kind2String(Kind kind);

  int SourcePosition(int offset);
  int SourceStatementPosition(int offset);

  // Returns the address of the first instruction.
  inline Address raw_instruction_start();

  // Returns the address of the first instruction. For off-heap code objects
  // this differs from instruction_start (which would point to the off-heap
  // trampoline instead).
  inline Address InstructionStart();

  // Returns the address right after the last instruction.
  inline Address raw_instruction_end();

  // Returns the address right after the last instruction. For off-heap code
  // objects this differs from instruction_end (which would point to the
  // off-heap trampoline instead).
  inline Address InstructionEnd();

  // Returns the size of the code instructions.
  inline int raw_instruction_size();

  // Returns the size of the native instructions, including embedded
  // data such as the safepoints table. For off-heap code objects
  // this may differ from instruction_size in that this will return the size of
  // the off-heap instruction stream rather than the on-heap trampoline located
  // at instruction_start.
  inline int InstructionSize();

  // Return the source position table.
  inline ByteArray source_position_table();

  inline Object stack_frame_cache();
  static void SetStackFrameCache(Handle<AbstractCode> abstract_code,
                                 Handle<SimpleNumberDictionary> cache);
  void DropStackFrameCache();

  // Returns the size of instructions and the metadata.
  inline int SizeIncludingMetadata();

  // Returns true if pc is inside this object's instructions.
  inline bool contains(Address pc);

  // Returns the AbstractCode::Kind of the code.
  inline Kind kind();

  // Calculate the size of the code object to report for log events. This takes
  // the layout of the code object into account.
  inline int ExecutableSize();

  DECL_CAST(AbstractCode)
  inline Code GetCode();
  inline BytecodeArray GetBytecodeArray();

  // Max loop nesting marker used to postpone OSR. We don't take loop
  // nesting that is deeper than 5 levels into account.
  static const int kMaxLoopNestingMarker = 6;

  OBJECT_CONSTRUCTORS(AbstractCode, HeapObject);
};
601 :
602 : // Dependent code is a singly linked list of weak fixed arrays. Each array
603 : // contains weak pointers to code objects for one dependent group. The suffix of
604 : // the array can be filled with the undefined value if the number of codes is
605 : // less than the length of the array.
606 : //
607 : // +------+-----------------+--------+--------+-----+--------+-----------+-----+
608 : // | next | count & group 1 | code 1 | code 2 | ... | code n | undefined | ... |
609 : // +------+-----------------+--------+--------+-----+--------+-----------+-----+
610 : // |
611 : // V
612 : // +------+-----------------+--------+--------+-----+--------+-----------+-----+
613 : // | next | count & group 2 | code 1 | code 2 | ... | code m | undefined | ... |
614 : // +------+-----------------+--------+--------+-----+--------+-----------+-----+
615 : // |
616 : // V
617 : // empty_weak_fixed_array()
618 : //
619 : // The list of weak fixed arrays is ordered by dependency groups.
620 :
class DependentCode : public WeakFixedArray {
 public:
  DECL_CAST(DependentCode)

  enum DependencyGroup {
    // Group of code that embed a transition to this map, and depend on being
    // deoptimized when the transition is replaced by a new version.
    kTransitionGroup,
    // Group of code that omit run-time prototype checks for prototypes
    // described by this map. The group is deoptimized whenever an object
    // described by this map changes shape (and transitions to a new map),
    // possibly invalidating the assumptions embedded in the code.
    kPrototypeCheckGroup,
    // Group of code that depends on global property values in property cells
    // not being changed.
    kPropertyCellChangedGroup,
    // Group of code that omit run-time checks for field(s) introduced by
    // this map, i.e. for the field type.
    kFieldOwnerGroup,
    // Group of code that omit run-time type checks for initial maps of
    // constructors.
    kInitialMapChangedGroup,
    // Group of code that depends on tenuring information in AllocationSites
    // not being changed.
    kAllocationSiteTenuringChangedGroup,
    // Group of code that depends on element transition information in
    // AllocationSites not being changed.
    kAllocationSiteTransitionChangedGroup
  };

  // Register a code dependency of {cell} on {object}.
  static void InstallDependency(Isolate* isolate, const MaybeObjectHandle& code,
                                Handle<HeapObject> object,
                                DependencyGroup group);

  void DeoptimizeDependentCodeGroup(Isolate* isolate, DependencyGroup group);

  bool MarkCodeForDeoptimization(Isolate* isolate, DependencyGroup group);

  // The following low-level accessors are exposed only for tests.
  inline DependencyGroup group();
  inline MaybeObject object_at(int i);
  inline int count();
  inline DependentCode next_link();

 private:
  static const char* DependencyGroupName(DependencyGroup group);

  // Get/Set {object}'s {DependentCode}.
  static DependentCode GetDependentCode(Handle<HeapObject> object);
  static void SetDependentCode(Handle<HeapObject> object,
                               Handle<DependentCode> dep);

  static Handle<DependentCode> New(Isolate* isolate, DependencyGroup group,
                                   const MaybeObjectHandle& object,
                                   Handle<DependentCode> next);
  static Handle<DependentCode> EnsureSpace(Isolate* isolate,
                                           Handle<DependentCode> entries);
  static Handle<DependentCode> InsertWeakCode(Isolate* isolate,
                                              Handle<DependentCode> entries,
                                              DependencyGroup group,
                                              const MaybeObjectHandle& code);

  // Compact by removing cleared weak cells and return true if there was
  // any cleared weak cell.
  bool Compact();

  // Growth policy for the entries array: grow by one entry while small
  // (fewer than 5 entries), then by 25% thereafter.
  static int Grow(int number_of_entries) {
    if (number_of_entries < 5) return number_of_entries + 1;
    return number_of_entries * 5 / 4;
  }

  static const int kGroupCount = kAllocationSiteTransitionChangedGroup + 1;
  static const int kNextLinkIndex = 0;
  static const int kFlagsIndex = 1;
  static const int kCodesStartIndex = 2;

  inline void set_next_link(DependentCode next);
  inline void set_count(int value);
  inline void set_object_at(int i, MaybeObject object);
  inline void clear_at(int i);
  inline void copy(int from, int to);

  inline int flags();
  inline void set_flags(int flags);
  // Group and count are packed into the flags slot: 3 bits of group,
  // 27 bits of count.
  class GroupField : public BitField<int, 0, 3> {};
  class CountField : public BitField<int, 3, 27> {};
  STATIC_ASSERT(kGroupCount <= GroupField::kMax + 1);

  OBJECT_CONSTRUCTORS(DependentCode, WeakFixedArray);
};
712 :
713 : // BytecodeArray represents a sequence of interpreter bytecodes.
// BytecodeArray represents a sequence of interpreter bytecodes.
class BytecodeArray : public FixedArrayBase {
 public:
  enum Age {
    kNoAgeBytecodeAge = 0,
    kQuadragenarianBytecodeAge,
    kQuinquagenarianBytecodeAge,
    kSexagenarianBytecodeAge,
    kSeptuagenarianBytecodeAge,
    kOctogenarianBytecodeAge,
    kAfterLastBytecodeAge,
    kFirstBytecodeAge = kNoAgeBytecodeAge,
    kLastBytecodeAge = kAfterLastBytecodeAge - 1,
    kBytecodeAgeCount = kAfterLastBytecodeAge - kFirstBytecodeAge - 1,
    kIsOldBytecodeAge = kSexagenarianBytecodeAge
  };

  // Object size for a bytecode array of the given length: header plus
  // bytecodes, pointer-aligned.
  static constexpr int SizeFor(int length) {
    return OBJECT_POINTER_ALIGN(kHeaderSize + length);
  }

  // Setter and getter
  inline byte get(int index) const;
  inline void set(int index, byte value);

  // Returns data start address.
  inline Address GetFirstBytecodeAddress();

  // Accessors for frame size.
  inline int frame_size() const;
  inline void set_frame_size(int frame_size);

  // Accessor for register count (derived from frame_size).
  inline int register_count() const;

  // Accessors for parameter count (including implicit 'this' receiver).
  inline int parameter_count() const;
  inline void set_parameter_count(int number_of_parameters);

  // Register used to pass the incoming new.target or generator object from the
  // function call.
  inline interpreter::Register incoming_new_target_or_generator_register()
      const;
  inline void set_incoming_new_target_or_generator_register(
      interpreter::Register incoming_new_target_or_generator_register);

  // Accessors for profiling count.
  inline int interrupt_budget() const;
  inline void set_interrupt_budget(int interrupt_budget);

  // Accessors for OSR loop nesting level.
  inline int osr_loop_nesting_level() const;
  inline void set_osr_loop_nesting_level(int depth);

  // Accessors for bytecode's code age.
  inline Age bytecode_age() const;
  inline void set_bytecode_age(Age age);

  // Accessors for the constant pool.
  DECL_ACCESSORS(constant_pool, FixedArray)

  // Accessors for handler table containing offsets of exception handlers.
  DECL_ACCESSORS(handler_table, ByteArray)

  // Accessors for source position table containing mappings between byte code
  // offset and source position or SourcePositionTableWithFrameCache.
  DECL_ACCESSORS(source_position_table, Object)

  inline ByteArray SourcePositionTable();
  inline bool HasSourcePositionTable();
  inline void ClearFrameCacheFromSourcePositionTable();

  DECL_CAST(BytecodeArray)

  // Dispatched behavior.
  inline int BytecodeArraySize();

  inline int raw_instruction_size();

  // Returns the size of bytecode and its metadata. This includes the size of
  // bytecode, constant pool, source position table, and handler table.
  inline int SizeIncludingMetadata();

  DECL_PRINTER(BytecodeArray)
  DECL_VERIFIER(BytecodeArray)

  void Disassemble(std::ostream& os);

  void CopyBytecodesTo(BytecodeArray to);

  // Bytecode aging
  bool IsOld() const;
  void MakeOlder();

  // Clear uninitialized padding space. This ensures that the snapshot content
  // is deterministic.
  inline void clear_padding();

  // Compares only the bytecode array but not any of the header fields.
  bool IsBytecodeEqual(const BytecodeArray other) const;

  // Layout description.
#define BYTECODE_ARRAY_FIELDS(V)                           \
  /* Pointer fields. */                                    \
  V(kConstantPoolOffset, kTaggedSize)                      \
  V(kHandlerTableOffset, kTaggedSize)                      \
  V(kSourcePositionTableOffset, kTaggedSize)               \
  V(kFrameSizeOffset, kIntSize)                            \
  V(kParameterSizeOffset, kIntSize)                        \
  V(kIncomingNewTargetOrGeneratorRegisterOffset, kIntSize) \
  V(kInterruptBudgetOffset, kIntSize)                      \
  V(kOSRNestingLevelOffset, kCharSize)                     \
  V(kBytecodeAgeOffset, kCharSize)                         \
  /* Total size. */                                        \
  V(kHeaderSize, 0)

  DEFINE_FIELD_OFFSET_CONSTANTS(FixedArrayBase::kHeaderSize,
                                BYTECODE_ARRAY_FIELDS)
#undef BYTECODE_ARRAY_FIELDS

  // Maximal memory consumption for a single BytecodeArray.
  static const int kMaxSize = 512 * MB;
  // Maximal length of a single BytecodeArray.
  static const int kMaxLength = kMaxSize - kHeaderSize;

  class BodyDescriptor;

  OBJECT_CONSTRUCTORS(BytecodeArray, FixedArrayBase);
};
842 :
843 : // DeoptimizationData is a fixed array used to hold the deoptimization data for
844 : // optimized code. It also contains information about functions that were
845 : // inlined. If N different functions were inlined then the first N elements of
846 : // the literal array will contain these functions.
847 : //
848 : // It can be empty.
849 : class DeoptimizationData : public FixedArray {
850 : public:
851 : // Layout description. Indices in the array.
852 : static const int kTranslationByteArrayIndex = 0;
853 : static const int kInlinedFunctionCountIndex = 1;
854 : static const int kLiteralArrayIndex = 2;
855 : static const int kOsrBytecodeOffsetIndex = 3;
856 : static const int kOsrPcOffsetIndex = 4;
857 : static const int kOptimizationIdIndex = 5;
858 : static const int kSharedFunctionInfoIndex = 6;
859 : static const int kInliningPositionsIndex = 7;
860 : static const int kFirstDeoptEntryIndex = 8;
861 :
862 : // Offsets of deopt entry elements relative to the start of the entry.
863 : static const int kBytecodeOffsetRawOffset = 0;
864 : static const int kTranslationIndexOffset = 1;
865 : static const int kPcOffset = 2;
866 : static const int kDeoptEntrySize = 3;
867 :
868 : // Simple element accessors for the fixed-index fields declared above.
869 : #define DECL_ELEMENT_ACCESSORS(name, type) \
870 : inline type name() const; \
871 : inline void Set##name(type value);
872 :
873 : DECL_ELEMENT_ACCESSORS(TranslationByteArray, ByteArray)
874 : DECL_ELEMENT_ACCESSORS(InlinedFunctionCount, Smi)
875 : DECL_ELEMENT_ACCESSORS(LiteralArray, FixedArray)
876 : DECL_ELEMENT_ACCESSORS(OsrBytecodeOffset, Smi)
877 : DECL_ELEMENT_ACCESSORS(OsrPcOffset, Smi)
878 : DECL_ELEMENT_ACCESSORS(OptimizationId, Smi)
879 : DECL_ELEMENT_ACCESSORS(SharedFunctionInfo, Object)
880 : DECL_ELEMENT_ACCESSORS(InliningPositions, PodArray<InliningPosition>)
881 :
882 : #undef DECL_ELEMENT_ACCESSORS
883 :
884 : // Accessors for elements of the ith deoptimization entry.
885 : #define DECL_ENTRY_ACCESSORS(name, type) \
886 : inline type name(int i) const; \
887 : inline void Set##name(int i, type value);
888 :
889 : DECL_ENTRY_ACCESSORS(BytecodeOffsetRaw, Smi)
890 : DECL_ENTRY_ACCESSORS(TranslationIndex, Smi)
891 : DECL_ENTRY_ACCESSORS(Pc, Smi)
892 :
893 : #undef DECL_ENTRY_ACCESSORS
894 :
895 : inline BailoutId BytecodeOffset(int i);
896 :
897 : inline void SetBytecodeOffset(int i, BailoutId value);
898 :
899 : inline int DeoptCount();
900 :
901 : static const int kNotInlinedIndex = -1;
902 :
903 : // Returns the inlined function at the given position in LiteralArray, or the
904 : // outer function if index == kNotInlinedIndex.
905 : class SharedFunctionInfo GetInlinedFunction(int index);
906 :
907 : // Allocates a DeoptimizationData.
908 : static Handle<DeoptimizationData> New(Isolate* isolate, int deopt_entry_count,
909 : AllocationType allocation);
910 :
911 : // Returns an empty DeoptimizationData.
912 : static Handle<DeoptimizationData> Empty(Isolate* isolate);
913 :
914 : DECL_CAST(DeoptimizationData)
915 :
916 : #ifdef ENABLE_DISASSEMBLER
917 : void DeoptimizationDataPrint(std::ostream& os); // NOLINT
918 : #endif
919 :
920 : private:
921 : // Array index of the first element of the i'th deopt entry.
922 5409201 : return kFirstDeoptEntryIndex + (i * kDeoptEntrySize);
923 : }
924 :
925 : static int LengthFor(int entry_count) { return IndexForEntry(entry_count); }  // Total array length for entry_count entries.
926 :
927 : OBJECT_CONSTRUCTORS(DeoptimizationData, FixedArray);
928 : };
929 :
930 : class SourcePositionTableWithFrameCache : public Tuple2 {  // Pairs a source position table (ByteArray) with a stack frame cache (SimpleNumberDictionary).
931 : public:
932 : DECL_ACCESSORS(source_position_table, ByteArray)
933 : DECL_ACCESSORS(stack_frame_cache, SimpleNumberDictionary)
934 :
935 : DECL_CAST(SourcePositionTableWithFrameCache)
936 :
937 : // Layout description.
938 : #define SOURCE_POSITION_TABLE_WITH_FRAME_FIELDS(V) \
939 : V(kSourcePositionTableIndex, kTaggedSize) \
940 : V(kStackFrameCacheIndex, kTaggedSize) \
941 : /* Total size. */ \
942 : V(kSize, 0)
943 :
944 : DEFINE_FIELD_OFFSET_CONSTANTS(Struct::kHeaderSize,
945 : SOURCE_POSITION_TABLE_WITH_FRAME_FIELDS)
946 : #undef SOURCE_POSITION_TABLE_WITH_FRAME_FIELDS
947 :
948 : OBJECT_CONSTRUCTORS(SourcePositionTableWithFrameCache, Tuple2);
949 : };
950 :
951 : } // namespace internal
952 : } // namespace v8
953 :
954 : #include "src/objects/object-macros-undef.h"
955 :
956 : #endif // V8_OBJECTS_CODE_H_
|