1 : // Copyright 2017 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #ifndef V8_OBJECTS_CODE_H_
6 : #define V8_OBJECTS_CODE_H_
7 :
8 : #include "src/contexts.h"
9 : #include "src/handler-table.h"
10 : #include "src/objects.h"
11 : #include "src/objects/fixed-array.h"
12 : #include "src/objects/heap-object.h"
13 : #include "src/objects/struct.h"
14 :
15 : // Has to be the last include (doesn't have include guards):
16 : #include "src/objects/object-macros.h"
17 :
18 : namespace v8 {
19 : namespace internal {
20 :
21 : class ByteArray;
22 : class BytecodeArray;
23 : class CodeDataContainer;
24 : class CodeDesc;
25 : class MaybeObject;
26 :
27 : namespace interpreter {
28 : class Register;
29 : }
30 :
31 : // Code describes objects with on-the-fly generated machine code.
32 : class Code : public HeapObject {
33 : public:
34 : NEVER_READ_ONLY_SPACE
35 : // Opaque data type for encapsulating code flags like kind, inline
36 : // cache state, and arguments count.
37 : typedef uint32_t Flags;
38 :
39 : #define CODE_KIND_LIST(V) \
40 : V(OPTIMIZED_FUNCTION) \
41 : V(BYTECODE_HANDLER) \
42 : V(STUB) \
43 : V(BUILTIN) \
44 : V(REGEXP) \
45 : V(WASM_FUNCTION) \
46 : V(WASM_TO_JS_FUNCTION) \
47 : V(JS_TO_WASM_FUNCTION) \
48 : V(WASM_INTERPRETER_ENTRY) \
49 : V(C_WASM_ENTRY)
50 :
51 : enum Kind {
52 : #define DEFINE_CODE_KIND_ENUM(name) name,
53 : CODE_KIND_LIST(DEFINE_CODE_KIND_ENUM)
54 : #undef DEFINE_CODE_KIND_ENUM
55 : NUMBER_OF_KINDS
56 : };
57 :
58 : static const char* Kind2String(Kind kind);
59 :
60 : #ifdef ENABLE_DISASSEMBLER
61 : const char* GetName(Isolate* isolate) const;
62 : void Disassemble(const char* name, std::ostream& os,
63 : Address current_pc = kNullAddress);
64 : #endif
65 :
66 : // [instruction_size]: Size of the native instructions, including embedded
67 : // data such as the safepoints table.
68 : inline int raw_instruction_size() const;
69 : inline void set_raw_instruction_size(int value);
70 :
71 : // Returns the size of the native instructions, including embedded
72 : // data such as the safepoints table. For off-heap code objects
73 : // this may differ from instruction_size in that this will return the size of
74 : // the off-heap instruction stream rather than the on-heap trampoline located
75 : // at instruction_start.
76 : inline int InstructionSize() const;
77 : int OffHeapInstructionSize() const;
78 :
79 : // [relocation_info]: Code relocation information
80 : DECL_ACCESSORS(relocation_info, ByteArray)
81 :
82 : // This function should be called only from GC.
83 : void ClearEmbeddedObjects(Heap* heap);
84 :
85 : // [deoptimization_data]: Array containing data for deopt.
86 : DECL_ACCESSORS(deoptimization_data, FixedArray)
87 :
88 : // [source_position_table]: ByteArray for the source positions table or
89 : // SourcePositionTableWithFrameCache.
90 : DECL_ACCESSORS(source_position_table, Object)
91 : inline ByteArray SourcePositionTable() const;
92 :
93 : // [code_data_container]: A container indirection for all mutable fields.
94 : DECL_ACCESSORS(code_data_container, CodeDataContainer)
95 :
96 : // [next_code_link]: Link for lists of optimized or deoptimized code.
97 : // Note that this field is stored in the {CodeDataContainer} to be mutable.
98 : inline Object next_code_link() const;
99 : inline void set_next_code_link(Object value);
100 :
101 : // Unchecked accessors to be used during GC.
102 : inline ByteArray unchecked_relocation_info() const;
103 :
104 : inline int relocation_size() const;
105 :
106 : // [kind]: Access to specific code kind.
107 : inline Kind kind() const;
108 :
109 : inline bool is_optimized_code() const;
110 : inline bool is_wasm_code() const;
111 :
112 : // Testers for interpreter builtins.
113 : inline bool is_interpreter_trampoline_builtin() const;
114 :
115 : // Tells whether the code checks the optimization marker in the function's
116 : // feedback vector.
117 : inline bool checks_optimization_marker() const;
118 :
119 : // Tells whether the outgoing parameters of this code are tagged pointers.
120 : inline bool has_tagged_params() const;
121 :
122 : // [is_turbofanned]: For kind STUB or OPTIMIZED_FUNCTION, tells whether the
123 : // code object was generated by the TurboFan optimizing compiler.
124 : inline bool is_turbofanned() const;
125 :
126 : // [can_have_weak_objects]: For kind OPTIMIZED_FUNCTION, tells whether the
127 : // embedded objects in code should be treated weakly.
128 : inline bool can_have_weak_objects() const;
129 : inline void set_can_have_weak_objects(bool value);
130 :
131 : // [builtin_index]: For builtins, tells which builtin index the code object
132 : // has. The builtin index is a non-negative integer for builtins, and -1
133 : // otherwise.
134 : inline int builtin_index() const;
135 : inline void set_builtin_index(int id);
136 : inline bool is_builtin() const;
137 :
138 : inline bool has_safepoint_info() const;
139 :
140 : // [stack_slots]: If {has_safepoint_info()}, the number of stack slots
141 : // reserved in the code prologue.
142 : inline int stack_slots() const;
143 :
144 : // [safepoint_table_offset]: If {has_safepoint_info()}, the offset in the
145 : // instruction stream where the safepoint table starts.
146 : inline int safepoint_table_offset() const;
147 : inline void set_safepoint_table_offset(int offset);
148 : int safepoint_table_size() const;
149 : bool has_safepoint_table() const;
150 :
151 : // [handler_table_offset]: The offset in the instruction stream where the
152 : // exception handler table starts.
153 : inline int handler_table_offset() const;
154 : inline void set_handler_table_offset(int offset);
155 : int handler_table_size() const;
156 : bool has_handler_table() const;
157 :
158 : // [constant_pool_offset]: Offset of the constant pool.
159 : // Valid for FLAG_enable_embedded_constant_pool only.
160 : inline int constant_pool_offset() const;
161 : inline void set_constant_pool_offset(int offset);
162 : int constant_pool_size() const;
163 : bool has_constant_pool() const;
164 :
165 : // [code_comments_offset]: Offset of the code comment section.
166 : inline int code_comments_offset() const;
167 : inline void set_code_comments_offset(int offset);
168 : inline Address code_comments() const;
169 : int code_comments_size() const;
170 : bool has_code_comments() const;
171 :
172 : // The size of the executable instruction area, without embedded metadata.
173 : int ExecutableInstructionSize() const;
174 :
175 : // [marked_for_deoptimization]: For kind OPTIMIZED_FUNCTION tells whether
176 : // the code is going to be deoptimized.
177 : inline bool marked_for_deoptimization() const;
178 : inline void set_marked_for_deoptimization(bool flag);
179 :
180 : // [embedded_objects_cleared]: For kind OPTIMIZED_FUNCTION tells whether
181 : // the embedded objects in the code marked for deoptimization were cleared.
182 : // Note that embedded_objects_cleared() implies marked_for_deoptimization().
183 : inline bool embedded_objects_cleared() const;
184 : inline void set_embedded_objects_cleared(bool flag);
185 :
186 : // [deopt_already_counted]: For kind OPTIMIZED_FUNCTION tells whether
187 : // the code was already deoptimized.
188 : inline bool deopt_already_counted() const;
189 : inline void set_deopt_already_counted(bool flag);
190 :
191 : // [is_promise_rejection]: For kind BUILTIN tells whether the
192 : // exception thrown by the code will lead to promise rejection or
193 : // uncaught if both this and is_exception_caught are set.
194 : // Use GetBuiltinCatchPrediction to access this.
195 : inline void set_is_promise_rejection(bool flag);
196 :
197 : // [is_exception_caught]: For kind BUILTIN tells whether the
198 : // exception thrown by the code will be caught internally or
199 : // uncaught if both this and is_promise_rejection are set.
200 : // Use GetBuiltinCatchPrediction to access this.
201 : inline void set_is_exception_caught(bool flag);
202 :
203 : // [is_off_heap_trampoline]: For kind BUILTIN tells whether
204 : // this is a trampoline to an off-heap builtin.
205 : inline bool is_off_heap_trampoline() const;
206 :
207 : // [constant_pool]: The constant pool for this function.
208 : inline Address constant_pool() const;
209 :
210 : // Get the safepoint entry for the given pc.
211 : SafepointEntry GetSafepointEntry(Address pc);
212 :
213 : // The entire code object including its header is copied verbatim to the
214 : // snapshot so that it can be written in one, fast, memcpy during
215 : // deserialization. The deserializer will overwrite some pointers, rather
216 : // like a runtime linker, but the random allocation addresses used in the
217 : // mksnapshot process would still be present in the unlinked snapshot data,
218 : // which would make snapshot production non-reproducible. This method wipes
219 : // out the to-be-overwritten header data for reproducible snapshots.
220 : inline void WipeOutHeader();
221 :
222 : // Clear uninitialized padding space. This ensures that the snapshot content
223 : // is deterministic.
224 : inline void clear_padding();
225 : // Initialize the flags field. Similar to clear_padding above, this ensures
226 : // that the snapshot content is deterministic.
227 : inline void initialize_flags(Kind kind, bool has_unwinding_info,
228 : bool is_turbofanned, int stack_slots,
229 : bool is_off_heap_trampoline);
230 :
231 : // Convert a target address into a code object.
232 : static inline Code GetCodeFromTargetAddress(Address address);
233 :
234 : // Convert an entry address into an object.
235 : static inline Code GetObjectFromEntryAddress(Address location_of_address);
236 :
237 : // Returns the address of the first instruction.
238 : inline Address raw_instruction_start() const;
239 :
240 : // Returns the address of the first instruction. For off-heap code objects
241 : // this differs from instruction_start (which would point to the off-heap
242 : // trampoline instead).
243 : inline Address InstructionStart() const;
244 : Address OffHeapInstructionStart() const;
245 :
246 : // Returns the address right after the last instruction.
247 : inline Address raw_instruction_end() const;
248 :
249 : // Returns the address right after the last instruction. For off-heap code
250 : // objects this differs from instruction_end (which would point to the
251 : // off-heap trampoline instead).
252 : inline Address InstructionEnd() const;
253 : Address OffHeapInstructionEnd() const;
254 :
255 : // Returns the size of the instructions, padding, relocation and unwinding
256 : // information.
257 : inline int body_size() const;
258 :
259 : // Returns the size of code and its metadata. This includes the size of code
260 : // relocation information, deoptimization data and handler table.
261 : inline int SizeIncludingMetadata() const;
262 :
263 : // Returns the address of the first relocation info (read backwards!).
264 : inline byte* relocation_start() const;
265 :
266 : // Returns the address right after the relocation info (read backwards!).
267 : inline byte* relocation_end() const;
268 :
269 : // [has_unwinding_info]: Whether this code object has unwinding information.
270 : // If it doesn't, unwinding_information_start() will point to invalid data.
271 : //
272 : // The body of all code objects has the following layout.
273 : //
274 : // +--------------------------+ <-- raw_instruction_start()
275 : // | instructions |
276 : // | ... |
277 : // +--------------------------+
278 : // | embedded metadata | <-- safepoint_table_offset()
279 : // | ... | <-- handler_table_offset()
280 : // | | <-- constant_pool_offset()
281 : // | | <-- code_comments_offset()
282 : // | |
283 : // +--------------------------+ <-- raw_instruction_end()
284 : //
285 : // If has_unwinding_info() is false, raw_instruction_end() points to the first
286 : // memory location after the end of the code object. Otherwise, the body
287 : // continues as follows:
288 : //
289 : // +--------------------------+
290 : // | padding to the next |
291 : // | 8-byte aligned address |
292 : // +--------------------------+ <-- raw_instruction_end()
293 : // | [unwinding_info_size] |
294 : // | as uint64_t |
295 : // +--------------------------+ <-- unwinding_info_start()
296 : // | unwinding info |
297 : // | ... |
298 : // +--------------------------+ <-- unwinding_info_end()
299 : //
300 : // and unwinding_info_end() points to the first memory location after the end
301 : // of the code object.
302 : //
303 : inline bool has_unwinding_info() const;
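// Note: given the layout above, the embedded metadata is presumably addressed
// relative to the instruction start, e.g. the safepoint table at
// InstructionStart() + safepoint_table_offset() and the handler table at
// InstructionStart() + handler_table_offset().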
304 :
305 : // [unwinding_info_size]: Size of the unwinding information.
306 : inline int unwinding_info_size() const;
307 : inline void set_unwinding_info_size(int value);
308 :
309 : // Returns the address of the unwinding information, if any.
310 : inline Address unwinding_info_start() const;
311 :
312 : // Returns the address right after the end of the unwinding information.
313 : inline Address unwinding_info_end() const;
314 :
315 : // Code entry point.
316 : inline Address entry() const;
317 :
318 : // Returns true if pc is inside this object's instructions.
319 : inline bool contains(Address pc);
320 :
321 : // Relocate the code by delta bytes. Called to signal that this code
322 : // object has been moved by delta bytes.
323 : void Relocate(intptr_t delta);
324 :
325 : // Migrate code from desc without flushing the instruction cache.
326 : void CopyFromNoFlush(Heap* heap, const CodeDesc& desc);
327 :
328 : // Copy the RelocInfo portion of |desc| to |dest|. The ByteArray must be
329 : // exactly the same size as the RelocInfo in |desc|.
330 : static inline void CopyRelocInfoToByteArray(ByteArray dest,
331 : const CodeDesc& desc);
332 :
333 : // Flushes the instruction cache for the executable instructions of this code
334 : // object. Make sure to call this while the code is still writable.
335 : void FlushICache() const;
336 :
337 : // Returns the object size for a given body (used for allocation).
338 : static int SizeFor(int body_size) {
339 : DCHECK_SIZE_TAG_ALIGNED(body_size);
340 : return RoundUp(kHeaderSize + body_size, kCodeAlignment);
341 : }
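// Worked example (illustrative only; the actual kHeaderSize and kCodeAlignment
// values are architecture- and build-dependent): assuming kHeaderSize == 96 and
// kCodeAlignment == 32, SizeFor(100) == RoundUp(196, 32) == 224, i.e. the body
// is padded up to the next code-aligned boundary.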
342 :
343 : // Calculate the size of the code object to report for log events. This takes
344 : // the layout of the code object into account.
345 : inline int ExecutableSize() const;
346 :
347 : DECL_CAST(Code)
348 :
349 : // Dispatched behavior.
350 : inline int CodeSize() const;
351 :
352 : DECL_PRINTER(Code)
353 : DECL_VERIFIER(Code)
354 :
355 : void PrintDeoptLocation(FILE* out, const char* str, Address pc);
356 : bool CanDeoptAt(Address pc);
357 :
358 : void SetMarkedForDeoptimization(const char* reason);
359 :
360 : inline HandlerTable::CatchPrediction GetBuiltinCatchPrediction();
361 :
362 : bool IsIsolateIndependent(Isolate* isolate);
363 :
364 : inline bool CanContainWeakObjects();
365 :
366 : inline bool IsWeakObject(HeapObject object);
367 :
368 : static inline bool IsWeakObjectInOptimizedCode(HeapObject object);
369 :
370 : // Return true if the function is inlined in the code.
371 : bool Inlines(SharedFunctionInfo sfi);
372 :
373 : class OptimizedCodeIterator;
374 :
375 : // Layout description.
376 : #define CODE_FIELDS(V) \
377 : V(kRelocationInfoOffset, kTaggedSize) \
378 : V(kDeoptimizationDataOffset, kTaggedSize) \
379 : V(kSourcePositionTableOffset, kTaggedSize) \
380 : V(kCodeDataContainerOffset, kTaggedSize) \
381 : /* Data or code not directly visited by GC starts here. */ \
382 : /* The serializer needs to copy bytes starting from here verbatim. */ \
383 : /* Objects embedded into code are visited via reloc info. */ \
384 : V(kDataStart, 0) \
385 : V(kInstructionSizeOffset, kIntSize) \
386 : V(kFlagsOffset, kIntSize) \
387 : V(kSafepointTableOffsetOffset, kIntSize) \
388 : V(kHandlerTableOffsetOffset, kIntSize) \
389 : V(kConstantPoolOffsetOffset, \
390 : FLAG_enable_embedded_constant_pool ? kIntSize : 0) \
391 : V(kCodeCommentsOffsetOffset, kIntSize) \
392 : V(kBuiltinIndexOffset, kIntSize) \
393 : /* Add padding to align the instruction start following right after */ \
394 : /* the Code object header. */ \
395 : V(kHeaderPaddingStart, CODE_POINTER_PADDING(kHeaderPaddingStart)) \
396 : V(kHeaderSize, 0)
397 :
398 : DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, CODE_FIELDS)
399 : #undef CODE_FIELDS
400 :
401 : // This documents the amount of free space we have in each Code object header
402 : // due to padding for code alignment.
403 : #if V8_TARGET_ARCH_ARM64
404 : static constexpr int kHeaderPaddingSize = 0;
405 : STATIC_ASSERT(kHeaderSize - kHeaderPaddingStart == kHeaderPaddingSize);
406 : #elif V8_TARGET_ARCH_MIPS64
407 : static constexpr int kHeaderPaddingSize = 0;
408 : STATIC_ASSERT(kHeaderSize - kHeaderPaddingStart == kHeaderPaddingSize);
409 : #elif V8_TARGET_ARCH_X64
410 : static constexpr int kHeaderPaddingSize = 0;
411 : STATIC_ASSERT(kHeaderSize - kHeaderPaddingStart == kHeaderPaddingSize);
412 : #elif V8_TARGET_ARCH_ARM
413 : static constexpr int kHeaderPaddingSize = 20;
414 : STATIC_ASSERT(kHeaderSize - kHeaderPaddingStart == kHeaderPaddingSize);
415 : #elif V8_TARGET_ARCH_IA32
416 : static constexpr int kHeaderPaddingSize = 20;
417 : STATIC_ASSERT(kHeaderSize - kHeaderPaddingStart == kHeaderPaddingSize);
418 : #elif V8_TARGET_ARCH_MIPS
419 : static constexpr int kHeaderPaddingSize = 20;
420 : STATIC_ASSERT(kHeaderSize - kHeaderPaddingStart == kHeaderPaddingSize);
421 : #elif V8_TARGET_ARCH_PPC64
422 : // No static assert possible since padding size depends on the
423 : // FLAG_enable_embedded_constant_pool runtime flag.
424 : #elif V8_TARGET_ARCH_S390X
425 : static constexpr int kHeaderPaddingSize = 0;
426 : STATIC_ASSERT(kHeaderSize - kHeaderPaddingStart == kHeaderPaddingSize);
427 : #else
428 : #error Unknown architecture.
429 : #endif
430 :
431 : inline int GetUnwindingInfoSizeOffset() const;
432 :
433 : class BodyDescriptor;
434 :
435 : // Flags layout. BitField<type, shift, size>.
436 : #define CODE_FLAGS_BIT_FIELDS(V, _) \
437 : V(HasUnwindingInfoField, bool, 1, _) \
438 : V(KindField, Kind, 5, _) \
439 : V(IsTurbofannedField, bool, 1, _) \
440 : V(StackSlotsField, int, 24, _) \
441 : V(IsOffHeapTrampoline, bool, 1, _)
442 : DEFINE_BIT_FIELDS(CODE_FLAGS_BIT_FIELDS)
443 : #undef CODE_FLAGS_BIT_FIELDS
444 : static_assert(NUMBER_OF_KINDS <= KindField::kMax, "Code::KindField size");
445 : static_assert(IsOffHeapTrampoline::kNext <= 32,
446 : "Code::flags field exhausted");
447 :
448 : // KindSpecificFlags layout (STUB, BUILTIN and OPTIMIZED_FUNCTION)
449 : #define CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS(V, _) \
450 : V(MarkedForDeoptimizationField, bool, 1, _) \
451 : V(EmbeddedObjectsClearedField, bool, 1, _) \
452 : V(DeoptAlreadyCountedField, bool, 1, _) \
453 : V(CanHaveWeakObjectsField, bool, 1, _) \
454 : V(IsPromiseRejectionField, bool, 1, _) \
455 : V(IsExceptionCaughtField, bool, 1, _)
456 : DEFINE_BIT_FIELDS(CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS)
457 : #undef CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS
458 : static_assert(IsExceptionCaughtField::kNext <= 32, "KindSpecificFlags full");
459 :
460 : // The {marked_for_deoptimization} field is accessed from generated code.
461 : static const int kMarkedForDeoptimizationBit =
462 : MarkedForDeoptimizationField::kShift;
463 :
464 : static const int kArgumentsBits = 16;
465 : // Reserve one argument count value as the "don't adapt arguments" sentinel.
466 : static const int kMaxArguments = (1 << kArgumentsBits) - 2;
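// With kArgumentsBits == 16 the largest representable count is 65535;
// reserving one value as the sentinel gives
// kMaxArguments == (1 << 16) - 2 == 65534.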
467 :
468 : private:
469 : friend class RelocIterator;
470 :
471 : bool is_promise_rejection() const;
472 : bool is_exception_caught() const;
473 :
474 : OBJECT_CONSTRUCTORS(Code, HeapObject);
475 : };
476 :
477 : class Code::OptimizedCodeIterator {
478 : public:
479 : explicit OptimizedCodeIterator(Isolate* isolate);
480 : Code Next();
481 :
482 : private:
483 : Context next_context_;
484 : Code current_code_;
485 : Isolate* isolate_;
486 :
487 : DISALLOW_HEAP_ALLOCATION(no_gc)
488 : DISALLOW_COPY_AND_ASSIGN(OptimizedCodeIterator);
489 : };
490 :
491 : // CodeDataContainer is a container for all mutable fields associated with its
492 : // referencing {Code} object. Since {Code} objects reside on write-protected
493 : // pages within the heap, their header fields need to be immutable. There is
494 : // always a 1-to-1 relation between {Code} and {CodeDataContainer}; the
495 : // referencing field {Code::code_data_container} itself is immutable.
496 : class CodeDataContainer : public HeapObject {
497 : public:
498 : NEVER_READ_ONLY_SPACE
499 : DECL_ACCESSORS(next_code_link, Object)
500 : DECL_INT_ACCESSORS(kind_specific_flags)
501 :
502 : // Clear uninitialized padding space. This ensures that the snapshot content
503 : // is deterministic.
504 : inline void clear_padding();
505 :
506 : DECL_CAST(CodeDataContainer)
507 :
508 : // Dispatched behavior.
509 : DECL_PRINTER(CodeDataContainer)
510 : DECL_VERIFIER(CodeDataContainer)
511 :
512 : // Layout description.
513 : #define CODE_DATA_FIELDS(V) \
514 : /* Weak pointer fields. */ \
515 : V(kPointerFieldsStrongEndOffset, 0) \
516 : V(kNextCodeLinkOffset, kTaggedSize) \
517 : V(kPointerFieldsWeakEndOffset, 0) \
518 : /* Raw data fields. */ \
519 : V(kKindSpecificFlagsOffset, kIntSize) \
520 : V(kUnalignedSize, OBJECT_POINTER_PADDING(kUnalignedSize)) \
521 : /* Total size. */ \
522 : V(kSize, 0)
523 :
524 : DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, CODE_DATA_FIELDS)
525 : #undef CODE_DATA_FIELDS
526 :
527 : class BodyDescriptor;
528 :
529 : OBJECT_CONSTRUCTORS(CodeDataContainer, HeapObject);
530 : };
531 :
532 : class AbstractCode : public HeapObject {
533 : public:
534 : NEVER_READ_ONLY_SPACE
535 : // All code kinds and INTERPRETED_FUNCTION.
536 : enum Kind {
537 : #define DEFINE_CODE_KIND_ENUM(name) name,
538 : CODE_KIND_LIST(DEFINE_CODE_KIND_ENUM)
539 : #undef DEFINE_CODE_KIND_ENUM
540 : INTERPRETED_FUNCTION,
541 : NUMBER_OF_KINDS
542 : };
543 :
544 : static const char* Kind2String(Kind kind);
545 :
546 : int SourcePosition(int offset);
547 : int SourceStatementPosition(int offset);
548 :
549 : // Returns the address of the first instruction.
550 : inline Address raw_instruction_start();
551 :
552 : // Returns the address of the first instruction. For off-heap code objects
553 : // this differs from instruction_start (which would point to the off-heap
554 : // trampoline instead).
555 : inline Address InstructionStart();
556 :
557 : // Returns the address right after the last instruction.
558 : inline Address raw_instruction_end();
559 :
560 : // Returns the address right after the last instruction. For off-heap code
561 : // objects this differs from instruction_end (which would point to the
562 : // off-heap trampoline instead).
563 : inline Address InstructionEnd();
564 :
565 : // Returns the size of the code instructions.
566 : inline int raw_instruction_size();
567 :
568 : // Returns the size of the native instructions, including embedded
569 : // data such as the safepoints table. For off-heap code objects
570 : // this may differ from instruction_size in that this will return the size of
571 : // the off-heap instruction stream rather than the on-heap trampoline located
572 : // at instruction_start.
573 : inline int InstructionSize();
574 :
575 : // Return the source position table.
576 : inline ByteArray source_position_table();
577 :
578 : inline Object stack_frame_cache();
579 : static void SetStackFrameCache(Handle<AbstractCode> abstract_code,
580 : Handle<SimpleNumberDictionary> cache);
581 : void DropStackFrameCache();
582 :
583 : // Returns the size of instructions and the metadata.
584 : inline int SizeIncludingMetadata();
585 :
586 : // Returns true if pc is inside this object's instructions.
587 : inline bool contains(Address pc);
588 :
589 : // Returns the AbstractCode::Kind of the code.
590 : inline Kind kind();
591 :
592 : // Calculate the size of the code object to report for log events. This takes
593 : // the layout of the code object into account.
594 : inline int ExecutableSize();
595 :
596 : DECL_CAST(AbstractCode)
597 : inline Code GetCode();
598 : inline BytecodeArray GetBytecodeArray();
599 :
600 : // Max loop nesting marker used to postpone OSR. We don't take loop
601 : // nesting that is deeper than 5 levels into account.
602 : static const int kMaxLoopNestingMarker = 6;
603 :
604 : OBJECT_CONSTRUCTORS(AbstractCode, HeapObject);
605 : };
606 :
607 : // Dependent code is a singly linked list of weak fixed arrays. Each array
608 : // contains weak pointers to code objects for one dependent group. The suffix of
609 : // the array can be filled with the undefined value if the number of codes is
610 : // less than the length of the array.
611 : //
612 : // +------+-----------------+--------+--------+-----+--------+-----------+-----+
613 : // | next | count & group 1 | code 1 | code 2 | ... | code n | undefined | ... |
614 : // +------+-----------------+--------+--------+-----+--------+-----------+-----+
615 : // |
616 : // V
617 : // +------+-----------------+--------+--------+-----+--------+-----------+-----+
618 : // | next | count & group 2 | code 1 | code 2 | ... | code m | undefined | ... |
619 : // +------+-----------------+--------+--------+-----+--------+-----------+-----+
620 : // |
621 : // V
622 : // empty_weak_fixed_array()
623 : //
624 : // The list of weak fixed arrays is ordered by dependency groups.
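// A rough traversal sketch using the test-only accessors declared below
// (Process is a hypothetical consumer; termination handling is simplified):
//   DependentCode d = head;
//   while (d.length() > 0) {   // the chain ends at empty_weak_fixed_array()
//     for (int i = 0; i < d.count(); i++) Process(d.object_at(i));
//     d = d.next_link();
//   }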
625 :
626 : class DependentCode : public WeakFixedArray {
627 : public:
628 : DECL_CAST(DependentCode)
629 :
630 : enum DependencyGroup {
631 : // Group of code that embed a transition to this map, and depend on being
632 : // deoptimized when the transition is replaced by a new version.
633 : kTransitionGroup,
634 : // Group of code that omit run-time prototype checks for prototypes
635 : // described by this map. The group is deoptimized whenever an object
636 : // described by this map changes shape (and transitions to a new map),
637 : // possibly invalidating the assumptions embedded in the code.
638 : kPrototypeCheckGroup,
639 : // Group of code that depends on global property values in property cells
640 : // not being changed.
641 : kPropertyCellChangedGroup,
642 : // Group of code that omit run-time checks for field(s) introduced by
643 : // this map, i.e. for the field type.
644 : kFieldOwnerGroup,
645 : // Group of code that omit run-time type checks for initial maps of
646 : // constructors.
647 : kInitialMapChangedGroup,
648 : // Group of code that depends on tenuring information in AllocationSites
649 : // not being changed.
650 : kAllocationSiteTenuringChangedGroup,
651 : // Group of code that depends on element transition information in
652 : // AllocationSites not being changed.
653 : kAllocationSiteTransitionChangedGroup
654 : };
655 :
656 : // Register a code dependency of {cell} on {object}.
657 : static void InstallDependency(Isolate* isolate, const MaybeObjectHandle& code,
658 : Handle<HeapObject> object,
659 : DependencyGroup group);
660 :
661 : void DeoptimizeDependentCodeGroup(Isolate* isolate, DependencyGroup group);
662 :
663 : bool MarkCodeForDeoptimization(Isolate* isolate, DependencyGroup group);
664 :
665 : // The following low-level accessors are exposed only for tests.
666 : inline DependencyGroup group();
667 : inline MaybeObject object_at(int i);
668 : inline int count();
669 : inline DependentCode next_link();
670 :
671 : private:
672 : static const char* DependencyGroupName(DependencyGroup group);
673 :
674 : // Get/Set {object}'s {DependentCode}.
675 : static DependentCode GetDependentCode(Handle<HeapObject> object);
676 : static void SetDependentCode(Handle<HeapObject> object,
677 : Handle<DependentCode> dep);
678 :
679 : static Handle<DependentCode> New(Isolate* isolate, DependencyGroup group,
680 : const MaybeObjectHandle& object,
681 : Handle<DependentCode> next);
682 : static Handle<DependentCode> EnsureSpace(Isolate* isolate,
683 : Handle<DependentCode> entries);
684 : static Handle<DependentCode> InsertWeakCode(Isolate* isolate,
685 : Handle<DependentCode> entries,
686 : DependencyGroup group,
687 : const MaybeObjectHandle& code);
688 :
689 : // Compact by removing cleared weak cells and return true if there was
690 : // any cleared weak cell.
691 : bool Compact();
692 :
693 : static int Grow(int number_of_entries) {
694 52031 : if (number_of_entries < 5) return number_of_entries + 1;
695 18604 : return number_of_entries * 5 / 4;
696 : }
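// E.g. Grow(2) == 3, Grow(4) == 5, Grow(8) == 10 and Grow(16) == 20: small
// arrays grow by a single slot, larger ones by roughly 25% (integer division).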
697 :
698 : static const int kGroupCount = kAllocationSiteTransitionChangedGroup + 1;
699 : static const int kNextLinkIndex = 0;
700 : static const int kFlagsIndex = 1;
701 : static const int kCodesStartIndex = 2;
702 :
703 : inline void set_next_link(DependentCode next);
704 : inline void set_count(int value);
705 : inline void set_object_at(int i, MaybeObject object);
706 : inline void clear_at(int i);
707 : inline void copy(int from, int to);
708 :
709 : inline int flags();
710 : inline void set_flags(int flags);
711 : class GroupField : public BitField<int, 0, 3> {};
712 : class CountField : public BitField<int, 3, 27> {};
713 : STATIC_ASSERT(kGroupCount <= GroupField::kMax + 1);
714 :
715 : OBJECT_CONSTRUCTORS(DependentCode, WeakFixedArray);
716 : };
717 :
718 : // BytecodeArray represents a sequence of interpreter bytecodes.
719 : class BytecodeArray : public FixedArrayBase {
720 : public:
721 : enum Age {
722 : kNoAgeBytecodeAge = 0,
723 : kQuadragenarianBytecodeAge,
724 : kQuinquagenarianBytecodeAge,
725 : kSexagenarianBytecodeAge,
726 : kSeptuagenarianBytecodeAge,
727 : kOctogenarianBytecodeAge,
728 : kAfterLastBytecodeAge,
729 : kFirstBytecodeAge = kNoAgeBytecodeAge,
730 : kLastBytecodeAge = kAfterLastBytecodeAge - 1,
731 : kBytecodeAgeCount = kAfterLastBytecodeAge - kFirstBytecodeAge - 1,
732 : kIsOldBytecodeAge = kSexagenarianBytecodeAge
733 : };
734 :
735 : static constexpr int SizeFor(int length) {
736 : return OBJECT_POINTER_ALIGN(kHeaderSize + length);
737 : }
738 :
739 : // Setter and getter
740 : inline byte get(int index) const;
741 : inline void set(int index, byte value);
742 :
743 : // Returns data start address.
744 : inline Address GetFirstBytecodeAddress();
745 :
746 : // Accessors for frame size.
747 : inline int frame_size() const;
748 : inline void set_frame_size(int frame_size);
749 :
750 : // Accessor for register count (derived from frame_size).
751 : inline int register_count() const;
752 :
753 : // Accessors for parameter count (including implicit 'this' receiver).
754 : inline int parameter_count() const;
755 : inline void set_parameter_count(int number_of_parameters);
756 :
757 : // Register used to pass the incoming new.target or generator object from the
758 : // function call.
759 : inline interpreter::Register incoming_new_target_or_generator_register()
760 : const;
761 : inline void set_incoming_new_target_or_generator_register(
762 : interpreter::Register incoming_new_target_or_generator_register);
763 :
764 : // Accessors for profiling count.
765 : inline int interrupt_budget() const;
766 : inline void set_interrupt_budget(int interrupt_budget);
767 :
768 : // Accessors for OSR loop nesting level.
769 : inline int osr_loop_nesting_level() const;
770 : inline void set_osr_loop_nesting_level(int depth);
771 :
772 : // Accessors for bytecode's code age.
773 : inline Age bytecode_age() const;
774 : inline void set_bytecode_age(Age age);
775 :
776 : // Accessors for the constant pool.
777 : DECL_ACCESSORS(constant_pool, FixedArray)
778 :
779 : // Accessors for handler table containing offsets of exception handlers.
780 : DECL_ACCESSORS(handler_table, ByteArray)
781 :
782 : // Accessors for source position table containing mappings between byte code
783 : // offset and source position or SourcePositionTableWithFrameCache.
784 : DECL_ACCESSORS(source_position_table, Object)
785 :
786 : inline ByteArray SourcePositionTable();
787 : inline bool HasSourcePositionTable();
788 : inline void ClearFrameCacheFromSourcePositionTable();
789 :
790 : DECL_CAST(BytecodeArray)
791 :
792 : // Dispatched behavior.
793 : inline int BytecodeArraySize();
794 :
795 : inline int raw_instruction_size();
796 :
797 : // Returns the size of bytecode and its metadata. This includes the size of
798 : // bytecode, constant pool, source position table, and handler table.
799 : inline int SizeIncludingMetadata();
800 :
801 : int SourcePosition(int offset);
802 : int SourceStatementPosition(int offset);
803 :
804 : DECL_PRINTER(BytecodeArray)
805 : DECL_VERIFIER(BytecodeArray)
806 :
807 : void Disassemble(std::ostream& os);
808 :
809 : void CopyBytecodesTo(BytecodeArray to);
810 :
811 : // Bytecode aging
812 : bool IsOld() const;
813 : void MakeOlder();
814 :
815 : // Clear uninitialized padding space. This ensures that the snapshot content
816 : // is deterministic.
817 : inline void clear_padding();
818 :
819 : // Compares only the bytecode array but not any of the header fields.
820 : bool IsBytecodeEqual(const BytecodeArray other) const;
821 :
822 : // Layout description.
823 : #define BYTECODE_ARRAY_FIELDS(V) \
824 : /* Pointer fields. */ \
825 : V(kConstantPoolOffset, kTaggedSize) \
826 : V(kHandlerTableOffset, kTaggedSize) \
827 : V(kSourcePositionTableOffset, kTaggedSize) \
828 : V(kFrameSizeOffset, kIntSize) \
829 : V(kParameterSizeOffset, kIntSize) \
830 : V(kIncomingNewTargetOrGeneratorRegisterOffset, kIntSize) \
831 : V(kInterruptBudgetOffset, kIntSize) \
832 : V(kOSRNestingLevelOffset, kCharSize) \
833 : V(kBytecodeAgeOffset, kCharSize) \
834 : /* Total size. */ \
835 : V(kHeaderSize, 0)
836 :
837 : DEFINE_FIELD_OFFSET_CONSTANTS(FixedArrayBase::kHeaderSize,
838 : BYTECODE_ARRAY_FIELDS)
839 : #undef BYTECODE_ARRAY_FIELDS
840 :
841 : // Maximal memory consumption for a single BytecodeArray.
842 : static const int kMaxSize = 512 * MB;
843 : // Maximal length of a single BytecodeArray.
844 : static const int kMaxLength = kMaxSize - kHeaderSize;
845 :
846 : class BodyDescriptor;
847 :
848 : OBJECT_CONSTRUCTORS(BytecodeArray, FixedArrayBase);
849 : };
850 :
851 : // DeoptimizationData is a fixed array used to hold the deoptimization data for
852 : // optimized code. It also contains information about functions that were
853 : // inlined. If N different functions were inlined then the first N elements of
854 : // the literal array will contain these functions.
855 : //
856 : // It can be empty.
857 : class DeoptimizationData : public FixedArray {
858 : public:
859 : // Layout description. Indices in the array.
860 : static const int kTranslationByteArrayIndex = 0;
861 : static const int kInlinedFunctionCountIndex = 1;
862 : static const int kLiteralArrayIndex = 2;
863 : static const int kOsrBytecodeOffsetIndex = 3;
864 : static const int kOsrPcOffsetIndex = 4;
865 : static const int kOptimizationIdIndex = 5;
866 : static const int kSharedFunctionInfoIndex = 6;
867 : static const int kInliningPositionsIndex = 7;
868 : static const int kFirstDeoptEntryIndex = 8;
869 :
870 : // Offsets of deopt entry elements relative to the start of the entry.
871 : static const int kBytecodeOffsetRawOffset = 0;
872 : static const int kTranslationIndexOffset = 1;
873 : static const int kPcOffset = 2;
874 : static const int kDeoptEntrySize = 3;
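// E.g. deopt entry i == 2 occupies array indices
// kFirstDeoptEntryIndex + 2 * kDeoptEntrySize == 8 + 6 == 14 through 16: the
// raw bytecode offset at index 14, the translation index at 15 and the pc at
// 16 (cf. IndexForEntry below).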
875 :
876 : // Simple element accessors.
877 : #define DECL_ELEMENT_ACCESSORS(name, type) \
878 : inline type name() const; \
879 : inline void Set##name(type value);
880 :
881 : DECL_ELEMENT_ACCESSORS(TranslationByteArray, ByteArray)
882 : DECL_ELEMENT_ACCESSORS(InlinedFunctionCount, Smi)
883 : DECL_ELEMENT_ACCESSORS(LiteralArray, FixedArray)
884 : DECL_ELEMENT_ACCESSORS(OsrBytecodeOffset, Smi)
885 : DECL_ELEMENT_ACCESSORS(OsrPcOffset, Smi)
886 : DECL_ELEMENT_ACCESSORS(OptimizationId, Smi)
887 : DECL_ELEMENT_ACCESSORS(SharedFunctionInfo, Object)
888 : DECL_ELEMENT_ACCESSORS(InliningPositions, PodArray<InliningPosition>)
889 :
890 : #undef DECL_ELEMENT_ACCESSORS
891 :
892 : // Accessors for elements of the ith deoptimization entry.
893 : #define DECL_ENTRY_ACCESSORS(name, type) \
894 : inline type name(int i) const; \
895 : inline void Set##name(int i, type value);
896 :
897 : DECL_ENTRY_ACCESSORS(BytecodeOffsetRaw, Smi)
898 : DECL_ENTRY_ACCESSORS(TranslationIndex, Smi)
899 : DECL_ENTRY_ACCESSORS(Pc, Smi)
900 :
901 : #undef DECL_ENTRY_ACCESSORS
902 :
903 : inline BailoutId BytecodeOffset(int i);
904 :
905 : inline void SetBytecodeOffset(int i, BailoutId value);
906 :
907 : inline int DeoptCount();
908 :
909 : static const int kNotInlinedIndex = -1;
910 :
911 : // Returns the inlined function at the given position in LiteralArray, or the
912 : // outer function if index == kNotInlinedIndex.
913 : class SharedFunctionInfo GetInlinedFunction(int index);
914 :
915 : // Allocates a DeoptimizationData.
916 : static Handle<DeoptimizationData> New(Isolate* isolate, int deopt_entry_count,
917 : PretenureFlag pretenure);
918 :
919 : // Return an empty DeoptimizationData.
920 : static Handle<DeoptimizationData> Empty(Isolate* isolate);
921 :
922 : DECL_CAST(DeoptimizationData)
923 :
924 : #ifdef ENABLE_DISASSEMBLER
925 : void DeoptimizationDataPrint(std::ostream& os); // NOLINT
926 : #endif
927 :
928 : private:
929 : static int IndexForEntry(int i) {
930 : return kFirstDeoptEntryIndex + (i * kDeoptEntrySize);
931 : }
932 :
933 : static int LengthFor(int entry_count) { return IndexForEntry(entry_count); }
934 :
935 : OBJECT_CONSTRUCTORS(DeoptimizationData, FixedArray);
936 : };
937 :
938 : class SourcePositionTableWithFrameCache : public Tuple2 {
939 : public:
940 : DECL_ACCESSORS(source_position_table, ByteArray)
941 : DECL_ACCESSORS(stack_frame_cache, SimpleNumberDictionary)
942 :
943 : DECL_CAST(SourcePositionTableWithFrameCache)
944 :
945 : // Layout description.
946 : #define SOURCE_POSITION_TABLE_WITH_FRAME_FIELDS(V) \
947 : V(kSourcePositionTableIndex, kTaggedSize) \
948 : V(kStackFrameCacheIndex, kTaggedSize) \
949 : /* Total size. */ \
950 : V(kSize, 0)
951 :
952 : DEFINE_FIELD_OFFSET_CONSTANTS(Struct::kHeaderSize,
953 : SOURCE_POSITION_TABLE_WITH_FRAME_FIELDS)
954 : #undef SOURCE_POSITION_TABLE_WITH_FRAME_FIELDS
955 :
956 : OBJECT_CONSTRUCTORS(SourcePositionTableWithFrameCache, Tuple2);
957 : };
958 :
959 : } // namespace internal
960 : } // namespace v8
961 :
962 : #include "src/objects/object-macros-undef.h"
963 :
964 : #endif // V8_OBJECTS_CODE_H_