Line data Source code
1 : // Copyright 2017 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #ifndef V8_OBJECTS_CODE_INL_H_
6 : #define V8_OBJECTS_CODE_INL_H_
7 :
8 : #include "src/objects/code.h"
9 :
10 : #include "src/objects/dictionary.h"
11 : #include "src/v8memory.h"
12 :
13 : // Has to be the last include (doesn't have include guards):
14 : #include "src/objects/object-macros.h"
15 :
16 : namespace v8 {
17 : namespace internal {
18 :
// Generated Is<Type>() predicates and Type::cast() helpers for the types
// defined in code.h.
TYPE_CHECKER(BytecodeArray, BYTECODE_ARRAY_TYPE)
TYPE_CHECKER(Code, CODE_TYPE)

CAST_ACCESSOR(AbstractCode)
CAST_ACCESSOR(BytecodeArray)
CAST_ACCESSOR(Code)
CAST_ACCESSOR(DependentCode)
CAST_ACCESSOR(DeoptimizationData)
CAST_ACCESSOR(HandlerTable)
28 :
29 42398 : int AbstractCode::instruction_size() {
30 42398 : if (IsCode()) {
31 41969 : return GetCode()->instruction_size();
32 : } else {
33 429 : return GetBytecodeArray()->length();
34 : }
35 : }
36 :
37 1567074 : ByteArray* AbstractCode::source_position_table() {
38 1567074 : if (IsCode()) {
39 42182 : return GetCode()->SourcePositionTable();
40 : } else {
41 1524892 : return GetBytecodeArray()->SourcePositionTable();
42 : }
43 : }
44 :
// Returns the stack-frame cache attached to this code's source position
// table slot, or Smi::kZero when no cache has been attached.
Object* AbstractCode::stack_frame_cache() {
  Object* maybe_table;
  if (IsCode()) {
    maybe_table = GetCode()->source_position_table();
  } else {
    maybe_table = GetBytecodeArray()->source_position_table();
  }
  // The slot holds either a plain table (no cache) or a
  // SourcePositionTableWithFrameCache wrapper.
  if (maybe_table->IsSourcePositionTableWithFrameCache()) {
    return SourcePositionTableWithFrameCache::cast(maybe_table)
        ->stack_frame_cache();
  }
  return Smi::kZero;
}
58 :
59 0 : int AbstractCode::SizeIncludingMetadata() {
60 0 : if (IsCode()) {
61 0 : return GetCode()->SizeIncludingMetadata();
62 : } else {
63 0 : return GetBytecodeArray()->SizeIncludingMetadata();
64 : }
65 : }
66 142447 : int AbstractCode::ExecutableSize() {
67 142447 : if (IsCode()) {
68 141350 : return GetCode()->ExecutableSize();
69 : } else {
70 1097 : return GetBytecodeArray()->BytecodeArraySize();
71 : }
72 : }
73 :
74 185289 : Address AbstractCode::instruction_start() {
75 185289 : if (IsCode()) {
76 183385 : return GetCode()->instruction_start();
77 : } else {
78 1904 : return GetBytecodeArray()->GetFirstBytecodeAddress();
79 : }
80 : }
81 :
82 15 : Address AbstractCode::instruction_end() {
83 15 : if (IsCode()) {
84 3 : return GetCode()->instruction_end();
85 : } else {
86 24 : return GetBytecodeArray()->GetFirstBytecodeAddress() +
87 12 : GetBytecodeArray()->length();
88 : }
89 : }
90 :
91 : bool AbstractCode::contains(byte* inner_pointer) {
92 24 : return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
93 : }
94 :
95 112487 : AbstractCode::Kind AbstractCode::kind() {
96 112487 : if (IsCode()) {
97 110259 : return static_cast<AbstractCode::Kind>(GetCode()->kind());
98 : } else {
99 : return INTERPRETED_FUNCTION;
100 : }
101 : }
102 :
103 : Code* AbstractCode::GetCode() { return Code::cast(this); }
104 :
105 : BytecodeArray* AbstractCode::GetBytecodeArray() {
106 : return BytecodeArray::cast(this);
107 : }
108 :
// DependentCode layout: a next-link slot, a Smi flags slot packing the entry
// count and dependency group, then the code entries starting at
// kCodesStartIndex.

DependentCode* DependentCode::next_link() {
  return DependentCode::cast(get(kNextLinkIndex));
}

void DependentCode::set_next_link(DependentCode* next) {
  set(kNextLinkIndex, next);
}

// flags() packs both CountField and GroupField.
int DependentCode::flags() { return Smi::ToInt(get(kFlagsIndex)); }

void DependentCode::set_flags(int flags) {
  set(kFlagsIndex, Smi::FromInt(flags));
}

int DependentCode::count() { return CountField::decode(flags()); }

void DependentCode::set_count(int value) {
  set_flags(CountField::update(flags(), value));
}

DependentCode::DependencyGroup DependentCode::group() {
  return static_cast<DependencyGroup>(GroupField::decode(flags()));
}

void DependentCode::set_group(DependentCode::DependencyGroup group) {
  set_flags(GroupField::update(flags(), static_cast<int>(group)));
}

// Entry accessors; index i is relative to kCodesStartIndex.
void DependentCode::set_object_at(int i, Object* object) {
  set(kCodesStartIndex + i, object);
}

Object* DependentCode::object_at(int i) { return get(kCodesStartIndex + i); }

void DependentCode::clear_at(int i) { set_undefined(kCodesStartIndex + i); }

void DependentCode::copy(int from, int to) {
  set(kCodesStartIndex + to, get(kCodesStartIndex + from));
}
148 :
INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
INT_ACCESSORS(Code, constant_pool_offset, kConstantPoolOffset)
// Like ACCESSORS, but additionally checks on write that the stored value is
// not in new space: Code header fields are expected to reference only
// tenured objects.
#define CODE_ACCESSORS(name, type, offset) \
  ACCESSORS_CHECKED2(Code, name, type, offset, true, \
                     !GetHeap()->InNewSpace(value))
CODE_ACCESSORS(relocation_info, ByteArray, kRelocationInfoOffset)
CODE_ACCESSORS(handler_table, FixedArray, kHandlerTableOffset)
CODE_ACCESSORS(deoptimization_data, FixedArray, kDeoptimizationDataOffset)
// Either a ByteArray or a SourcePositionTableWithFrameCache.
CODE_ACCESSORS(source_position_table, Object, kSourcePositionTableOffset)
CODE_ACCESSORS(trap_handler_index, Smi, kTrapHandlerIndex)
// For stubs this slot is reused to hold the stub key (see stub_key()).
CODE_ACCESSORS(raw_type_feedback_info, Object, kTypeFeedbackInfoOffset)
CODE_ACCESSORS(next_code_link, Object, kNextCodeLinkOffset)
#undef CODE_ACCESSORS
162 :
// Clears the header's heap-object fields so the GC never observes stale
// pointers in a dead Code object.
void Code::WipeOutHeader() {
  WRITE_FIELD(this, kRelocationInfoOffset, nullptr);
  WRITE_FIELD(this, kHandlerTableOffset, nullptr);
  WRITE_FIELD(this, kDeoptimizationDataOffset, nullptr);
  WRITE_FIELD(this, kSourcePositionTableOffset, nullptr);
  // Do not wipe out major/minor keys on a code stub or IC
  if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) {
    WRITE_FIELD(this, kTypeFeedbackInfoOffset, nullptr);
  }
  WRITE_FIELD(this, kNextCodeLinkOffset, nullptr);
}
174 :
// Zeroes the unused header padding and the trailing bytes between the end of
// the instructions (or unwinding info, when present) and the end of the
// object, giving the object deterministic contents.
void Code::clear_padding() {
  memset(address() + kHeaderPaddingStart, 0, kHeaderSize - kHeaderPaddingStart);
  Address data_end =
      has_unwinding_info() ? unwinding_info_end() : instruction_end();
  memset(data_end, 0, CodeSize() - (data_end - address()));
}
181 :
// Returns the source position table, unwrapping the optional
// SourcePositionTableWithFrameCache wrapper.
ByteArray* Code::SourcePositionTable() const {
  Object* maybe_table = source_position_table();
  if (maybe_table->IsByteArray()) return ByteArray::cast(maybe_table);
  DCHECK(maybe_table->IsSourcePositionTableWithFrameCache());
  return SourcePositionTableWithFrameCache::cast(maybe_table)
      ->source_position_table();
}
189 :
// For stubs, the type-feedback-info slot is reused to store the stub key as
// a Smi.
uint32_t Code::stub_key() const {
  DCHECK(is_stub());
  Smi* smi_key = Smi::cast(raw_type_feedback_info());
  return static_cast<uint32_t>(smi_key->value());
}

void Code::set_stub_key(uint32_t key) {
  DCHECK(is_stub());
  set_raw_type_feedback_info(Smi::FromInt(key));
}
200 :
// Instructions start immediately after the fixed-size Code header.
byte* Code::instruction_start() const {
  return const_cast<byte*>(FIELD_ADDR_CONST(this, kHeaderSize));
}

byte* Code::instruction_end() const {
  return instruction_start() + instruction_size();
}

// Unwinding info, when present, follows the instructions: a uint64 size
// field at an 8-byte-aligned offset, then the unwinding data itself.
int Code::GetUnwindingInfoSizeOffset() const {
  DCHECK(has_unwinding_info());
  return RoundUp(kHeaderSize + instruction_size(), kInt64Size);
}

int Code::unwinding_info_size() const {
  DCHECK(has_unwinding_info());
  return static_cast<int>(
      READ_UINT64_FIELD(this, GetUnwindingInfoSizeOffset()));
}

void Code::set_unwinding_info_size(int value) {
  DCHECK(has_unwinding_info());
  WRITE_UINT64_FIELD(this, GetUnwindingInfoSizeOffset(), value);
}

// Data starts right after the 8-byte size field.
byte* Code::unwinding_info_start() const {
  DCHECK(has_unwinding_info());
  return const_cast<byte*>(
             FIELD_ADDR_CONST(this, GetUnwindingInfoSizeOffset())) +
         kInt64Size;
}

byte* Code::unwinding_info_end() const {
  DCHECK(has_unwinding_info());
  return unwinding_info_start() + unwinding_info_size();
}
236 :
// Size of the non-header payload (instructions plus optional unwinding
// info), rounded up to object alignment.
int Code::body_size() const {
  int unpadded_body_size =
      has_unwinding_info()
          ? static_cast<int>(unwinding_info_end() - instruction_start())
          : instruction_size();
  return RoundUp(unpadded_body_size, kObjectAlignment);
}

// Object size plus the sizes of the side tables it points at.
int Code::SizeIncludingMetadata() const {
  int size = CodeSize();
  size += relocation_info()->Size();
  size += deoptimization_data()->Size();
  size += handler_table()->Size();
  return size;
}
252 :
// Reads the relocation-info slot without a type check; usable while the
// object's map is marked during GC, when a checked cast would fail.
ByteArray* Code::unchecked_relocation_info() const {
  return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
}

byte* Code::relocation_start() const {
  return unchecked_relocation_info()->GetDataStartAddress();
}

int Code::relocation_size() const {
  return unchecked_relocation_info()->length();
}

byte* Code::entry() const { return instruction_start(); }

// Note: the upper bound is inclusive, so a pointer one past the object is
// still considered contained.
bool Code::contains(byte* inner_pointer) {
  return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
}

int Code::ExecutableSize() const {
  // Check that the assumptions about the layout of the code object holds.
  DCHECK_EQ(static_cast<int>(instruction_start() - address()),
            Code::kHeaderSize);
  return instruction_size() + Code::kHeaderSize;
}
277 :
// Total allocated size of this Code object (header + padded body).
int Code::CodeSize() const { return SizeFor(body_size()); }

Code::Kind Code::kind() const {
  return KindField::decode(READ_UINT32_FIELD(this, kFlagsOffset));
}

// Overwrites the whole flags word with just the kind; used at allocation.
void Code::initialize_flags(Kind kind) {
  WRITE_UINT32_FIELD(this, kFlagsOffset, KindField::encode(kind));
}

// Updates only the kind bits, preserving the other flag bits.
void Code::set_kind(Kind kind) {
  STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
  uint32_t previous = READ_UINT32_FIELD(this, kFlagsOffset);
  uint32_t updated_value = KindField::update(previous, kind);
  WRITE_UINT32_FIELD(this, kFlagsOffset, updated_value);
}

// For initialization.
void Code::set_raw_kind_specific_flags1(int value) {
  WRITE_INT_FIELD(this, kKindSpecificFlags1Offset, value);
}

void Code::set_raw_kind_specific_flags2(int value) {
  WRITE_INT_FIELD(this, kKindSpecificFlags2Offset, value);
}
303 :
// True iff this code object is one of the interpreter's entry / bytecode-
// advance / bytecode-dispatch trampoline builtins.
inline bool Code::is_interpreter_trampoline_builtin() const {
  Builtins* builtins = GetIsolate()->builtins();
  bool is_interpreter_trampoline =
      (this == builtins->builtin(Builtins::kInterpreterEntryTrampoline) ||
       this == builtins->builtin(Builtins::kInterpreterEnterBytecodeAdvance) ||
       this == builtins->builtin(Builtins::kInterpreterEnterBytecodeDispatch));
  DCHECK_IMPLIES(is_interpreter_trampoline, !Builtins::IsLazy(builtin_index()));
  return is_interpreter_trampoline;
}

// True iff running this code checks the function's optimization marker:
// either one of the lazy-compile/entry/check builtins, or optimized code
// that has been marked for deoptimization.
inline bool Code::checks_optimization_marker() const {
  Builtins* builtins = GetIsolate()->builtins();
  bool checks_marker =
      (this == builtins->builtin(Builtins::kCompileLazy) ||
       this == builtins->builtin(Builtins::kInterpreterEntryTrampoline) ||
       this == builtins->builtin(Builtins::kCheckOptimizationMarker));
  DCHECK_IMPLIES(checks_marker, !Builtins::IsLazy(builtin_index()));
  return checks_marker ||
         (kind() == OPTIMIZED_FUNCTION && marked_for_deoptimization());
}
324 :
// Whether an unwinding-info blob follows the instructions (see
// GetUnwindingInfoSizeOffset and friends).
inline bool Code::has_unwinding_info() const {
  return HasUnwindingInfoField::decode(READ_UINT32_FIELD(this, kFlagsOffset));
}

inline void Code::set_has_unwinding_info(bool state) {
  uint32_t previous = READ_UINT32_FIELD(this, kFlagsOffset);
  uint32_t updated_value = HasUnwindingInfoField::update(previous, state);
  WRITE_UINT32_FIELD(this, kFlagsOffset, updated_value);
}

// Bit in kind-specific flags word 2: whether this code's stack frame holds
// tagged parameter values.
inline bool Code::has_tagged_params() const {
  int flags = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  return HasTaggedStackField::decode(flags);
}

inline void Code::set_has_tagged_params(bool value) {
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = HasTaggedStackField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}
345 :
// The following accessors wrap individual bit fields in kind-specific flags
// word 1. The DCHECKs restrict each bit to the code kinds for which it is
// meaningful (OPTIMIZED_FUNCTION or BUILTIN).

inline bool Code::is_turbofanned() const {
  return IsTurbofannedField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}

inline void Code::set_is_turbofanned(bool value) {
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = IsTurbofannedField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}

inline bool Code::can_have_weak_objects() const {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  return CanHaveWeakObjectsField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}

inline void Code::set_can_have_weak_objects(bool value) {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = CanHaveWeakObjectsField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}

inline bool Code::is_construct_stub() const {
  DCHECK(kind() == BUILTIN);
  return IsConstructStubField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}

inline void Code::set_is_construct_stub(bool value) {
  DCHECK(kind() == BUILTIN);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = IsConstructStubField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}

// Promise-rejection / exception-caught bits feed GetBuiltinCatchPrediction.
inline bool Code::is_promise_rejection() const {
  DCHECK(kind() == BUILTIN);
  return IsPromiseRejectionField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}

inline void Code::set_is_promise_rejection(bool value) {
  DCHECK(kind() == BUILTIN);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = IsPromiseRejectionField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}

inline bool Code::is_exception_caught() const {
  DCHECK(kind() == BUILTIN);
  return IsExceptionCaughtField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}

inline void Code::set_is_exception_caught(bool value) {
  DCHECK(kind() == BUILTIN);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = IsExceptionCaughtField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
408 :
// Maps this builtin's flag bits to the catch prediction used by the handler
// table; defaults to UNCAUGHT when neither bit is set.
inline HandlerTable::CatchPrediction Code::GetBuiltinCatchPrediction() {
  if (is_promise_rejection()) return HandlerTable::PROMISE;
  if (is_exception_caught()) return HandlerTable::CAUGHT;
  return HandlerTable::UNCAUGHT;
}
414 :
// Index into the isolate's builtins table, or -1 when this code object is
// not a builtin.
int Code::builtin_index() const {
  int index = READ_INT_FIELD(this, kBuiltinIndexOffset);
  DCHECK(index == -1 || Builtins::IsBuiltinId(index));
  return index;
}

void Code::set_builtin_index(int index) {
  DCHECK(index == -1 || Builtins::IsBuiltinId(index));
  WRITE_INT_FIELD(this, kBuiltinIndexOffset, index);
}

bool Code::is_builtin() const { return builtin_index() != -1; }
427 :
428 : unsigned Code::stack_slots() const {
429 : DCHECK(is_turbofanned());
430 : return StackSlotsField::decode(
431 4354843 : READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
432 : }
433 :
434 1300545 : void Code::set_stack_slots(unsigned slots) {
435 1300545 : CHECK(slots <= (1 << kStackSlotsBitCount));
436 : DCHECK(is_turbofanned());
437 1300545 : int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
438 : int updated = StackSlotsField::update(previous, slots);
439 1300545 : WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
440 1300545 : }
441 :
442 : unsigned Code::safepoint_table_offset() const {
443 : DCHECK(is_turbofanned());
444 : return SafepointTableOffsetField::decode(
445 2762125 : READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
446 : }
447 :
448 1300545 : void Code::set_safepoint_table_offset(unsigned offset) {
449 1300545 : CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
450 : DCHECK(is_turbofanned());
451 : DCHECK(IsAligned(offset, static_cast<unsigned>(kIntSize)));
452 1300545 : int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
453 : int updated = SafepointTableOffsetField::update(previous, offset);
454 1300545 : WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
455 1300545 : }
456 :
// Deoptimization state bits; only meaningful on optimized code.
bool Code::marked_for_deoptimization() const {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  return MarkedForDeoptimizationField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}

void Code::set_marked_for_deoptimization(bool flag) {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  DCHECK_IMPLIES(flag, AllowDeoptimization::IsAllowed(GetIsolate()));
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = MarkedForDeoptimizationField::update(previous, flag);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}

// Tracks whether this code's deopt has already been counted (to avoid
// double-counting — TODO confirm against callers).
bool Code::deopt_already_counted() const {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  return DeoptAlreadyCountedField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}

void Code::set_deopt_already_counted(bool flag) {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  DCHECK_IMPLIES(flag, AllowDeoptimization::IsAllowed(GetIsolate()));
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = DeoptAlreadyCountedField::update(previous, flag);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}

// Kind predicates.
bool Code::is_stub() const { return kind() == STUB; }
bool Code::is_optimized_code() const { return kind() == OPTIMIZED_FUNCTION; }
bool Code::is_wasm_code() const { return kind() == WASM_FUNCTION; }
488 :
// Returns the address of the embedded constant pool, or nullptr when the
// feature is disabled or the recorded offset lies past the instructions
// (i.e. no constant pool was emitted).
Address Code::constant_pool() {
  Address constant_pool = nullptr;
  if (FLAG_enable_embedded_constant_pool) {
    int offset = constant_pool_offset();
    if (offset < instruction_size()) {
      constant_pool = FIELD_ADDR(this, kHeaderSize + offset);
    }
  }
  return constant_pool;
}

// Recovers the Code object from the address of its first instruction.
Code* Code::GetCodeFromTargetAddress(Address address) {
  HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
  // GetCodeFromTargetAddress might be called when marking objects during mark
  // sweep. reinterpret_cast is therefore used instead of the more appropriate
  // Code::cast. Code::cast does not work when the object's map is
  // marked.
  Code* result = reinterpret_cast<Code*>(code);
  return result;
}

// Maps a code-entry address (start of instructions) back to its heap object.
Object* Code::GetObjectFromCodeEntry(Address code_entry) {
  return HeapObject::FromAddress(code_entry - Code::kHeaderSize);
}

Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
  return GetObjectFromCodeEntry(Memory::Address_at(location_of_address));
}
517 :
518 : bool Code::CanContainWeakObjects() {
519 11239273 : return is_optimized_code() && can_have_weak_objects();
520 : }
521 :
522 7902066 : bool Code::IsWeakObject(Object* object) {
523 7902066 : return (CanContainWeakObjects() && IsWeakObjectInOptimizedCode(object));
524 : }
525 :
526 6881525 : bool Code::IsWeakObjectInOptimizedCode(Object* object) {
527 6881525 : if (object->IsMap()) {
528 173836 : return Map::cast(object)->CanTransition();
529 : }
530 6707689 : if (object->IsCell()) {
531 : object = Cell::cast(object)->value();
532 6706003 : } else if (object->IsPropertyCell()) {
533 : object = PropertyCell::cast(object)->value();
534 : }
535 12500401 : if (object->IsJSReceiver() || object->IsContext()) {
536 : return true;
537 : }
538 5525352 : return false;
539 : }
540 :
// Byte-wise bytecode access; the bytecodes start right after the header.
byte BytecodeArray::get(int index) {
  DCHECK(index >= 0 && index < this->length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}

void BytecodeArray::set(int index, byte value) {
  DCHECK(index >= 0 && index < this->length());
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
}

// Frame size is the interpreter register file size in bytes (see
// register_count()); must be pointer-size aligned.
void BytecodeArray::set_frame_size(int frame_size) {
  DCHECK_GE(frame_size, 0);
  DCHECK(IsAligned(frame_size, static_cast<unsigned>(kPointerSize)));
  WRITE_INT_FIELD(this, kFrameSizeOffset, frame_size);
}

int BytecodeArray::frame_size() const {
  return READ_INT_FIELD(this, kFrameSizeOffset);
}
560 :
// Number of interpreter registers, derived from the byte frame size.
int BytecodeArray::register_count() const {
  return frame_size() / kPointerSize;
}

void BytecodeArray::set_parameter_count(int number_of_parameters) {
  DCHECK_GE(number_of_parameters, 0);
  // Parameter count is stored as the size on stack of the parameters to allow
  // it to be used directly by generated code.
  WRITE_INT_FIELD(this, kParameterSizeOffset,
                  (number_of_parameters << kPointerSizeLog2));
}
572 :
// A stored operand of 0 means "no register" (the setter asserts a valid
// register never encodes to 0) and maps to the invalid register.
interpreter::Register BytecodeArray::incoming_new_target_or_generator_register()
    const {
  int register_operand =
      READ_INT_FIELD(this, kIncomingNewTargetOrGeneratorRegisterOffset);
  if (register_operand == 0) {
    return interpreter::Register::invalid_value();
  } else {
    return interpreter::Register::FromOperand(register_operand);
  }
}

void BytecodeArray::set_incoming_new_target_or_generator_register(
    interpreter::Register incoming_new_target_or_generator_register) {
  if (!incoming_new_target_or_generator_register.is_valid()) {
    WRITE_INT_FIELD(this, kIncomingNewTargetOrGeneratorRegisterOffset, 0);
  } else {
    DCHECK(incoming_new_target_or_generator_register.index() <
           register_count());
    DCHECK_NE(0, incoming_new_target_or_generator_register.ToOperand());
    WRITE_INT_FIELD(this, kIncomingNewTargetOrGeneratorRegisterOffset,
                    incoming_new_target_or_generator_register.ToOperand());
  }
}
596 :
int BytecodeArray::interrupt_budget() const {
  return READ_INT_FIELD(this, kInterruptBudgetOffset);
}

void BytecodeArray::set_interrupt_budget(int interrupt_budget) {
  DCHECK_GE(interrupt_budget, 0);
  WRITE_INT_FIELD(this, kInterruptBudgetOffset, interrupt_budget);
}

// The OSR loop nesting level fits in an int8 (see the STATIC_ASSERT below).
int BytecodeArray::osr_loop_nesting_level() const {
  return READ_INT8_FIELD(this, kOSRNestingLevelOffset);
}

void BytecodeArray::set_osr_loop_nesting_level(int depth) {
  DCHECK(0 <= depth && depth <= AbstractCode::kMaxLoopNestingMarker);
  STATIC_ASSERT(AbstractCode::kMaxLoopNestingMarker < kMaxInt8);
  WRITE_INT8_FIELD(this, kOSRNestingLevelOffset, depth);
}

BytecodeArray::Age BytecodeArray::bytecode_age() const {
  // Bytecode is aged by the concurrent marker.
  return static_cast<Age>(RELAXED_READ_INT8_FIELD(this, kBytecodeAgeOffset));
}

void BytecodeArray::set_bytecode_age(BytecodeArray::Age age) {
  DCHECK_GE(age, kFirstBytecodeAge);
  DCHECK_LE(age, kLastBytecodeAge);
  STATIC_ASSERT(kLastBytecodeAge <= kMaxInt8);
  // Bytecode is aged by the concurrent marker.
  RELAXED_WRITE_INT8_FIELD(this, kBytecodeAgeOffset, static_cast<int8_t>(age));
}

int BytecodeArray::parameter_count() const {
  // Parameter count is stored as the size on stack of the parameters to allow
  // it to be used directly by generated code.
  return READ_INT_FIELD(this, kParameterSizeOffset) >> kPointerSizeLog2;
}
634 :
// Generated getters/setters for the BytecodeArray's side tables. The source
// position table slot holds either a ByteArray or a
// SourcePositionTableWithFrameCache (see SourcePositionTable()).
ACCESSORS(BytecodeArray, constant_pool, FixedArray, kConstantPoolOffset)
ACCESSORS(BytecodeArray, handler_table, FixedArray, kHandlerTableOffset)
ACCESSORS(BytecodeArray, source_position_table, Object,
          kSourcePositionTableOffset)
639 :
// Zeroes the bytes between the end of the bytecodes and the end of the
// object, giving the object deterministic contents.
void BytecodeArray::clear_padding() {
  int data_size = kHeaderSize + length();
  memset(address() + data_size, 0, SizeFor(length()) - data_size);
}

// Untagged address of the first bytecode.
Address BytecodeArray::GetFirstBytecodeAddress() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
}

// Returns the source position table, unwrapping the optional
// SourcePositionTableWithFrameCache wrapper.
ByteArray* BytecodeArray::SourcePositionTable() {
  Object* maybe_table = source_position_table();
  if (maybe_table->IsByteArray()) return ByteArray::cast(maybe_table);
  DCHECK(maybe_table->IsSourcePositionTableWithFrameCache());
  return SourcePositionTableWithFrameCache::cast(maybe_table)
      ->source_position_table();
}

int BytecodeArray::BytecodeArraySize() { return SizeFor(this->length()); }

// Object size plus the sizes of the side tables it points at.
int BytecodeArray::SizeIncludingMetadata() {
  int size = BytecodeArraySize();
  size += constant_pool()->Size();
  size += handler_table()->Size();
  size += SourcePositionTable()->Size();
  return size;
}
666 :
// HandlerTable entries are stored as Smis in a FixedArray: "range" entries
// occupy kRangeEntrySize slots (start, end, handler, data) and "return"
// entries occupy kReturnEntrySize slots (offset, handler). Handler values
// pack an offset and a catch prediction via bit fields.

int HandlerTable::GetRangeStart(int index) const {
  return Smi::ToInt(get(index * kRangeEntrySize + kRangeStartIndex));
}

int HandlerTable::GetRangeEnd(int index) const {
  return Smi::ToInt(get(index * kRangeEntrySize + kRangeEndIndex));
}

int HandlerTable::GetRangeHandler(int index) const {
  return HandlerOffsetField::decode(
      Smi::ToInt(get(index * kRangeEntrySize + kRangeHandlerIndex)));
}

int HandlerTable::GetRangeData(int index) const {
  return Smi::ToInt(get(index * kRangeEntrySize + kRangeDataIndex));
}

void HandlerTable::SetRangeStart(int index, int value) {
  set(index * kRangeEntrySize + kRangeStartIndex, Smi::FromInt(value));
}

void HandlerTable::SetRangeEnd(int index, int value) {
  set(index * kRangeEntrySize + kRangeEndIndex, Smi::FromInt(value));
}

void HandlerTable::SetRangeHandler(int index, int offset,
                                   CatchPrediction prediction) {
  int value = HandlerOffsetField::encode(offset) |
              HandlerPredictionField::encode(prediction);
  set(index * kRangeEntrySize + kRangeHandlerIndex, Smi::FromInt(value));
}

void HandlerTable::SetRangeData(int index, int value) {
  set(index * kRangeEntrySize + kRangeDataIndex, Smi::FromInt(value));
}

void HandlerTable::SetReturnOffset(int index, int value) {
  set(index * kReturnEntrySize + kReturnOffsetIndex, Smi::FromInt(value));
}

void HandlerTable::SetReturnHandler(int index, int offset) {
  int value = HandlerOffsetField::encode(offset);
  set(index * kReturnEntrySize + kReturnHandlerIndex, Smi::FromInt(value));
}

int HandlerTable::NumberOfRangeEntries() const {
  return length() / kRangeEntrySize;
}
715 :
// Per-entry bytecode offset, stored as a raw Smi and wrapped in a BailoutId.
BailoutId DeoptimizationData::BytecodeOffset(int i) {
  return BailoutId(BytecodeOffsetRaw(i)->value());
}

void DeoptimizationData::SetBytecodeOffset(int i, BailoutId value) {
  SetBytecodeOffsetRaw(i, Smi::FromInt(value.ToInt()));
}

// Entries follow a fixed header of kFirstDeoptEntryIndex slots, each entry
// occupying kDeoptEntrySize slots.
int DeoptimizationData::DeoptCount() {
  return (length() - kFirstDeoptEntryIndex) / kDeoptEntrySize;
}
727 :
728 : } // namespace internal
729 : } // namespace v8
730 :
731 : #include "src/objects/object-macros-undef.h"
732 :
733 : #endif // V8_OBJECTS_CODE_INL_H_
|