LCOV - code coverage report
Current view: top level - src/interpreter - interpreter-assembler.cc (source / functions) Hit Total Coverage
Test: app.info Lines: 602 668 90.1 %
Date: 2017-10-20 Functions: 88 97 90.7 %

          Line data    Source code
       1             : // Copyright 2015 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #include "src/interpreter/interpreter-assembler.h"
       6             : 
       7             : #include <limits>
       8             : #include <ostream>
       9             : 
      10             : #include "src/code-factory.h"
      11             : #include "src/frames.h"
      12             : #include "src/interface-descriptors.h"
      13             : #include "src/interpreter/bytecodes.h"
      14             : #include "src/interpreter/interpreter.h"
      15             : #include "src/machine-type.h"
      16             : #include "src/macro-assembler.h"
      17             : #include "src/objects-inl.h"
      18             : #include "src/zone/zone.h"
      19             : 
      20             : namespace v8 {
      21             : namespace internal {
      22             : namespace interpreter {
      23             : 
      24             : using compiler::CodeAssemblerState;
      25             : using compiler::Node;
      26             : 
      27       15786 : InterpreterAssembler::InterpreterAssembler(CodeAssemblerState* state,
      28             :                                            Bytecode bytecode,
      29             :                                            OperandScale operand_scale)
      30             :     : CodeStubAssembler(state),
      31             :       bytecode_(bytecode),
      32             :       operand_scale_(operand_scale),
      33             :       VARIABLE_CONSTRUCTOR(interpreted_frame_pointer_,
      34             :                            MachineType::PointerRepresentation()),
      35             :       VARIABLE_CONSTRUCTOR(
      36             :           bytecode_array_, MachineRepresentation::kTagged,
      37             :           Parameter(InterpreterDispatchDescriptor::kBytecodeArray)),
      38             :       VARIABLE_CONSTRUCTOR(
      39             :           bytecode_offset_, MachineType::PointerRepresentation(),
      40             :           Parameter(InterpreterDispatchDescriptor::kBytecodeOffset)),
      41             :       VARIABLE_CONSTRUCTOR(
      42             :           dispatch_table_, MachineType::PointerRepresentation(),
      43             :           Parameter(InterpreterDispatchDescriptor::kDispatchTable)),
      44             :       VARIABLE_CONSTRUCTOR(
      45             :           accumulator_, MachineRepresentation::kTagged,
      46             :           Parameter(InterpreterDispatchDescriptor::kAccumulator)),
      47             :       accumulator_use_(AccumulatorUse::kNone),
      48             :       made_call_(false),
      49             :       reloaded_frame_ptr_(false),
      50             :       bytecode_array_valid_(true),
      51             :       disable_stack_check_across_call_(false),
      52       15786 :       stack_pointer_before_call_(nullptr) {
      53             : #ifdef V8_TRACE_IGNITION
      54             :   TraceBytecode(Runtime::kInterpreterTraceBytecodeEntry);
      55             : #endif
      56       34805 :   RegisterCallGenerationCallbacks([this] { CallPrologue(); },
      57       82163 :                                   [this] { CallEpilogue(); });
      58             : 
      59             :   // Save the bytecode offset immediately if bytecode will make a call along the
      60             :   // critical path, or it is a return bytecode.
      61       29486 :   if (Bytecodes::MakesCallAlongCriticalPath(bytecode) ||
      62       13700 :       bytecode_ == Bytecode::kReturn) {
      63        2128 :     SaveBytecodeOffset();
      64             :   }
      65       15786 : }
      66             : 
      67       31572 : InterpreterAssembler::~InterpreterAssembler() {
      68             :   // If the following check fails the handler does not use the
      69             :   // accumulator in the way described in the bytecode definitions in
      70             :   // bytecodes.h.
      71             :   DCHECK_EQ(accumulator_use_, Bytecodes::GetAccumulatorUse(bytecode_));
      72       15786 :   UnregisterCallGenerationCallbacks();
      73       15786 : }
      74             : 
      75       78563 : Node* InterpreterAssembler::GetInterpretedFramePointer() {
      76       78563 :   if (!interpreted_frame_pointer_.IsBound()) {
      77       13605 :     interpreted_frame_pointer_.Bind(LoadParentFramePointer());
      78       71840 :   } else if (Bytecodes::MakesCallAlongCriticalPath(bytecode_) && made_call_ &&
      79        6882 :              !reloaded_frame_ptr_) {
      80        1302 :     interpreted_frame_pointer_.Bind(LoadParentFramePointer());
      81        1302 :     reloaded_frame_ptr_ = true;
      82             :   }
      83       78563 :   return interpreted_frame_pointer_.value();
      84             : }
      85             : 
      86      113001 : Node* InterpreterAssembler::BytecodeOffset() {
      87      115667 :   if (Bytecodes::MakesCallAlongCriticalPath(bytecode_) && made_call_ &&
      88        2666 :       (bytecode_offset_.value() ==
      89        2666 :        Parameter(InterpreterDispatchDescriptor::kBytecodeOffset))) {
      90        1302 :     bytecode_offset_.Bind(ReloadBytecodeOffset());
      91             :   }
      92      113001 :   return bytecode_offset_.value();
      93             : }
      94             : 
      95        2604 : Node* InterpreterAssembler::ReloadBytecodeOffset() {
      96        1302 :   Node* offset = LoadAndUntagRegister(Register::bytecode_offset());
      97        1302 :   if (operand_scale() != OperandScale::kSingle) {
      98             :     // Add one to the offset such that it points to the actual bytecode rather
      99             :     // than the Wide / ExtraWide prefix bytecode.
     100        2604 :     offset = IntPtrAdd(offset, IntPtrConstant(1));
     101             :   }
     102        1302 :   return offset;
     103             : }
     104             : 
     105       66932 : void InterpreterAssembler::SaveBytecodeOffset() {
     106       33466 :   Node* offset = BytecodeOffset();
     107       33466 :   if (operand_scale() != OperandScale::kSingle) {
     108             :     // Subtract one from the offset such that it points to the Wide / ExtraWide
     109             :     // prefix bytecode.
     110       85480 :     offset = IntPtrSub(BytecodeOffset(), IntPtrConstant(1));
     111             :   }
     112       33466 :   StoreAndTagRegister(offset, Register::bytecode_offset());
     113       33466 : }
     114             : 
     115       68243 : Node* InterpreterAssembler::BytecodeArrayTaggedPointer() {
     116             :   // Force a re-load of the bytecode array after every call in case the debugger
     117             :   // has been activated.
     118       68243 :   if (!bytecode_array_valid_) {
     119       10974 :     bytecode_array_.Bind(LoadRegister(Register::bytecode_array()));
     120       10974 :     bytecode_array_valid_ = true;
     121             :   }
     122       68243 :   return bytecode_array_.value();
     123             : }
     124             : 
     125       32581 : Node* InterpreterAssembler::DispatchTableRawPointer() {
     126       34255 :   if (Bytecodes::MakesCallAlongCriticalPath(bytecode_) && made_call_ &&
     127        1674 :       (dispatch_table_.value() ==
     128        1674 :        Parameter(InterpreterDispatchDescriptor::kDispatchTable))) {
     129             :     dispatch_table_.Bind(ExternalConstant(
     130        1674 :         ExternalReference::interpreter_dispatch_table_address(isolate())));
     131             :   }
     132       32581 :   return dispatch_table_.value();
     133             : }
     134             : 
     135           0 : Node* InterpreterAssembler::GetAccumulatorUnchecked() {
     136       27122 :   return accumulator_.value();
     137             : }
     138             : 
     139        9638 : Node* InterpreterAssembler::GetAccumulator() {
     140             :   DCHECK(Bytecodes::ReadsAccumulator(bytecode_));
     141       21012 :   accumulator_use_ = accumulator_use_ | AccumulatorUse::kRead;
     142        9638 :   return GetAccumulatorUnchecked();
     143             : }
     144             : 
     145       10656 : void InterpreterAssembler::SetAccumulator(Node* value) {
     146             :   DCHECK(Bytecodes::WritesAccumulator(bytecode_));
     147       21684 :   accumulator_use_ = accumulator_use_ | AccumulatorUse::kWrite;
     148       10842 :   accumulator_.Bind(value);
     149       10656 : }
     150             : 
     151       10898 : Node* InterpreterAssembler::GetContext() {
     152       12386 :   return LoadRegister(Register::current_context());
     153             : }
     154             : 
     155         186 : void InterpreterAssembler::SetContext(Node* value) {
     156         186 :   StoreRegister(value, Register::current_context());
     157         186 : }
     158             : 
     159         558 : Node* InterpreterAssembler::GetContextAtDepth(Node* context, Node* depth) {
     160         558 :   Variable cur_context(this, MachineRepresentation::kTaggedPointer);
     161         558 :   cur_context.Bind(context);
     162             : 
     163        1116 :   Variable cur_depth(this, MachineRepresentation::kWord32);
     164         558 :   cur_depth.Bind(depth);
     165             : 
     166         558 :   Label context_found(this);
     167             : 
     168         558 :   Variable* context_search_loop_variables[2] = {&cur_depth, &cur_context};
     169        1116 :   Label context_search(this, 2, context_search_loop_variables);
     170             : 
     171             :   // Fast path if the depth is 0.
     172        1674 :   Branch(Word32Equal(depth, Int32Constant(0)), &context_found, &context_search);
     173             : 
     174             :   // Loop until the depth is 0.
     175         558 :   BIND(&context_search);
     176             :   {
     177        2232 :     cur_depth.Bind(Int32Sub(cur_depth.value(), Int32Constant(1)));
     178             :     cur_context.Bind(
     179        1674 :         LoadContextElement(cur_context.value(), Context::PREVIOUS_INDEX));
     180             : 
     181        1674 :     Branch(Word32Equal(cur_depth.value(), Int32Constant(0)), &context_found,
     182        1116 :            &context_search);
     183             :   }
     184             : 
     185         558 :   BIND(&context_found);
     186        1116 :   return cur_context.value();
     187             : }
     188             : 
     189         372 : void InterpreterAssembler::GotoIfHasContextExtensionUpToDepth(Node* context,
     190             :                                                               Node* depth,
     191             :                                                               Label* target) {
     192         372 :   Variable cur_context(this, MachineRepresentation::kTaggedPointer);
     193         372 :   cur_context.Bind(context);
     194             : 
     195         744 :   Variable cur_depth(this, MachineRepresentation::kWord32);
     196         372 :   cur_depth.Bind(depth);
     197             : 
     198         372 :   Variable* context_search_loop_variables[2] = {&cur_depth, &cur_context};
     199         744 :   Label context_search(this, 2, context_search_loop_variables);
     200             : 
     201             :   // Loop until the depth is 0.
     202         372 :   Goto(&context_search);
     203         372 :   BIND(&context_search);
     204             :   {
     205             :     // TODO(leszeks): We only need to do this check if the context had a sloppy
     206             :     // eval, we could pass in a context chain bitmask to figure out which
     207             :     // contexts actually need to be checked.
     208             : 
     209             :     Node* extension_slot =
     210        1116 :         LoadContextElement(cur_context.value(), Context::EXTENSION_INDEX);
     211             : 
     212             :     // Jump to the target if the extension slot is not a hole.
     213         744 :     GotoIf(WordNotEqual(extension_slot, TheHoleConstant()), target);
     214             : 
     215        1488 :     cur_depth.Bind(Int32Sub(cur_depth.value(), Int32Constant(1)));
     216             :     cur_context.Bind(
     217        1116 :         LoadContextElement(cur_context.value(), Context::PREVIOUS_INDEX));
     218             : 
     219        1116 :     GotoIf(Word32NotEqual(cur_depth.value(), Int32Constant(0)),
     220         744 :            &context_search);
     221         372 :   }
     222         372 : }
     223             : 
     224        1102 : Node* InterpreterAssembler::RegisterLocation(Node* reg_index) {
     225             :   return IntPtrAdd(GetInterpretedFramePointer(),
     226        3306 :                    RegisterFrameOffset(reg_index));
     227             : }
     228             : 
     229           0 : Node* InterpreterAssembler::RegisterFrameOffset(Node* index) {
     230       13474 :   return TimesPointerSize(index);
     231             : }
     232             : 
     233       30135 : Node* InterpreterAssembler::LoadRegister(Register reg) {
     234             :   return Load(MachineType::AnyTagged(), GetInterpretedFramePointer(),
     235       60270 :               IntPtrConstant(reg.ToOperand() << kPointerSizeLog2));
     236             : }
     237             : 
     238        9937 : Node* InterpreterAssembler::LoadRegister(Node* reg_index) {
     239             :   return Load(MachineType::AnyTagged(), GetInterpretedFramePointer(),
     240        9937 :               RegisterFrameOffset(reg_index));
     241             : }
     242             : 
     243        1302 : Node* InterpreterAssembler::LoadAndUntagRegister(Register reg) {
     244             :   return LoadAndUntagSmi(GetInterpretedFramePointer(), reg.ToOperand()
     245        2604 :                                                            << kPointerSizeLog2);
     246             : }
     247             : 
     248         186 : Node* InterpreterAssembler::StoreRegister(Node* value, Register reg) {
     249             :   return StoreNoWriteBarrier(
     250             :       MachineRepresentation::kTagged, GetInterpretedFramePointer(),
     251         372 :       IntPtrConstant(reg.ToOperand() << kPointerSizeLog2), value);
     252             : }
     253             : 
     254        2435 : Node* InterpreterAssembler::StoreRegister(Node* value, Node* reg_index) {
     255             :   return StoreNoWriteBarrier(MachineRepresentation::kTagged,
     256             :                              GetInterpretedFramePointer(),
     257        2435 :                              RegisterFrameOffset(reg_index), value);
     258             : }
     259             : 
     260       33466 : Node* InterpreterAssembler::StoreAndTagRegister(compiler::Node* value,
     261             :                                                 Register reg) {
     262       33466 :   int offset = reg.ToOperand() << kPointerSizeLog2;
     263       33466 :   return StoreAndTagSmi(GetInterpretedFramePointer(), offset, value);
     264             : }
     265             : 
     266        2418 : Node* InterpreterAssembler::NextRegister(Node* reg_index) {
     267             :   // Register indexes are negative, so the next index is minus one.
     268        7254 :   return IntPtrAdd(reg_index, IntPtrConstant(-1));
     269             : }
     270             : 
     271        9625 : Node* InterpreterAssembler::OperandOffset(int operand_index) {
     272             :   return IntPtrConstant(
     273       19250 :       Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale()));
     274             : }
     275             : 
     276        5566 : Node* InterpreterAssembler::BytecodeOperandUnsignedByte(int operand_index) {
     277             :   DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
     278             :   DCHECK_EQ(OperandSize::kByte, Bytecodes::GetOperandSize(
     279             :                                     bytecode_, operand_index, operand_scale()));
     280        5566 :   Node* operand_offset = OperandOffset(operand_index);
     281             :   return Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
     282       16698 :               IntPtrAdd(BytecodeOffset(), operand_offset));
     283             : }
     284             : 
     285        4059 : Node* InterpreterAssembler::BytecodeOperandSignedByte(int operand_index) {
     286             :   DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
     287             :   DCHECK_EQ(OperandSize::kByte, Bytecodes::GetOperandSize(
     288             :                                     bytecode_, operand_index, operand_scale()));
     289        4059 :   Node* operand_offset = OperandOffset(operand_index);
     290             :   return Load(MachineType::Int8(), BytecodeArrayTaggedPointer(),
     291       12177 :               IntPtrAdd(BytecodeOffset(), operand_offset));
     292             : }
     293             : 
     294           0 : compiler::Node* InterpreterAssembler::BytecodeOperandReadUnaligned(
     295             :     int relative_offset, MachineType result_type) {
     296             :   static const int kMaxCount = 4;
     297             :   DCHECK(!TargetSupportsUnalignedAccess());
     298             : 
     299             :   int count;
     300           0 :   switch (result_type.representation()) {
     301             :     case MachineRepresentation::kWord16:
     302             :       count = 2;
     303             :       break;
     304             :     case MachineRepresentation::kWord32:
     305             :       count = 4;
     306           0 :       break;
     307             :     default:
     308           0 :       UNREACHABLE();
     309             :       break;
     310             :   }
     311             :   MachineType msb_type =
     312           0 :       result_type.IsSigned() ? MachineType::Int8() : MachineType::Uint8();
     313             : 
     314             : #if V8_TARGET_LITTLE_ENDIAN
     315             :   const int kStep = -1;
     316           0 :   int msb_offset = count - 1;
     317             : #elif V8_TARGET_BIG_ENDIAN
     318             :   const int kStep = 1;
     319             :   int msb_offset = 0;
     320             : #else
     321             : #error "Unknown Architecture"
     322             : #endif
     323             : 
     324             :   // Read the most signicant bytecode into bytes[0] and then in order
     325             :   // down to least significant in bytes[count - 1].
     326             :   DCHECK_LE(count, kMaxCount);
     327             :   compiler::Node* bytes[kMaxCount];
     328           0 :   for (int i = 0; i < count; i++) {
     329           0 :     MachineType machine_type = (i == 0) ? msb_type : MachineType::Uint8();
     330           0 :     Node* offset = IntPtrConstant(relative_offset + msb_offset + i * kStep);
     331           0 :     Node* array_offset = IntPtrAdd(BytecodeOffset(), offset);
     332           0 :     bytes[i] = Load(machine_type, BytecodeArrayTaggedPointer(), array_offset);
     333             :   }
     334             : 
     335             :   // Pack LSB to MSB.
     336           0 :   Node* result = bytes[--count];
     337           0 :   for (int i = 1; --count >= 0; i++) {
     338           0 :     Node* shift = Int32Constant(i * kBitsPerByte);
     339           0 :     Node* value = Word32Shl(bytes[count], shift);
     340           0 :     result = Word32Or(value, result);
     341             :   }
     342           0 :   return result;
     343             : }
     344             : 
     345        5058 : Node* InterpreterAssembler::BytecodeOperandUnsignedShort(int operand_index) {
     346             :   DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
     347             :   DCHECK_EQ(
     348             :       OperandSize::kShort,
     349             :       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()));
     350             :   int operand_offset =
     351        5058 :       Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
     352             :   if (TargetSupportsUnalignedAccess()) {
     353             :     return Load(MachineType::Uint16(), BytecodeArrayTaggedPointer(),
     354       20232 :                 IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
     355             :   } else {
     356             :     return BytecodeOperandReadUnaligned(operand_offset, MachineType::Uint16());
     357             :   }
     358             : }
     359             : 
     360        3377 : Node* InterpreterAssembler::BytecodeOperandSignedShort(int operand_index) {
     361             :   DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
     362             :   DCHECK_EQ(
     363             :       OperandSize::kShort,
     364             :       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()));
     365             :   int operand_offset =
     366        3377 :       Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
     367             :   if (TargetSupportsUnalignedAccess()) {
     368             :     return Load(MachineType::Int16(), BytecodeArrayTaggedPointer(),
     369       13508 :                 IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
     370             :   } else {
     371             :     return BytecodeOperandReadUnaligned(operand_offset, MachineType::Int16());
     372             :   }
     373             : }
     374             : 
     375        4860 : Node* InterpreterAssembler::BytecodeOperandUnsignedQuad(int operand_index) {
     376             :   DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
     377             :   DCHECK_EQ(OperandSize::kQuad, Bytecodes::GetOperandSize(
     378             :                                     bytecode_, operand_index, operand_scale()));
     379             :   int operand_offset =
     380        4860 :       Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
     381             :   if (TargetSupportsUnalignedAccess()) {
     382             :     return Load(MachineType::Uint32(), BytecodeArrayTaggedPointer(),
     383       19440 :                 IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
     384             :   } else {
     385             :     return BytecodeOperandReadUnaligned(operand_offset, MachineType::Uint32());
     386             :   }
     387             : }
     388             : 
     389        3377 : Node* InterpreterAssembler::BytecodeOperandSignedQuad(int operand_index) {
     390             :   DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
     391             :   DCHECK_EQ(OperandSize::kQuad, Bytecodes::GetOperandSize(
     392             :                                     bytecode_, operand_index, operand_scale()));
     393             :   int operand_offset =
     394        3377 :       Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
     395             :   if (TargetSupportsUnalignedAccess()) {
     396             :     return Load(MachineType::Int32(), BytecodeArrayTaggedPointer(),
     397       13508 :                 IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
     398             :   } else {
     399             :     return BytecodeOperandReadUnaligned(operand_offset, MachineType::Int32());
     400             :   }
     401             : }
     402             : 
     403       10813 : Node* InterpreterAssembler::BytecodeSignedOperand(int operand_index,
     404             :                                                   OperandSize operand_size) {
     405             :   DCHECK(!Bytecodes::IsUnsignedOperandType(
     406             :       Bytecodes::GetOperandType(bytecode_, operand_index)));
     407       10813 :   switch (operand_size) {
     408             :     case OperandSize::kByte:
     409        4059 :       return BytecodeOperandSignedByte(operand_index);
     410             :     case OperandSize::kShort:
     411        3377 :       return BytecodeOperandSignedShort(operand_index);
     412             :     case OperandSize::kQuad:
     413        3377 :       return BytecodeOperandSignedQuad(operand_index);
     414             :     case OperandSize::kNone:
     415           0 :       UNREACHABLE();
     416             :   }
     417             :   return nullptr;
     418             : }
     419             : 
     420       15484 : Node* InterpreterAssembler::BytecodeUnsignedOperand(int operand_index,
     421             :                                                     OperandSize operand_size) {
     422             :   DCHECK(Bytecodes::IsUnsignedOperandType(
     423             :       Bytecodes::GetOperandType(bytecode_, operand_index)));
     424       15484 :   switch (operand_size) {
     425             :     case OperandSize::kByte:
     426        5566 :       return BytecodeOperandUnsignedByte(operand_index);
     427             :     case OperandSize::kShort:
     428        5058 :       return BytecodeOperandUnsignedShort(operand_index);
     429             :     case OperandSize::kQuad:
     430        4860 :       return BytecodeOperandUnsignedQuad(operand_index);
     431             :     case OperandSize::kNone:
     432           0 :       UNREACHABLE();
     433             :   }
     434             :   return nullptr;
     435             : }
     436             : 
     437        1152 : Node* InterpreterAssembler::BytecodeOperandCount(int operand_index) {
     438             :   DCHECK_EQ(OperandType::kRegCount,
     439             :             Bytecodes::GetOperandType(bytecode_, operand_index));
     440             :   OperandSize operand_size =
     441        1152 :       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
     442        1152 :   return BytecodeUnsignedOperand(operand_index, operand_size);
     443             : }
     444             : 
     445         610 : Node* InterpreterAssembler::BytecodeOperandFlag(int operand_index) {
     446             :   DCHECK_EQ(OperandType::kFlag8,
     447             :             Bytecodes::GetOperandType(bytecode_, operand_index));
     448             :   OperandSize operand_size =
     449         610 :       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
     450             :   DCHECK_EQ(operand_size, OperandSize::kByte);
     451         610 :   return BytecodeUnsignedOperand(operand_index, operand_size);
     452             : }
     453             : 
     454        2211 : Node* InterpreterAssembler::BytecodeOperandUImm(int operand_index) {
     455             :   DCHECK_EQ(OperandType::kUImm,
     456             :             Bytecodes::GetOperandType(bytecode_, operand_index));
     457             :   OperandSize operand_size =
     458        2211 :       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
     459        2211 :   return BytecodeUnsignedOperand(operand_index, operand_size);
     460             : }
     461             : 
     462        1116 : Node* InterpreterAssembler::BytecodeOperandUImmWord(int operand_index) {
     463        3348 :   return ChangeUint32ToWord(BytecodeOperandUImm(operand_index));
     464             : }
     465             : 
     466          93 : Node* InterpreterAssembler::BytecodeOperandUImmSmi(int operand_index) {
     467         279 :   return SmiFromWord32(BytecodeOperandUImm(operand_index));
     468             : }
     469             : 
     470        1632 : Node* InterpreterAssembler::BytecodeOperandImm(int operand_index) {
     471             :   DCHECK_EQ(OperandType::kImm,
     472             :             Bytecodes::GetOperandType(bytecode_, operand_index));
     473             :   OperandSize operand_size =
     474        1632 :       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
     475        1632 :   return BytecodeSignedOperand(operand_index, operand_size);
     476             : }
     477             : 
     478         279 : Node* InterpreterAssembler::BytecodeOperandImmIntPtr(int operand_index) {
     479         837 :   return ChangeInt32ToIntPtr(BytecodeOperandImm(operand_index));
     480             : }
     481             : 
     482        1209 : Node* InterpreterAssembler::BytecodeOperandImmSmi(int operand_index) {
     483        3627 :   return SmiFromWord32(BytecodeOperandImm(operand_index));
     484             : }
     485             : 
     486       11121 : Node* InterpreterAssembler::BytecodeOperandIdxInt32(int operand_index) {
     487             :   DCHECK_EQ(OperandType::kIdx,
     488             :             Bytecodes::GetOperandType(bytecode_, operand_index));
     489             :   OperandSize operand_size =
     490       11121 :       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
     491       11121 :   return BytecodeUnsignedOperand(operand_index, operand_size);
     492             : }
     493             : 
     494       11121 : Node* InterpreterAssembler::BytecodeOperandIdx(int operand_index) {
     495       33363 :   return ChangeUint32ToWord(BytecodeOperandIdxInt32(operand_index));
     496             : }
     497             : 
     498          93 : Node* InterpreterAssembler::BytecodeOperandIdxSmi(int operand_index) {
     499         279 :   return SmiTag(BytecodeOperandIdx(operand_index));
     500             : }
     501             : 
     502        9181 : Node* InterpreterAssembler::BytecodeOperandReg(int operand_index) {
     503             :   DCHECK(Bytecodes::IsRegisterOperandType(
     504             :       Bytecodes::GetOperandType(bytecode_, operand_index)));
     505             :   OperandSize operand_size =
     506        9181 :       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
     507             :   return ChangeInt32ToIntPtr(
     508       27543 :       BytecodeSignedOperand(operand_index, operand_size));
     509             : }
     510             : 
     511         198 : Node* InterpreterAssembler::BytecodeOperandRuntimeId(int operand_index) {
     512             :   DCHECK_EQ(OperandType::kRuntimeId,
     513             :             Bytecodes::GetOperandType(bytecode_, operand_index));
     514             :   OperandSize operand_size =
     515         198 :       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
     516             :   DCHECK_EQ(operand_size, OperandSize::kShort);
     517         198 :   return BytecodeUnsignedOperand(operand_index, operand_size);
     518             : }
     519             : 
     520          96 : Node* InterpreterAssembler::BytecodeOperandNativeContextIndex(
     521          96 :     int operand_index) {
     522             :   DCHECK_EQ(OperandType::kNativeContextIndex,
     523             :             Bytecodes::GetOperandType(bytecode_, operand_index));
     524             :   OperandSize operand_size =
     525          96 :       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
     526             :   return ChangeUint32ToWord(
     527         288 :       BytecodeUnsignedOperand(operand_index, operand_size));
     528             : }
     529             : 
     530          96 : Node* InterpreterAssembler::BytecodeOperandIntrinsicId(int operand_index) {
     531             :   DCHECK_EQ(OperandType::kIntrinsicId,
     532             :             Bytecodes::GetOperandType(bytecode_, operand_index));
     533             :   OperandSize operand_size =
     534          96 :       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
     535             :   DCHECK_EQ(operand_size, OperandSize::kByte);
     536          96 :   return BytecodeUnsignedOperand(operand_index, operand_size);
     537             : }
     538             : 
     539        4250 : Node* InterpreterAssembler::LoadConstantPoolEntry(Node* index) {
     540             :   Node* constant_pool = LoadObjectField(BytecodeArrayTaggedPointer(),
     541        4250 :                                         BytecodeArray::kConstantPoolOffset);
     542        4250 :   return LoadFixedArrayElement(constant_pool, index);
     543             : }
     544             : 
     545        1023 : Node* InterpreterAssembler::LoadAndUntagConstantPoolEntry(Node* index) {
     546        3069 :   return SmiUntag(LoadConstantPoolEntry(index));
     547             : }
     548             : 
     549        6403 : Node* InterpreterAssembler::LoadFeedbackVector() {
     550        6403 :   Node* function = LoadRegister(Register::function_closure());
     551        6403 :   Node* cell = LoadObjectField(function, JSFunction::kFeedbackVectorOffset);
     552             :   Node* vector = LoadObjectField(cell, Cell::kValueOffset);
     553        6403 :   return vector;
     554             : }
     555             : 
     556       34805 : void InterpreterAssembler::CallPrologue() {
     557       34805 :   if (!Bytecodes::MakesCallAlongCriticalPath(bytecode_)) {
     558             :     // Bytecodes that make a call along the critical path save the bytecode
     559             :     // offset in the bytecode handler's prologue. For other bytecodes, if
     560             :     // there are multiple calls in the bytecode handler, you need to spill
     561             :     // before each of them, unless SaveBytecodeOffset has explicitly been called
     562             :     // in a path that dominates _all_ of those calls (which we don't track).
     563       31338 :     SaveBytecodeOffset();
     564             :   }
     565             : 
     566       34805 :   if (FLAG_debug_code && !disable_stack_check_across_call_) {
     567             :     DCHECK_NULL(stack_pointer_before_call_);
     568           0 :     stack_pointer_before_call_ = LoadStackPointer();
     569             :   }
     570       34805 :   bytecode_array_valid_ = false;
     571       34805 :   made_call_ = true;
     572       34805 : }
     573             : 
     574       34805 : void InterpreterAssembler::CallEpilogue() {
     575       34805 :   if (FLAG_debug_code && !disable_stack_check_across_call_) {
     576           0 :     Node* stack_pointer_after_call = LoadStackPointer();
     577           0 :     Node* stack_pointer_before_call = stack_pointer_before_call_;
     578           0 :     stack_pointer_before_call_ = nullptr;
     579             :     AbortIfWordNotEqual(stack_pointer_before_call, stack_pointer_after_call,
     580           0 :                         kUnexpectedStackPointer);
     581             :   }
     582       34805 : }
     583             : 
     584        1116 : Node* InterpreterAssembler::IncrementCallCount(Node* feedback_vector,
     585             :                                                Node* slot_id) {
     586        1116 :   Comment("increment call count");
     587             :   Node* call_count =
     588        1116 :       LoadFeedbackVectorSlot(feedback_vector, slot_id, kPointerSize);
     589        3348 :   Node* new_count = SmiAdd(call_count, SmiConstant(1));
     590             :   // Count is Smi, so we don't need a write barrier.
     591             :   return StoreFeedbackVectorSlot(feedback_vector, slot_id, new_count,
     592        1116 :                                  SKIP_WRITE_BARRIER, kPointerSize);
     593             : }
     594             : 
     595         930 : void InterpreterAssembler::CollectCallFeedback(Node* target, Node* context,
     596             :                                                Node* feedback_vector,
     597             :                                                Node* slot_id) {
     598        1860 :   Label extra_checks(this, Label::kDeferred), done(this);
     599             : 
     600             :   // Increment the call count.
     601         930 :   IncrementCallCount(feedback_vector, slot_id);
     602             : 
     603             :   // Check if we have monomorphic {target} feedback already.
     604         930 :   Node* feedback_element = LoadFeedbackVectorSlot(feedback_vector, slot_id);
     605         930 :   Node* feedback_value = LoadWeakCellValueUnchecked(feedback_element);
     606        1860 :   Branch(WordEqual(target, feedback_value), &done, &extra_checks);
     607             : 
     608         930 :   BIND(&extra_checks);
     609             :   {
     610         930 :     Label check_initialized(this), initialize(this), mark_megamorphic(this);
     611             : 
     612             :     // Check if it is a megamorphic {target}.
     613         930 :     Comment("check if megamorphic");
     614             :     Node* is_megamorphic =
     615             :         WordEqual(feedback_element,
     616         930 :                   HeapConstant(FeedbackVector::MegamorphicSentinel(isolate())));
     617         930 :     GotoIf(is_megamorphic, &done);
     618             : 
     619         930 :     Comment("check if weak cell");
     620             :     Node* is_weak_cell = WordEqual(LoadMap(feedback_element),
     621        2790 :                                    LoadRoot(Heap::kWeakCellMapRootIndex));
     622         930 :     GotoIfNot(is_weak_cell, &check_initialized);
     623             : 
     624             :     // If the weak cell is cleared, we have a new chance to become monomorphic.
     625         930 :     Comment("check if weak cell is cleared");
     626        1860 :     Node* is_smi = TaggedIsSmi(feedback_value);
     627         930 :     Branch(is_smi, &initialize, &mark_megamorphic);
     628             : 
     629         930 :     BIND(&check_initialized);
     630             :     {
     631             :       // Check if it is uninitialized.
     632         930 :       Comment("check if uninitialized");
     633             :       Node* is_uninitialized = WordEqual(
     634         930 :           feedback_element, LoadRoot(Heap::kuninitialized_symbolRootIndex));
     635         930 :       Branch(is_uninitialized, &initialize, &mark_megamorphic);
     636             :     }
     637             : 
     638         930 :     BIND(&initialize);
     639             :     {
     640             :       // Check if {target} is a JSFunction in the current native
     641             :       // context.
     642         930 :       Comment("check if function in same native context");
     643        1860 :       GotoIf(TaggedIsSmi(target), &mark_megamorphic);
     644             :       // Check if the {target} is a JSFunction or JSBoundFunction
     645             :       // in the current native context.
     646         930 :       VARIABLE(var_target, MachineRepresentation::kTagged, target);
     647         930 :       Label loop(this, &var_target), done_loop(this);
     648         930 :       Goto(&loop);
     649         930 :       BIND(&loop);
     650             :       {
     651         930 :         Label if_boundfunction(this), if_function(this);
     652         930 :         Node* target = var_target.value();
     653             :         CSA_ASSERT(this, TaggedIsNotSmi(target));
     654        1860 :         Node* target_instance_type = LoadInstanceType(target);
     655             :         GotoIf(InstanceTypeEqual(target_instance_type, JS_BOUND_FUNCTION_TYPE),
     656        1860 :                &if_boundfunction);
     657             :         Branch(InstanceTypeEqual(target_instance_type, JS_FUNCTION_TYPE),
     658        1860 :                &if_function, &mark_megamorphic);
     659             : 
     660         930 :         BIND(&if_function);
     661             :         {
     662             :           // Check that the JSFunction {target} is in the current native
     663             :           // context.
     664             :           Node* target_context =
     665             :               LoadObjectField(target, JSFunction::kContextOffset);
     666        1860 :           Node* target_native_context = LoadNativeContext(target_context);
     667        1860 :           Branch(WordEqual(LoadNativeContext(context), target_native_context),
     668         930 :                  &done_loop, &mark_megamorphic);
     669             :         }
     670             : 
     671         930 :         BIND(&if_boundfunction);
     672             :         {
     673             :           // Continue with the [[BoundTargetFunction]] of {target}.
     674             :           var_target.Bind(LoadObjectField(
     675         930 :               target, JSBoundFunction::kBoundTargetFunctionOffset));
     676         930 :           Goto(&loop);
     677         930 :         }
     678             :       }
     679         930 :       BIND(&done_loop);
     680        1860 :       CreateWeakCellInFeedbackVector(feedback_vector, SmiTag(slot_id), target);
     681             :       // Reset profiler ticks.
     682             :       StoreObjectFieldNoWriteBarrier(feedback_vector,
     683             :                                      FeedbackVector::kProfilerTicksOffset,
     684        1860 :                                      SmiConstant(0));
     685        1860 :       Goto(&done);
     686             :     }
     687             : 
     688         930 :     BIND(&mark_megamorphic);
     689             :     {
     690             :       // MegamorphicSentinel is an immortal immovable object so
     691             :       // write-barrier is not needed.
     692         930 :       Comment("transition to megamorphic");
     693             :       DCHECK(Heap::RootIsImmortalImmovable(Heap::kmegamorphic_symbolRootIndex));
     694             :       StoreFeedbackVectorSlot(
     695             :           feedback_vector, slot_id,
     696         930 :           HeapConstant(FeedbackVector::MegamorphicSentinel(isolate())),
     697         930 :           SKIP_WRITE_BARRIER);
     698             :       // Reset profiler ticks.
     699             :       StoreObjectFieldNoWriteBarrier(feedback_vector,
     700             :                                      FeedbackVector::kProfilerTicksOffset,
     701        1860 :                                      SmiConstant(0));
     702         930 :       Goto(&done);
     703         930 :     }
     704             :   }
     705             : 
     706        1860 :   BIND(&done);
     707         930 : }
     708             : 
     709         465 : void InterpreterAssembler::CallJSAndDispatch(
     710             :     Node* function, Node* context, Node* first_arg, Node* arg_count,
     711             :     ConvertReceiverMode receiver_mode) {
     712             :   DCHECK(Bytecodes::MakesCallAlongCriticalPath(bytecode_));
     713             :   DCHECK(Bytecodes::IsCallOrConstruct(bytecode_) ||
     714             :          bytecode_ == Bytecode::kInvokeIntrinsic);
     715             :   DCHECK_EQ(Bytecodes::GetReceiverMode(bytecode_), receiver_mode);
     716             :   Callable callable = CodeFactory::InterpreterPushArgsThenCall(
     717         465 :       isolate(), receiver_mode, InterpreterPushArgsMode::kOther);
     718             :   Node* code_target = HeapConstant(callable.code());
     719             : 
     720             :   TailCallStubThenBytecodeDispatch(callable.descriptor(), code_target, context,
     721         930 :                                    arg_count, first_arg, function);
     722             :   // TailCallStubThenDispatch updates accumulator with result.
     723         930 :   accumulator_use_ = accumulator_use_ | AccumulatorUse::kWrite;
     724         465 : }
     725             : 
     726             : template <class... TArgs>
     727         558 : void InterpreterAssembler::CallJSAndDispatch(Node* function, Node* context,
     728             :                                              Node* arg_count,
     729             :                                              ConvertReceiverMode receiver_mode,
     730             :                                              TArgs... args) {
     731             :   DCHECK(Bytecodes::MakesCallAlongCriticalPath(bytecode_));
     732             :   DCHECK(Bytecodes::IsCallOrConstruct(bytecode_) ||
     733             :          bytecode_ == Bytecode::kInvokeIntrinsic);
     734             :   DCHECK_EQ(Bytecodes::GetReceiverMode(bytecode_), receiver_mode);
     735         558 :   Callable callable = CodeFactory::Call(isolate());
     736             :   Node* code_target = HeapConstant(callable.code());
     737             : 
     738         558 :   if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
     739             :     // The first argument parameter (the receiver) is implied to be undefined.
     740         837 :     TailCallStubThenBytecodeDispatch(
     741             :         callable.descriptor(), code_target, context, function, arg_count,
     742         279 :         static_cast<Node*>(UndefinedConstant()), args...);
     743             :   } else {
     744         558 :     TailCallStubThenBytecodeDispatch(callable.descriptor(), code_target,
     745             :                                      context, function, arg_count, args...);
     746             :   }
     747             :   // TailCallStubThenDispatch updates accumulator with result.
     748        1116 :   accumulator_use_ = accumulator_use_ | AccumulatorUse::kWrite;
     749         558 : }
     750             : 
     751             : // Instantiate CallJSAndDispatch() for argument counts used by interpreter
     752             : // generator.
     753             : template V8_EXPORT_PRIVATE void InterpreterAssembler::CallJSAndDispatch(
     754             :     Node* function, Node* context, Node* arg_count,
     755             :     ConvertReceiverMode receiver_mode);
     756             : template V8_EXPORT_PRIVATE void InterpreterAssembler::CallJSAndDispatch(
     757             :     Node* function, Node* context, Node* arg_count,
     758             :     ConvertReceiverMode receiver_mode, Node*);
     759             : template V8_EXPORT_PRIVATE void InterpreterAssembler::CallJSAndDispatch(
     760             :     Node* function, Node* context, Node* arg_count,
     761             :     ConvertReceiverMode receiver_mode, Node*, Node*);
     762             : template V8_EXPORT_PRIVATE void InterpreterAssembler::CallJSAndDispatch(
     763             :     Node* function, Node* context, Node* arg_count,
     764             :     ConvertReceiverMode receiver_mode, Node*, Node*, Node*);
     765             : 
     766          93 : void InterpreterAssembler::CallJSWithSpreadAndDispatch(
     767             :     Node* function, Node* context, Node* first_arg, Node* arg_count,
     768             :     Node* slot_id, Node* feedback_vector) {
     769             :   DCHECK(Bytecodes::MakesCallAlongCriticalPath(bytecode_));
     770             :   DCHECK_EQ(Bytecodes::GetReceiverMode(bytecode_), ConvertReceiverMode::kAny);
     771          93 :   CollectCallFeedback(function, context, feedback_vector, slot_id);
     772          93 :   Comment("call using CallWithSpread builtin");
     773             :   Callable callable = CodeFactory::InterpreterPushArgsThenCall(
     774             :       isolate(), ConvertReceiverMode::kAny,
     775          93 :       InterpreterPushArgsMode::kWithFinalSpread);
     776             :   Node* code_target = HeapConstant(callable.code());
     777             : 
     778             :   TailCallStubThenBytecodeDispatch(callable.descriptor(), code_target, context,
     779         186 :                                    arg_count, first_arg, function);
     780             :   // TailCallStubThenDispatch updates accumulator with result.
     781         186 :   accumulator_use_ = accumulator_use_ | AccumulatorUse::kWrite;
     782          93 : }
     783             : 
     784          93 : Node* InterpreterAssembler::Construct(Node* target, Node* context,
     785             :                                       Node* new_target, Node* first_arg,
     786             :                                       Node* arg_count, Node* slot_id,
     787             :                                       Node* feedback_vector) {
     788             :   DCHECK(Bytecodes::MakesCallAlongCriticalPath(bytecode_));
     789          93 :   VARIABLE(var_result, MachineRepresentation::kTagged);
     790         186 :   VARIABLE(var_site, MachineRepresentation::kTagged);
     791          93 :   Label extra_checks(this, Label::kDeferred), return_result(this, &var_result),
     792          93 :       construct(this), construct_array(this, &var_site);
     793             : 
     794             :   // Increment the call count.
     795          93 :   IncrementCallCount(feedback_vector, slot_id);
     796             : 
     797             :   // Check if we have monomorphic {new_target} feedback already.
     798          93 :   Node* feedback_element = LoadFeedbackVectorSlot(feedback_vector, slot_id);
     799          93 :   Node* feedback_value = LoadWeakCellValueUnchecked(feedback_element);
     800         186 :   Branch(WordEqual(new_target, feedback_value), &construct, &extra_checks);
     801             : 
     802          93 :   BIND(&extra_checks);
     803             :   {
     804          93 :     Label check_allocation_site(this), check_initialized(this),
     805          93 :         initialize(this), mark_megamorphic(this);
     806             : 
     807             :     // Check if it is a megamorphic {new_target}..
     808          93 :     Comment("check if megamorphic");
     809             :     Node* is_megamorphic =
     810             :         WordEqual(feedback_element,
     811          93 :                   HeapConstant(FeedbackVector::MegamorphicSentinel(isolate())));
     812          93 :     GotoIf(is_megamorphic, &construct);
     813             : 
     814          93 :     Comment("check if weak cell");
     815         186 :     Node* feedback_element_map = LoadMap(feedback_element);
     816         186 :     GotoIfNot(IsWeakCellMap(feedback_element_map), &check_allocation_site);
     817             : 
     818             :     // If the weak cell is cleared, we have a new chance to become monomorphic.
     819          93 :     Comment("check if weak cell is cleared");
     820         186 :     Node* is_smi = TaggedIsSmi(feedback_value);
     821          93 :     Branch(is_smi, &initialize, &mark_megamorphic);
     822             : 
     823          93 :     BIND(&check_allocation_site);
     824             :     {
     825             :       // Check if it is an AllocationSite.
     826          93 :       Comment("check if allocation site");
     827         186 :       GotoIfNot(IsAllocationSiteMap(feedback_element_map), &check_initialized);
     828             : 
     829             :       // Make sure that {target} and {new_target} are the Array constructor.
     830          93 :       Node* array_function = LoadContextElement(LoadNativeContext(context),
     831         279 :                                                 Context::ARRAY_FUNCTION_INDEX);
     832         186 :       GotoIfNot(WordEqual(target, array_function), &mark_megamorphic);
     833         186 :       GotoIfNot(WordEqual(new_target, array_function), &mark_megamorphic);
     834          93 :       var_site.Bind(feedback_element);
     835          93 :       Goto(&construct_array);
     836             :     }
     837             : 
     838          93 :     BIND(&check_initialized);
     839             :     {
     840             :       // Check if it is uninitialized.
     841          93 :       Comment("check if uninitialized");
     842             :       Node* is_uninitialized = WordEqual(
     843          93 :           feedback_element, LoadRoot(Heap::kuninitialized_symbolRootIndex));
     844          93 :       Branch(is_uninitialized, &initialize, &mark_megamorphic);
     845             :     }
     846             : 
     847          93 :     BIND(&initialize);
     848             :     {
     849             :       // Check if {new_target} is a JSFunction in the current native context.
     850          93 :       Label create_allocation_site(this), create_weak_cell(this);
     851          93 :       Comment("check if function in same native context");
     852         186 :       GotoIf(TaggedIsSmi(new_target), &mark_megamorphic);
     853             :       // TODO(bmeurer): Add support for arbitrary constructors here, and
     854             :       // check via GetFunctionRealm (see src/objects.cc).
     855         186 :       GotoIfNot(IsJSFunction(new_target), &mark_megamorphic);
     856             :       Node* new_target_context =
     857             :           LoadObjectField(new_target, JSFunction::kContextOffset);
     858         186 :       Node* new_target_native_context = LoadNativeContext(new_target_context);
     859             :       GotoIfNot(
     860         186 :           WordEqual(LoadNativeContext(context), new_target_native_context),
     861          93 :           &mark_megamorphic);
     862             : 
     863             :       // Create an AllocationSite if {target} and {new_target} refer
     864             :       // to the current native context's Array constructor.
     865         186 :       GotoIfNot(WordEqual(target, new_target), &create_weak_cell);
     866             :       Node* array_function = LoadContextElement(new_target_native_context,
     867         186 :                                                 Context::ARRAY_FUNCTION_INDEX);
     868          93 :       Branch(WordEqual(target, array_function), &create_allocation_site,
     869         186 :              &create_weak_cell);
     870             : 
     871          93 :       BIND(&create_allocation_site);
     872             :       {
     873             :         var_site.Bind(CreateAllocationSiteInFeedbackVector(feedback_vector,
     874         186 :                                                            SmiTag(slot_id)));
     875             :         // Reset profiler ticks.
     876             :         StoreObjectFieldNoWriteBarrier(feedback_vector,
     877             :                                        FeedbackVector::kProfilerTicksOffset,
     878         186 :                                        SmiConstant(0));
     879          93 :         Goto(&construct_array);
     880             :       }
     881             : 
     882          93 :       BIND(&create_weak_cell);
     883             :       {
     884          93 :         CreateWeakCellInFeedbackVector(feedback_vector, SmiTag(slot_id),
     885         186 :                                        new_target);
     886             :         // Reset profiler ticks.
     887             :         StoreObjectFieldNoWriteBarrier(feedback_vector,
     888             :                                        FeedbackVector::kProfilerTicksOffset,
     889         186 :                                        SmiConstant(0));
     890          93 :         Goto(&construct);
     891          93 :       }
     892             :     }
     893             : 
     894          93 :     BIND(&mark_megamorphic);
     895             :     {
     896             :       // MegamorphicSentinel is an immortal immovable object so
     897             :       // write-barrier is not needed.
     898          93 :       Comment("transition to megamorphic");
     899             :       DCHECK(Heap::RootIsImmortalImmovable(Heap::kmegamorphic_symbolRootIndex));
     900             :       StoreFeedbackVectorSlot(
     901             :           feedback_vector, slot_id,
     902          93 :           HeapConstant(FeedbackVector::MegamorphicSentinel(isolate())),
     903          93 :           SKIP_WRITE_BARRIER);
     904             :       // Reset profiler ticks.
     905             :       StoreObjectFieldNoWriteBarrier(feedback_vector,
     906             :                                      FeedbackVector::kProfilerTicksOffset,
     907         186 :                                      SmiConstant(0));
     908          93 :       Goto(&construct);
     909          93 :     }
     910             :   }
     911             : 
     912          93 :   BIND(&construct_array);
     913             :   {
     914             :     // TODO(bmeurer): Introduce a dedicated builtin to deal with the Array
     915             :     // constructor feedback collection inside of Ignition.
     916          93 :     Comment("call using ConstructArray builtin");
     917             :     Callable callable = CodeFactory::InterpreterPushArgsThenConstruct(
     918          93 :         isolate(), InterpreterPushArgsMode::kJSFunction);
     919             :     Node* code_target = HeapConstant(callable.code());
     920             :     var_result.Bind(CallStub(callable.descriptor(), code_target, context,
     921             :                              arg_count, new_target, target, var_site.value(),
     922         279 :                              first_arg));
     923          93 :     Goto(&return_result);
     924             :   }
     925             : 
     926          93 :   BIND(&construct);
     927             :   {
     928             :     // TODO(bmeurer): Remove the generic type_info parameter from the Construct.
     929          93 :     Comment("call using Construct builtin");
     930             :     Callable callable = CodeFactory::InterpreterPushArgsThenConstruct(
     931          93 :         isolate(), InterpreterPushArgsMode::kOther);
     932             :     Node* code_target = HeapConstant(callable.code());
     933             :     var_result.Bind(CallStub(callable.descriptor(), code_target, context,
     934             :                              arg_count, new_target, target, UndefinedConstant(),
     935         279 :                              first_arg));
     936          93 :     Goto(&return_result);
     937             :   }
     938             : 
     939          93 :   BIND(&return_result);
     940         186 :   return var_result.value();
     941             : }
     942             : 
     943          93 : Node* InterpreterAssembler::ConstructWithSpread(Node* target, Node* context,
     944             :                                                 Node* new_target,
     945             :                                                 Node* first_arg,
     946             :                                                 Node* arg_count, Node* slot_id,
     947             :                                                 Node* feedback_vector) {
     948             :   // TODO(bmeurer): Unify this with the Construct bytecode feedback
     949             :   // above once we have a way to pass the AllocationSite to the Array
     950             :   // constructor _and_ spread the last argument at the same time.
     951             :   DCHECK(Bytecodes::MakesCallAlongCriticalPath(bytecode_));
     952         186 :   Label extra_checks(this, Label::kDeferred), construct(this);
     953             : 
     954             :   // Increment the call count.
     955          93 :   IncrementCallCount(feedback_vector, slot_id);
     956             : 
     957             :   // Check if we have monomorphic {new_target} feedback already.
     958          93 :   Node* feedback_element = LoadFeedbackVectorSlot(feedback_vector, slot_id);
     959          93 :   Node* feedback_value = LoadWeakCellValueUnchecked(feedback_element);
     960         186 :   Branch(WordEqual(new_target, feedback_value), &construct, &extra_checks);
     961             : 
     962          93 :   BIND(&extra_checks);
     963             :   {
     964          93 :     Label check_initialized(this), initialize(this), mark_megamorphic(this);
     965             : 
     966             :     // Check if it is a megamorphic {new_target}.
     967          93 :     Comment("check if megamorphic");
     968             :     Node* is_megamorphic =
     969             :         WordEqual(feedback_element,
     970          93 :                   HeapConstant(FeedbackVector::MegamorphicSentinel(isolate())));
     971          93 :     GotoIf(is_megamorphic, &construct);
     972             : 
     973          93 :     Comment("check if weak cell");
     974             :     Node* is_weak_cell = WordEqual(LoadMap(feedback_element),
     975         279 :                                    LoadRoot(Heap::kWeakCellMapRootIndex));
     976          93 :     GotoIfNot(is_weak_cell, &check_initialized);
     977             : 
     978             :     // If the weak cell is cleared, we have a new chance to become monomorphic.
     979          93 :     Comment("check if weak cell is cleared");
     980         186 :     Node* is_smi = TaggedIsSmi(feedback_value);
     981          93 :     Branch(is_smi, &initialize, &mark_megamorphic);
     982             : 
     983          93 :     BIND(&check_initialized);
     984             :     {
     985             :       // Check if it is uninitialized.
     986          93 :       Comment("check if uninitialized");
     987             :       Node* is_uninitialized = WordEqual(
     988          93 :           feedback_element, LoadRoot(Heap::kuninitialized_symbolRootIndex));
     989          93 :       Branch(is_uninitialized, &initialize, &mark_megamorphic);
     990             :     }
     991             : 
     992          93 :     BIND(&initialize);
     993             :     {
     994             :       // Check if {new_target} is a JSFunction in the current native
     995             :       // context.
     996          93 :       Comment("check if function in same native context");
     997         186 :       GotoIf(TaggedIsSmi(new_target), &mark_megamorphic);
     998             :       // TODO(bmeurer): Add support for arbitrary constructors here, and
     999             :       // check via GetFunctionRealm (see src/objects.cc).
    1000         186 :       GotoIfNot(IsJSFunction(new_target), &mark_megamorphic);
    1001             :       Node* target_context =
    1002             :           LoadObjectField(new_target, JSFunction::kContextOffset);
    1003         186 :       Node* target_native_context = LoadNativeContext(target_context);
    1004         186 :       GotoIfNot(WordEqual(LoadNativeContext(context), target_native_context),
    1005          93 :                 &mark_megamorphic);
    1006             : 
    1007          93 :       CreateWeakCellInFeedbackVector(feedback_vector, SmiTag(slot_id),
    1008         186 :                                      new_target);
    1009             :       // Reset profiler ticks.
    1010             :       StoreObjectFieldNoWriteBarrier(feedback_vector,
    1011             :                                      FeedbackVector::kProfilerTicksOffset,
    1012         186 :                                      SmiConstant(0));
    1013          93 :       Goto(&construct);
    1014             :     }
    1015             : 
    1016          93 :     BIND(&mark_megamorphic);
    1017             :     {
    1018             :       // MegamorphicSentinel is an immortal immovable object so
    1019             :       // write-barrier is not needed.
    1020          93 :       Comment("transition to megamorphic");
    1021             :       DCHECK(Heap::RootIsImmortalImmovable(Heap::kmegamorphic_symbolRootIndex));
    1022             :       StoreFeedbackVectorSlot(
    1023             :           feedback_vector, slot_id,
    1024          93 :           HeapConstant(FeedbackVector::MegamorphicSentinel(isolate())),
    1025          93 :           SKIP_WRITE_BARRIER);
    1026             :       // Reset profiler ticks.
    1027             :       StoreObjectFieldNoWriteBarrier(feedback_vector,
    1028             :                                      FeedbackVector::kProfilerTicksOffset,
    1029         186 :                                      SmiConstant(0));
    1030          93 :       Goto(&construct);
    1031          93 :     }
    1032             :   }
    1033             : 
    1034          93 :   BIND(&construct);
    1035          93 :   Comment("call using ConstructWithSpread builtin");
    1036             :   Callable callable = CodeFactory::InterpreterPushArgsThenConstruct(
    1037          93 :       isolate(), InterpreterPushArgsMode::kWithFinalSpread);
    1038             :   Node* code_target = HeapConstant(callable.code());
    1039             :   return CallStub(callable.descriptor(), code_target, context, arg_count,
    1040         279 :                   new_target, target, UndefinedConstant(), first_arg);
    1041             : }
    1042             : 
    1043         192 : Node* InterpreterAssembler::CallRuntimeN(Node* function_id, Node* context,
    1044             :                                          Node* first_arg, Node* arg_count,
    1045             :                                          int result_size) {
    1046             :   DCHECK(Bytecodes::MakesCallAlongCriticalPath(bytecode_));
    1047             :   DCHECK(Bytecodes::IsCallRuntime(bytecode_));
    1048         192 :   Callable callable = CodeFactory::InterpreterCEntry(isolate(), result_size);
    1049             :   Node* code_target = HeapConstant(callable.code());
    1050             : 
    1051             :   // Get the function entry from the function id.
    1052             :   Node* function_table = ExternalConstant(
    1053         384 :       ExternalReference::runtime_function_table_address(isolate()));
    1054             :   Node* function_offset =
    1055         576 :       Int32Mul(function_id, Int32Constant(sizeof(Runtime::Function)));
    1056             :   Node* function =
    1057         576 :       IntPtrAdd(function_table, ChangeUint32ToWord(function_offset));
    1058             :   Node* function_entry =
    1059             :       Load(MachineType::Pointer(), function,
    1060         384 :            IntPtrConstant(offsetof(Runtime::Function, entry)));
    1061             : 
    1062             :   return CallStubR(callable.descriptor(), result_size, code_target, context,
    1063         384 :                    arg_count, first_arg, function_entry);
    1064             : }
    1065             : 
    1066        2170 : void InterpreterAssembler::UpdateInterruptBudget(Node* weight, bool backward) {
    1067        2170 :   Comment("[ UpdateInterruptBudget");
    1068             : 
    1069             :   Node* budget_offset =
    1070        4340 :       IntPtrConstant(BytecodeArray::kInterruptBudgetOffset - kHeapObjectTag);
    1071             : 
    1072             :   // Assert that the weight is positive (negative weights should be implemented
    1073             :   // as backward updates).
    1074             :   CSA_ASSERT(this, Int32GreaterThanOrEqual(weight, Int32Constant(0)));
    1075             : 
    1076             :   // Update budget by |weight| and check if it reaches zero.
    1077        2170 :   Variable new_budget(this, MachineRepresentation::kWord32);
    1078             :   Node* old_budget =
    1079        2170 :       Load(MachineType::Int32(), BytecodeArrayTaggedPointer(), budget_offset);
    1080             :   // Make sure we include the current bytecode in the budget calculation.
    1081             :   Node* budget_after_bytecode =
    1082        6510 :       Int32Sub(old_budget, Int32Constant(CurrentBytecodeSize()));
    1083             : 
    1084        2170 :   if (backward) {
    1085         434 :     new_budget.Bind(Int32Sub(budget_after_bytecode, weight));
    1086             : 
    1087             :     Node* condition =
    1088         868 :         Int32GreaterThanOrEqual(new_budget.value(), Int32Constant(0));
    1089         217 :     Label ok(this), interrupt_check(this, Label::kDeferred);
    1090         217 :     Branch(condition, &ok, &interrupt_check);
    1091             : 
    1092             :     // Perform interrupt and reset budget.
    1093         217 :     BIND(&interrupt_check);
    1094             :     {
    1095             :       CallRuntime(Runtime::kInterrupt, GetContext());
    1096         434 :       new_budget.Bind(Int32Constant(Interpreter::kInterruptBudget));
    1097         217 :       Goto(&ok);
    1098             :     }
    1099             : 
    1100         434 :     BIND(&ok);
    1101             :   } else {
    1102             :     // For a forward jump, we know we only increase the interrupt budget, so
    1103             :     // no need to check if it's below zero.
    1104        3906 :     new_budget.Bind(Int32Add(budget_after_bytecode, weight));
    1105             :   }
    1106             : 
    1107             :   // Update budget.
    1108             :   StoreNoWriteBarrier(MachineRepresentation::kWord32,
    1109             :                       BytecodeArrayTaggedPointer(), budget_offset,
    1110        2170 :                       new_budget.value());
    1111        2170 :   Comment("] UpdateInterruptBudget");
    1112        2170 : }
    1113             : 
    1114       14446 : Node* InterpreterAssembler::Advance() { return Advance(CurrentBytecodeSize()); }
    1115             : 
    1116       14508 : Node* InterpreterAssembler::Advance(int delta) {
    1117       29016 :   return Advance(IntPtrConstant(delta));
    1118             : }
    1119             : 
    1120       16647 : Node* InterpreterAssembler::Advance(Node* delta, bool backward) {
    1121             : #ifdef V8_TRACE_IGNITION
    1122             :   TraceBytecode(Runtime::kInterpreterTraceBytecodeExit);
    1123             : #endif
    1124       17205 :   Node* next_offset = backward ? IntPtrSub(BytecodeOffset(), delta)
    1125       82863 :                                : IntPtrAdd(BytecodeOffset(), delta);
    1126       16647 :   bytecode_offset_.Bind(next_offset);
    1127       16647 :   return next_offset;
    1128             : }
    1129             : 
    1130        2139 : Node* InterpreterAssembler::Jump(Node* delta, bool backward) {
    1131             :   DCHECK(!Bytecodes::IsStarLookahead(bytecode_, operand_scale_));
    1132             : 
    1133        6417 :   UpdateInterruptBudget(TruncateWordToWord32(delta), backward);
    1134        2139 :   Node* new_bytecode_offset = Advance(delta, backward);
    1135        2139 :   Node* target_bytecode = LoadBytecode(new_bytecode_offset);
    1136        2139 :   return DispatchToBytecode(target_bytecode, new_bytecode_offset);
    1137             : }
    1138             : 
    1139        1953 : Node* InterpreterAssembler::Jump(Node* delta) { return Jump(delta, false); }
    1140             : 
    1141         186 : Node* InterpreterAssembler::JumpBackward(Node* delta) {
    1142         186 :   return Jump(delta, true);
    1143             : }
    1144             : 
    1145        1116 : void InterpreterAssembler::JumpConditional(Node* condition, Node* delta) {
    1146        2232 :   Label match(this), no_match(this);
    1147             : 
    1148        1116 :   Branch(condition, &match, &no_match);
    1149        1116 :   BIND(&match);
    1150             :   Jump(delta);
    1151        1116 :   BIND(&no_match);
    1152        2232 :   Dispatch();
    1153        1116 : }
    1154             : 
    1155         744 : void InterpreterAssembler::JumpIfWordEqual(Node* lhs, Node* rhs, Node* delta) {
    1156        1488 :   JumpConditional(WordEqual(lhs, rhs), delta);
    1157         744 : }
    1158             : 
    1159         372 : void InterpreterAssembler::JumpIfWordNotEqual(Node* lhs, Node* rhs,
    1160             :                                               Node* delta) {
    1161         744 :   JumpConditional(WordNotEqual(lhs, rhs), delta);
    1162         372 : }
    1163             : 
    1164       16647 : Node* InterpreterAssembler::LoadBytecode(compiler::Node* bytecode_offset) {
    1165             :   Node* bytecode =
    1166       16647 :       Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(), bytecode_offset);
    1167       33294 :   return ChangeUint32ToWord(bytecode);
    1168             : }
    1169             : 
    1170         682 : Node* InterpreterAssembler::StarDispatchLookahead(Node* target_bytecode) {
    1171        1364 :   Label do_inline_star(this), done(this);
    1172             : 
    1173        1364 :   Variable var_bytecode(this, MachineType::PointerRepresentation());
    1174         682 :   var_bytecode.Bind(target_bytecode);
    1175             : 
    1176        1364 :   Node* star_bytecode = IntPtrConstant(static_cast<int>(Bytecode::kStar));
    1177        1364 :   Node* is_star = WordEqual(target_bytecode, star_bytecode);
    1178         682 :   Branch(is_star, &do_inline_star, &done);
    1179             : 
    1180         682 :   BIND(&do_inline_star);
    1181             :   {
    1182         682 :     InlineStar();
    1183         682 :     var_bytecode.Bind(LoadBytecode(BytecodeOffset()));
    1184         682 :     Goto(&done);
    1185             :   }
    1186         682 :   BIND(&done);
    1187        1364 :   return var_bytecode.value();
    1188             : }
    1189             : 
    1190         682 : void InterpreterAssembler::InlineStar() {
    1191         682 :   Bytecode previous_bytecode = bytecode_;
    1192         682 :   AccumulatorUse previous_acc_use = accumulator_use_;
    1193             : 
    1194         682 :   bytecode_ = Bytecode::kStar;
    1195         682 :   accumulator_use_ = AccumulatorUse::kNone;
    1196             : 
    1197             : #ifdef V8_TRACE_IGNITION
    1198             :   TraceBytecode(Runtime::kInterpreterTraceBytecodeEntry);
    1199             : #endif
    1200        1364 :   StoreRegister(GetAccumulator(), BytecodeOperandReg(0));
    1201             : 
    1202             :   DCHECK_EQ(accumulator_use_, Bytecodes::GetAccumulatorUse(bytecode_));
    1203             : 
    1204             :   Advance();
    1205         682 :   bytecode_ = previous_bytecode;
    1206         682 :   accumulator_use_ = previous_acc_use;
    1207         682 : }
    1208             : 
    1209       13764 : Node* InterpreterAssembler::Dispatch() {
    1210       13764 :   Comment("========= Dispatch");
    1211             :   DCHECK_IMPLIES(Bytecodes::MakesCallAlongCriticalPath(bytecode_), made_call_);
    1212             :   Node* target_offset = Advance();
    1213       13764 :   Node* target_bytecode = LoadBytecode(target_offset);
    1214             : 
    1215       13764 :   if (Bytecodes::IsStarLookahead(bytecode_, operand_scale_)) {
    1216         682 :     target_bytecode = StarDispatchLookahead(target_bytecode);
    1217             :   }
    1218       13764 :   return DispatchToBytecode(target_bytecode, BytecodeOffset());
    1219             : }
    1220             : 
    1221       15903 : Node* InterpreterAssembler::DispatchToBytecode(Node* target_bytecode,
    1222             :                                                Node* new_bytecode_offset) {
    1223       15903 :   if (FLAG_trace_ignition_dispatches) {
    1224           0 :     TraceBytecodeDispatch(target_bytecode);
    1225             :   }
    1226             : 
    1227             :   Node* target_code_entry =
    1228             :       Load(MachineType::Pointer(), DispatchTableRawPointer(),
    1229       15903 :            TimesPointerSize(target_bytecode));
    1230             : 
    1231       15903 :   return DispatchToBytecodeHandlerEntry(target_code_entry, new_bytecode_offset);
    1232             : }
    1233             : 
    1234         651 : Node* InterpreterAssembler::DispatchToBytecodeHandler(Node* handler,
    1235             :                                                       Node* bytecode_offset) {
    1236             :   // TODO(ishell): Add CSA::CodeEntryPoint(code).
    1237             :   Node* handler_entry =
    1238             :       IntPtrAdd(BitcastTaggedToWord(handler),
    1239        1953 :                 IntPtrConstant(Code::kHeaderSize - kHeapObjectTag));
    1240         651 :   return DispatchToBytecodeHandlerEntry(handler_entry, bytecode_offset);
    1241             : }
    1242             : 
    1243       16616 : Node* InterpreterAssembler::DispatchToBytecodeHandlerEntry(
    1244             :     Node* handler_entry, Node* bytecode_offset) {
    1245       16616 :   InterpreterDispatchDescriptor descriptor(isolate());
    1246             :   return TailCallBytecodeDispatch(
    1247             :       descriptor, handler_entry, GetAccumulatorUnchecked(), bytecode_offset,
    1248       33232 :       BytecodeArrayTaggedPointer(), DispatchTableRawPointer());
    1249             : }
    1250             : 
    1251          62 : void InterpreterAssembler::DispatchWide(OperandScale operand_scale) {
    1252             :   // Dispatching a wide bytecode requires treating the prefix
    1253             :   // bytecode a base pointer into the dispatch table and dispatching
    1254             :   // the bytecode that follows relative to this base.
    1255             :   //
    1256             :   //   Indices 0-255 correspond to bytecodes with operand_scale == 0
    1257             :   //   Indices 256-511 correspond to bytecodes with operand_scale == 1
    1258             :   //   Indices 512-767 correspond to bytecodes with operand_scale == 2
    1259             :   DCHECK_IMPLIES(Bytecodes::MakesCallAlongCriticalPath(bytecode_), made_call_);
    1260          62 :   Node* next_bytecode_offset = Advance(1);
    1261          62 :   Node* next_bytecode = LoadBytecode(next_bytecode_offset);
    1262             : 
    1263          62 :   if (FLAG_trace_ignition_dispatches) {
    1264           0 :     TraceBytecodeDispatch(next_bytecode);
    1265             :   }
    1266             : 
    1267             :   Node* base_index;
    1268          62 :   switch (operand_scale) {
    1269             :     case OperandScale::kDouble:
    1270          62 :       base_index = IntPtrConstant(1 << kBitsPerByte);
    1271          31 :       break;
    1272             :     case OperandScale::kQuadruple:
    1273          62 :       base_index = IntPtrConstant(2 << kBitsPerByte);
    1274          31 :       break;
    1275             :     default:
    1276           0 :       UNREACHABLE();
    1277             :       base_index = nullptr;
    1278             :   }
    1279         124 :   Node* target_index = IntPtrAdd(base_index, next_bytecode);
    1280             :   Node* target_code_entry =
    1281             :       Load(MachineType::Pointer(), DispatchTableRawPointer(),
    1282          62 :            TimesPointerSize(target_index));
    1283             : 
    1284          62 :   DispatchToBytecodeHandlerEntry(target_code_entry, next_bytecode_offset);
    1285          62 : }
    1286             : 
    1287          31 : void InterpreterAssembler::UpdateInterruptBudgetOnReturn() {
    1288             :   // TODO(rmcilroy): Investigate whether it is worth supporting self
    1289             :   // optimization of primitive functions like FullCodegen.
    1290             : 
    1291             :   // Update profiling count by the number of bytes between the end of the
    1292             :   // current bytecode and the start of the first one, to simulate backedge to
    1293             :   // start of function.
    1294             :   //
    1295             :   // With headers and current offset, the bytecode array layout looks like:
    1296             :   //
    1297             :   //           <---------- simulated backedge ----------
    1298             :   // | header | first bytecode | .... | return bytecode |
    1299             :   //  |<------ current offset ------->
    1300             :   //  ^ tagged bytecode array pointer
    1301             :   //
    1302             :   // UpdateInterruptBudget already handles adding the bytecode size to the
    1303             :   // length of the back-edge, so we just have to correct for the non-zero offset
    1304             :   // of the first bytecode.
    1305             : 
    1306             :   const int kFirstBytecodeOffset = BytecodeArray::kHeaderSize - kHeapObjectTag;
    1307          93 :   Node* profiling_weight = Int32Sub(TruncateWordToWord32(BytecodeOffset()),
    1308         155 :                                     Int32Constant(kFirstBytecodeOffset));
    1309          31 :   UpdateInterruptBudget(profiling_weight, true);
    1310          31 : }
    1311             : 
    1312          31 : Node* InterpreterAssembler::StackCheckTriggeredInterrupt() {
    1313          31 :   Node* sp = LoadStackPointer();
    1314             :   Node* stack_limit = Load(
    1315             :       MachineType::Pointer(),
    1316          62 :       ExternalConstant(ExternalReference::address_of_stack_limit(isolate())));
    1317          62 :   return UintPtrLessThan(sp, stack_limit);
    1318             : }
    1319             : 
    1320          93 : Node* InterpreterAssembler::LoadOSRNestingLevel() {
    1321             :   return LoadObjectField(BytecodeArrayTaggedPointer(),
    1322             :                          BytecodeArray::kOSRNestingLevelOffset,
    1323         186 :                          MachineType::Int8());
    1324             : }
    1325             : 
    1326        1085 : void InterpreterAssembler::Abort(BailoutReason bailout_reason) {
    1327        1085 :   disable_stack_check_across_call_ = true;
    1328        2170 :   Node* abort_id = SmiConstant(bailout_reason);
    1329             :   CallRuntime(Runtime::kAbort, GetContext(), abort_id);
    1330        1085 :   disable_stack_check_across_call_ = false;
    1331        1085 : }
    1332             : 
    1333           0 : void InterpreterAssembler::AbortIfWordNotEqual(Node* lhs, Node* rhs,
    1334             :                                                BailoutReason bailout_reason) {
    1335           0 :   Label ok(this), abort(this, Label::kDeferred);
    1336           0 :   Branch(WordEqual(lhs, rhs), &ok, &abort);
    1337             : 
    1338           0 :   BIND(&abort);
    1339           0 :   Abort(bailout_reason);
    1340           0 :   Goto(&ok);
    1341             : 
    1342           0 :   BIND(&ok);
    1343           0 : }
    1344             : 
    1345         651 : void InterpreterAssembler::MaybeDropFrames(Node* context) {
    1346             :   Node* restart_fp_address =
    1347        1302 :       ExternalConstant(ExternalReference::debug_restart_fp_address(isolate()));
    1348             : 
    1349         651 :   Node* restart_fp = Load(MachineType::Pointer(), restart_fp_address);
    1350        1302 :   Node* null = IntPtrConstant(0);
    1351             : 
    1352         651 :   Label ok(this), drop_frames(this);
    1353        1302 :   Branch(IntPtrEqual(restart_fp, null), &ok, &drop_frames);
    1354             : 
    1355         651 :   BIND(&drop_frames);
    1356             :   // We don't expect this call to return since the frame dropper tears down
    1357             :   // the stack and jumps into the function on the target frame to restart it.
    1358        1302 :   CallStub(CodeFactory::FrameDropperTrampoline(isolate()), context, restart_fp);
    1359         651 :   Abort(kUnexpectedReturnFromFrameDropper);
    1360         651 :   Goto(&ok);
    1361             : 
    1362        1302 :   BIND(&ok);
    1363         651 : }
    1364             : 
    1365           0 : void InterpreterAssembler::TraceBytecode(Runtime::FunctionId function_id) {
    1366             :   CallRuntime(function_id, GetContext(), BytecodeArrayTaggedPointer(),
    1367           0 :               SmiTag(BytecodeOffset()), GetAccumulatorUnchecked());
    1368           0 : }
    1369             : 
    1370           0 : void InterpreterAssembler::TraceBytecodeDispatch(Node* target_bytecode) {
    1371             :   Node* counters_table = ExternalConstant(
    1372           0 :       ExternalReference::interpreter_dispatch_counters(isolate()));
    1373             :   Node* source_bytecode_table_index = IntPtrConstant(
    1374           0 :       static_cast<int>(bytecode_) * (static_cast<int>(Bytecode::kLast) + 1));
    1375             : 
    1376             :   Node* counter_offset =
    1377           0 :       TimesPointerSize(IntPtrAdd(source_bytecode_table_index, target_bytecode));
    1378             :   Node* old_counter =
    1379           0 :       Load(MachineType::IntPtr(), counters_table, counter_offset);
    1380             : 
    1381           0 :   Label counter_ok(this), counter_saturated(this, Label::kDeferred);
    1382             : 
    1383             :   Node* counter_reached_max = WordEqual(
    1384           0 :       old_counter, IntPtrConstant(std::numeric_limits<uintptr_t>::max()));
    1385           0 :   Branch(counter_reached_max, &counter_saturated, &counter_ok);
    1386             : 
    1387           0 :   BIND(&counter_ok);
    1388             :   {
    1389           0 :     Node* new_counter = IntPtrAdd(old_counter, IntPtrConstant(1));
    1390             :     StoreNoWriteBarrier(MachineType::PointerRepresentation(), counters_table,
    1391           0 :                         counter_offset, new_counter);
    1392           0 :     Goto(&counter_saturated);
    1393             :   }
    1394             : 
    1395           0 :   BIND(&counter_saturated);
    1396           0 : }
    1397             : 
    1398             : // static
    1399         552 : bool InterpreterAssembler::TargetSupportsUnalignedAccess() {
    1400             : #if V8_TARGET_ARCH_MIPS || V8_TARGET_ARCH_MIPS64
    1401             :   return false;
    1402             : #elif V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_S390 || \
    1403             :     V8_TARGET_ARCH_ARM || V8_TARGET_ARCH_ARM64 || V8_TARGET_ARCH_PPC
    1404         552 :   return true;
    1405             : #else
    1406             : #error "Unknown Architecture"
    1407             : #endif
    1408             : }
    1409             : 
    1410           0 : void InterpreterAssembler::AbortIfRegisterCountInvalid(Node* register_file,
    1411             :                                                        Node* register_count) {
    1412           0 :   Node* array_size = LoadAndUntagFixedArrayBaseLength(register_file);
    1413             : 
    1414           0 :   Label ok(this), abort(this, Label::kDeferred);
    1415           0 :   Branch(UintPtrLessThanOrEqual(register_count, array_size), &ok, &abort);
    1416             : 
    1417           0 :   BIND(&abort);
    1418           0 :   Abort(kInvalidRegisterFileInGenerator);
    1419           0 :   Goto(&ok);
    1420             : 
    1421           0 :   BIND(&ok);
    1422           0 : }
    1423             : 
    1424          93 : Node* InterpreterAssembler::ExportRegisterFile(Node* array,
    1425             :                                                Node* register_count) {
    1426          93 :   if (FLAG_debug_code) {
    1427           0 :     AbortIfRegisterCountInvalid(array, register_count);
    1428             :   }
    1429             : 
    1430          93 :   Variable var_index(this, MachineType::PointerRepresentation());
    1431         186 :   var_index.Bind(IntPtrConstant(0));
    1432             : 
    1433             :   // Iterate over register file and write values into array.
    1434             :   // The mapping of register to array index must match that used in
    1435             :   // BytecodeGraphBuilder::VisitResumeGenerator.
    1436          93 :   Label loop(this, &var_index), done_loop(this);
    1437          93 :   Goto(&loop);
    1438          93 :   BIND(&loop);
    1439             :   {
    1440          93 :     Node* index = var_index.value();
    1441         186 :     GotoIfNot(UintPtrLessThan(index, register_count), &done_loop);
    1442             : 
    1443         279 :     Node* reg_index = IntPtrSub(IntPtrConstant(Register(0).ToOperand()), index);
    1444          93 :     Node* value = LoadRegister(reg_index);
    1445             : 
    1446          93 :     StoreFixedArrayElement(array, index, value);
    1447             : 
    1448         279 :     var_index.Bind(IntPtrAdd(index, IntPtrConstant(1)));
    1449          93 :     Goto(&loop);
    1450             :   }
    1451          93 :   BIND(&done_loop);
    1452             : 
    1453          93 :   return array;
    1454             : }
    1455             : 
    1456          93 : Node* InterpreterAssembler::ImportRegisterFile(Node* array,
    1457             :                                                Node* register_count) {
    1458          93 :   if (FLAG_debug_code) {
    1459           0 :     AbortIfRegisterCountInvalid(array, register_count);
    1460             :   }
    1461             : 
    1462          93 :   Variable var_index(this, MachineType::PointerRepresentation());
    1463         186 :   var_index.Bind(IntPtrConstant(0));
    1464             : 
    1465             :   // Iterate over array and write values into register file.  Also erase the
    1466             :   // array contents to not keep them alive artificially.
    1467          93 :   Label loop(this, &var_index), done_loop(this);
    1468          93 :   Goto(&loop);
    1469          93 :   BIND(&loop);
    1470             :   {
    1471          93 :     Node* index = var_index.value();
    1472         186 :     GotoIfNot(UintPtrLessThan(index, register_count), &done_loop);
    1473             : 
    1474          93 :     Node* value = LoadFixedArrayElement(array, index);
    1475             : 
    1476         279 :     Node* reg_index = IntPtrSub(IntPtrConstant(Register(0).ToOperand()), index);
    1477          93 :     StoreRegister(value, reg_index);
    1478             : 
    1479          93 :     StoreFixedArrayElement(array, index, StaleRegisterConstant());
    1480             : 
    1481         279 :     var_index.Bind(IntPtrAdd(index, IntPtrConstant(1)));
    1482          93 :     Goto(&loop);
    1483             :   }
    1484          93 :   BIND(&done_loop);
    1485             : 
    1486          93 :   return array;
    1487             : }
    1488             : 
    1489           0 : int InterpreterAssembler::CurrentBytecodeSize() const {
    1490       16616 :   return Bytecodes::Size(bytecode_, operand_scale_);
    1491             : }
    1492             : 
    1493         186 : void InterpreterAssembler::ToNumberOrNumeric(Object::Conversion mode) {
    1494             :   Node* object = GetAccumulator();
    1495             :   Node* context = GetContext();
    1496             : 
    1497         186 :   Variable var_type_feedback(this, MachineRepresentation::kTaggedSigned);
    1498         372 :   Variable var_result(this, MachineRepresentation::kTagged);
    1499         186 :   Label if_done(this), if_objectissmi(this), if_objectisheapnumber(this),
    1500         186 :       if_objectisother(this, Label::kDeferred);
    1501             : 
    1502         372 :   GotoIf(TaggedIsSmi(object), &if_objectissmi);
    1503         372 :   Branch(IsHeapNumber(object), &if_objectisheapnumber, &if_objectisother);
    1504             : 
    1505         186 :   BIND(&if_objectissmi);
    1506             :   {
    1507         186 :     var_result.Bind(object);
    1508         372 :     var_type_feedback.Bind(SmiConstant(BinaryOperationFeedback::kSignedSmall));
    1509         186 :     Goto(&if_done);
    1510             :   }
    1511             : 
    1512         186 :   BIND(&if_objectisheapnumber);
    1513             :   {
    1514         186 :     var_result.Bind(object);
    1515         372 :     var_type_feedback.Bind(SmiConstant(BinaryOperationFeedback::kNumber));
    1516         186 :     Goto(&if_done);
    1517             :   }
    1518             : 
    1519         186 :   BIND(&if_objectisother);
    1520             :   {
    1521             :     auto builtin = Builtins::kNonNumberToNumber;
    1522         186 :     if (mode == Object::Conversion::kToNumeric) {
    1523             :       builtin = Builtins::kNonNumberToNumeric;
    1524             :       // Special case for collecting BigInt feedback.
    1525             :       Label not_bigint(this);
    1526         186 :       GotoIfNot(IsBigInt(object), &not_bigint);
    1527             :       {
    1528          93 :         var_result.Bind(object);
    1529         186 :         var_type_feedback.Bind(SmiConstant(BinaryOperationFeedback::kBigInt));
    1530          93 :         Goto(&if_done);
    1531             :       }
    1532          93 :       BIND(&not_bigint);
    1533             :     }
    1534             : 
    1535             :     // Convert {object} by calling out to the appropriate builtin.
    1536         186 :     var_result.Bind(CallBuiltin(builtin, context, object));
    1537         372 :     var_type_feedback.Bind(SmiConstant(BinaryOperationFeedback::kAny));
    1538         186 :     Goto(&if_done);
    1539             :   }
    1540             : 
    1541         186 :   BIND(&if_done);
    1542             : 
    1543             :   // Record the type feedback collected for {object}.
    1544         186 :   Node* slot_index = BytecodeOperandIdx(0);
    1545         186 :   Node* feedback_vector = LoadFeedbackVector();
    1546         186 :   UpdateFeedback(var_type_feedback.value(), feedback_vector, slot_index);
    1547             : 
    1548         186 :   SetAccumulator(var_result.value());
    1549         372 :   Dispatch();
    1550         186 : }
    1551             : 
    1552             : }  // namespace interpreter
    1553             : }  // namespace internal
    1554             : }  // namespace v8

Generated by: LCOV version 1.10