LCOV - code coverage report
Current view: top level - src/interpreter - interpreter-assembler.cc (source / functions) Hit Total Coverage
Test: app.info Lines: 580 642 90.3 %
Date: 2017-04-26 Functions: 82 90 91.1 %

          Line data    Source code
       1             : // Copyright 2015 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #include "src/interpreter/interpreter-assembler.h"
       6             : 
       7             : #include <limits>
       8             : #include <ostream>
       9             : 
      10             : #include "src/code-factory.h"
      11             : #include "src/frames.h"
      12             : #include "src/interface-descriptors.h"
      13             : #include "src/interpreter/bytecodes.h"
      14             : #include "src/interpreter/interpreter.h"
      15             : #include "src/machine-type.h"
      16             : #include "src/macro-assembler.h"
      17             : #include "src/objects-inl.h"
      18             : #include "src/zone/zone.h"
      19             : 
      20             : namespace v8 {
      21             : namespace internal {
      22             : namespace interpreter {
      23             : 
      24             : using compiler::CodeAssemblerState;
      25             : using compiler::Node;
      26             : 
// Constructs the assembler used to build the dispatch handler for |bytecode|
// at |operand_scale|. The interpreter dispatch parameters (accumulator,
// bytecode offset, bytecode array, dispatch table) are bound into CSA
// variables up front so they can be re-loaded after calls clobber them.
InterpreterAssembler::InterpreterAssembler(CodeAssemblerState* state,
                                           Bytecode bytecode,
                                           OperandScale operand_scale)
    : CodeStubAssembler(state),
      bytecode_(bytecode),
      operand_scale_(operand_scale),
      bytecode_offset_(this, MachineType::PointerRepresentation()),
      interpreted_frame_pointer_(this, MachineType::PointerRepresentation()),
      bytecode_array_(this, MachineRepresentation::kTagged),
      dispatch_table_(this, MachineType::PointerRepresentation()),
      accumulator_(this, MachineRepresentation::kTagged),
      accumulator_use_(AccumulatorUse::kNone),
      made_call_(false),
      reloaded_frame_ptr_(false),
      saved_bytecode_offset_(false),
      disable_stack_check_across_call_(false),
      stack_pointer_before_call_(nullptr) {
  // Seed the CSA variables from the dispatch descriptor's parameters.
  accumulator_.Bind(Parameter(InterpreterDispatchDescriptor::kAccumulator));
  bytecode_offset_.Bind(
      Parameter(InterpreterDispatchDescriptor::kBytecodeOffset));
  bytecode_array_.Bind(
      Parameter(InterpreterDispatchDescriptor::kBytecodeArray));
  dispatch_table_.Bind(
      Parameter(InterpreterDispatchDescriptor::kDispatchTable));

  if (FLAG_trace_ignition) {
    TraceBytecode(Runtime::kInterpreterTraceBytecodeEntry);
  }
  // Hook the spill/reload protocol (CallPrologue/CallEpilogue) into every
  // call the CodeAssembler generates.
  RegisterCallGenerationCallbacks([this] { CallPrologue(); },
                                  [this] { CallEpilogue(); });

  // Bytecodes that make a call along the critical path save the bytecode
  // offset eagerly, so CallPrologue need not spill it before each call.
  if (Bytecodes::MakesCallAlongCriticalPath(bytecode)) {
    SaveBytecodeOffset();
  }
}
      62             : 
// Verifies accumulator usage and unhooks the call-generation callbacks.
InterpreterAssembler::~InterpreterAssembler() {
  // If the following check fails the handler does not use the
  // accumulator in the way described in the bytecode definitions in
  // bytecodes.h.
  DCHECK_EQ(accumulator_use_, Bytecodes::GetAccumulatorUse(bytecode_));
  UnregisterCallGenerationCallbacks();
}
      70             : 
// Returns the interpreted frame pointer, binding it lazily on first use.
// After a call on the critical path the cached value may be stale, so it is
// re-loaded once (tracked by reloaded_frame_ptr_).
Node* InterpreterAssembler::GetInterpretedFramePointer() {
  if (!interpreted_frame_pointer_.IsBound()) {
    interpreted_frame_pointer_.Bind(LoadParentFramePointer());
  } else if (Bytecodes::MakesCallAlongCriticalPath(bytecode_) && made_call_ &&
             !reloaded_frame_ptr_) {
    interpreted_frame_pointer_.Bind(LoadParentFramePointer());
    reloaded_frame_ptr_ = true;
  }
  return interpreted_frame_pointer_.value();
}
      81             : 
// Returns the accumulator without recording a read in accumulator_use_,
// bypassing the usage validation done in the destructor.
Node* InterpreterAssembler::GetAccumulatorUnchecked() {
  return accumulator_.value();
}
      85             : 
// Returns the accumulator, recording the read so the destructor can verify
// the handler's accumulator usage against the bytecode definition.
Node* InterpreterAssembler::GetAccumulator() {
  DCHECK(Bytecodes::ReadsAccumulator(bytecode_));
  accumulator_use_ = accumulator_use_ | AccumulatorUse::kRead;
  return GetAccumulatorUnchecked();
}
      91             : 
// Binds |value| as the new accumulator, recording the write for the
// destructor's usage validation.
void InterpreterAssembler::SetAccumulator(Node* value) {
  DCHECK(Bytecodes::WritesAccumulator(bytecode_));
  accumulator_use_ = accumulator_use_ | AccumulatorUse::kWrite;
  accumulator_.Bind(value);
}
      97             : 
// Loads the current context from its dedicated interpreter frame register.
Node* InterpreterAssembler::GetContext() {
  return LoadRegister(Register::current_context());
}
     101             : 
// Stores |value| into the current-context interpreter frame register.
void InterpreterAssembler::SetContext(Node* value) {
  StoreRegister(value, Register::current_context());
}
     105             : 
// Walks |depth| links up the context chain starting at |context| and returns
// the context found there. |depth| is a Word32; a depth of 0 returns
// |context| itself via the fast path.
Node* InterpreterAssembler::GetContextAtDepth(Node* context, Node* depth) {
  Variable cur_context(this, MachineRepresentation::kTaggedPointer);
  cur_context.Bind(context);

  Variable cur_depth(this, MachineRepresentation::kWord32);
  cur_depth.Bind(depth);

  Label context_found(this);

  // Both variables are loop phis of the search loop.
  Variable* context_search_loop_variables[2] = {&cur_depth, &cur_context};
  Label context_search(this, 2, context_search_loop_variables);

  // Fast path if the depth is 0.
  Branch(Word32Equal(depth, Int32Constant(0)), &context_found, &context_search);

  // Loop until the depth is 0.
  Bind(&context_search);
  {
    cur_depth.Bind(Int32Sub(cur_depth.value(), Int32Constant(1)));
    cur_context.Bind(
        LoadContextElement(cur_context.value(), Context::PREVIOUS_INDEX));

    Branch(Word32Equal(cur_depth.value(), Int32Constant(0)), &context_found,
           &context_search);
  }

  Bind(&context_found);
  return cur_context.value();
}
     135             : 
// Jumps to |target| if any context between |context| (inclusive) and the
// context |depth| links up the chain has a non-hole extension slot.
// Otherwise falls through after examining |depth| contexts.
void InterpreterAssembler::GotoIfHasContextExtensionUpToDepth(Node* context,
                                                              Node* depth,
                                                              Label* target) {
  Variable cur_context(this, MachineRepresentation::kTaggedPointer);
  cur_context.Bind(context);

  Variable cur_depth(this, MachineRepresentation::kWord32);
  cur_depth.Bind(depth);

  // Both variables are loop phis of the search loop.
  Variable* context_search_loop_variables[2] = {&cur_depth, &cur_context};
  Label context_search(this, 2, context_search_loop_variables);

  // Loop until the depth is 0.
  Goto(&context_search);
  Bind(&context_search);
  {
    // TODO(leszeks): We only need to do this check if the context had a sloppy
    // eval, we could pass in a context chain bitmask to figure out which
    // contexts actually need to be checked.

    Node* extension_slot =
        LoadContextElement(cur_context.value(), Context::EXTENSION_INDEX);

    // Jump to the target if the extension slot is not a hole.
    GotoIf(WordNotEqual(extension_slot, TheHoleConstant()), target);

    cur_depth.Bind(Int32Sub(cur_depth.value(), Int32Constant(1)));
    cur_context.Bind(
        LoadContextElement(cur_context.value(), Context::PREVIOUS_INDEX));

    GotoIf(Word32NotEqual(cur_depth.value(), Int32Constant(0)),
           &context_search);
  }
}
     170             : 
// Returns the current bytecode offset. If a call was made along the critical
// path and the variable still holds the dispatch parameter, the offset is
// re-loaded from the frame register that SaveBytecodeOffset/CallPrologue
// spilled it to.
// NOTE(review): the == compares Node pointers — this relies on Parameter()
// returning the identical node for the same descriptor index; confirm against
// CodeAssembler.
Node* InterpreterAssembler::BytecodeOffset() {
  if (Bytecodes::MakesCallAlongCriticalPath(bytecode_) && made_call_ &&
      (bytecode_offset_.value() ==
       Parameter(InterpreterDispatchDescriptor::kBytecodeOffset))) {
    bytecode_offset_.Bind(LoadAndUntagRegister(Register::bytecode_offset()));
  }
  return bytecode_offset_.value();
}
     179             : 
// Returns the tagged pointer to the current bytecode array.
Node* InterpreterAssembler::BytecodeArrayTaggedPointer() {
  // Force a re-load of the bytecode array after every call in case the debugger
  // has been activated.
  // NOTE(review): the == compares Node pointers (same caveat as
  // BytecodeOffset): only re-load when the variable still holds the original
  // dispatch parameter node.
  if (made_call_ &&
      (bytecode_array_.value() ==
       Parameter(InterpreterDispatchDescriptor::kBytecodeArray))) {
    bytecode_array_.Bind(LoadRegister(Register::bytecode_array()));
  }
  return bytecode_array_.value();
}
     190             : 
// Returns the raw pointer to the interpreter dispatch table, re-materializing
// it from the isolate's external reference after a call on the critical path
// (the parameter value may have been clobbered).
Node* InterpreterAssembler::DispatchTableRawPointer() {
  if (Bytecodes::MakesCallAlongCriticalPath(bytecode_) && made_call_ &&
      (dispatch_table_.value() ==
       Parameter(InterpreterDispatchDescriptor::kDispatchTable))) {
    dispatch_table_.Bind(ExternalConstant(
        ExternalReference::interpreter_dispatch_table_address(isolate())));
  }
  return dispatch_table_.value();
}
     200             : 
     201        1584 : Node* InterpreterAssembler::RegisterLocation(Node* reg_index) {
     202             :   return IntPtrAdd(GetInterpretedFramePointer(),
     203        1584 :                    RegisterFrameOffset(reg_index));
     204             : }
     205             : 
// Converts a register index into a byte offset from the frame pointer
// (index * kPointerSize). Register indexes are negative, so this yields a
// negative offset.
Node* InterpreterAssembler::RegisterFrameOffset(Node* index) {
  return WordShl(index, kPointerSizeLog2);
}
     209             : 
     210       46727 : Node* InterpreterAssembler::LoadRegister(Register reg) {
     211             :   return Load(MachineType::AnyTagged(), GetInterpretedFramePointer(),
     212       46727 :               IntPtrConstant(reg.ToOperand() << kPointerSizeLog2));
     213             : }
     214             : 
     215       12936 : Node* InterpreterAssembler::LoadRegister(Node* reg_index) {
     216             :   return Load(MachineType::AnyTagged(), GetInterpretedFramePointer(),
     217       12936 :               RegisterFrameOffset(reg_index));
     218             : }
     219             : 
     220        2709 : Node* InterpreterAssembler::LoadAndUntagRegister(Register reg) {
     221             :   return LoadAndUntagSmi(GetInterpretedFramePointer(), reg.ToOperand()
     222        2709 :                                                            << kPointerSizeLog2);
     223             : }
     224             : 
     225         258 : Node* InterpreterAssembler::StoreRegister(Node* value, Register reg) {
     226             :   return StoreNoWriteBarrier(
     227             :       MachineRepresentation::kTagged, GetInterpretedFramePointer(),
     228         258 :       IntPtrConstant(reg.ToOperand() << kPointerSizeLog2), value);
     229             : }
     230             : 
     231        4207 : Node* InterpreterAssembler::StoreRegister(Node* value, Node* reg_index) {
     232             :   return StoreNoWriteBarrier(MachineRepresentation::kTagged,
     233             :                              GetInterpretedFramePointer(),
     234        4207 :                              RegisterFrameOffset(reg_index), value);
     235             : }
     236             : 
     237       37459 : Node* InterpreterAssembler::StoreAndTagRegister(compiler::Node* value,
     238             :                                                 Register reg) {
     239       37459 :   int offset = reg.ToOperand() << kPointerSizeLog2;
     240       37459 :   return StoreAndTagSmi(GetInterpretedFramePointer(), offset, value);
     241             : }
     242             : 
// Returns the index of the register following |reg_index|.
Node* InterpreterAssembler::NextRegister(Node* reg_index) {
  // Register indexes are negative, so the next index is minus one.
  return IntPtrAdd(reg_index, IntPtrConstant(-1));
}
     247             : 
     248       13446 : Node* InterpreterAssembler::OperandOffset(int operand_index) {
     249             :   return IntPtrConstant(
     250       13446 :       Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale()));
     251             : }
     252             : 
     253        7302 : Node* InterpreterAssembler::BytecodeOperandUnsignedByte(int operand_index) {
     254             :   DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
     255             :   DCHECK_EQ(OperandSize::kByte, Bytecodes::GetOperandSize(
     256             :                                     bytecode_, operand_index, operand_scale()));
     257        7302 :   Node* operand_offset = OperandOffset(operand_index);
     258             :   return Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
     259        7302 :               IntPtrAdd(BytecodeOffset(), operand_offset));
     260             : }
     261             : 
     262        6144 : Node* InterpreterAssembler::BytecodeOperandSignedByte(int operand_index) {
     263             :   DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
     264             :   DCHECK_EQ(OperandSize::kByte, Bytecodes::GetOperandSize(
     265             :                                     bytecode_, operand_index, operand_scale()));
     266        6144 :   Node* operand_offset = OperandOffset(operand_index);
     267             :   return Load(MachineType::Int8(), BytecodeArrayTaggedPointer(),
     268        6144 :               IntPtrAdd(BytecodeOffset(), operand_offset));
     269             : }
     270             : 
// Reads a 16- or 32-bit operand byte-by-byte for targets without unaligned
// access support, assembling the bytes into a Word32 with the correct
// endianness and signedness (|result_type| determines width and whether the
// most significant byte is sign-extended).
compiler::Node* InterpreterAssembler::BytecodeOperandReadUnaligned(
    int relative_offset, MachineType result_type) {
  static const int kMaxCount = 4;
  DCHECK(!TargetSupportsUnalignedAccess());

  int count;
  switch (result_type.representation()) {
    case MachineRepresentation::kWord16:
      count = 2;
      break;
    case MachineRepresentation::kWord32:
      count = 4;
      break;
    default:
      UNREACHABLE();
      break;
  }
  // Only the most significant byte carries the sign; all others are loaded
  // as unsigned.
  MachineType msb_type =
      result_type.IsSigned() ? MachineType::Int8() : MachineType::Uint8();

#if V8_TARGET_LITTLE_ENDIAN
  const int kStep = -1;
  int msb_offset = count - 1;
#elif V8_TARGET_BIG_ENDIAN
  const int kStep = 1;
  int msb_offset = 0;
#else
#error "Unknown Architecture"
#endif

  // Read the most significant byte into bytes[0] and then in order
  // down to least significant in bytes[count - 1].
  DCHECK(count <= kMaxCount);
  compiler::Node* bytes[kMaxCount];
  for (int i = 0; i < count; i++) {
    MachineType machine_type = (i == 0) ? msb_type : MachineType::Uint8();
    Node* offset = IntPtrConstant(relative_offset + msb_offset + i * kStep);
    Node* array_offset = IntPtrAdd(BytecodeOffset(), offset);
    bytes[i] = Load(machine_type, BytecodeArrayTaggedPointer(), array_offset);
  }

  // Pack LSB to MSB.
  Node* result = bytes[--count];
  for (int i = 1; --count >= 0; i++) {
    Node* shift = Int32Constant(i * kBitsPerByte);
    Node* value = Word32Shl(bytes[count], shift);
    result = Word32Or(value, result);
  }
  return result;
}
     321             : 
     322        6602 : Node* InterpreterAssembler::BytecodeOperandUnsignedShort(int operand_index) {
     323             :   DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
     324             :   DCHECK_EQ(
     325             :       OperandSize::kShort,
     326             :       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()));
     327             :   int operand_offset =
     328        6602 :       Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
     329             :   if (TargetSupportsUnalignedAccess()) {
     330             :     return Load(MachineType::Uint16(), BytecodeArrayTaggedPointer(),
     331        6602 :                 IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
     332             :   } else {
     333             :     return BytecodeOperandReadUnaligned(operand_offset, MachineType::Uint16());
     334             :   }
     335             : }
     336             : 
     337        4811 : Node* InterpreterAssembler::BytecodeOperandSignedShort(int operand_index) {
     338             :   DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
     339             :   DCHECK_EQ(
     340             :       OperandSize::kShort,
     341             :       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()));
     342             :   int operand_offset =
     343        4811 :       Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
     344             :   if (TargetSupportsUnalignedAccess()) {
     345             :     return Load(MachineType::Int16(), BytecodeArrayTaggedPointer(),
     346        4811 :                 IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
     347             :   } else {
     348             :     return BytecodeOperandReadUnaligned(operand_offset, MachineType::Int16());
     349             :   }
     350             : }
     351             : 
     352        6332 : Node* InterpreterAssembler::BytecodeOperandUnsignedQuad(int operand_index) {
     353             :   DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
     354             :   DCHECK_EQ(OperandSize::kQuad, Bytecodes::GetOperandSize(
     355             :                                     bytecode_, operand_index, operand_scale()));
     356             :   int operand_offset =
     357        6332 :       Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
     358             :   if (TargetSupportsUnalignedAccess()) {
     359             :     return Load(MachineType::Uint32(), BytecodeArrayTaggedPointer(),
     360        6332 :                 IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
     361             :   } else {
     362             :     return BytecodeOperandReadUnaligned(operand_offset, MachineType::Uint32());
     363             :   }
     364             : }
     365             : 
     366        4811 : Node* InterpreterAssembler::BytecodeOperandSignedQuad(int operand_index) {
     367             :   DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
     368             :   DCHECK_EQ(OperandSize::kQuad, Bytecodes::GetOperandSize(
     369             :                                     bytecode_, operand_index, operand_scale()));
     370             :   int operand_offset =
     371        4811 :       Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
     372             :   if (TargetSupportsUnalignedAccess()) {
     373             :     return Load(MachineType::Int32(), BytecodeArrayTaggedPointer(),
     374        4811 :                 IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
     375             :   } else {
     376             :     return BytecodeOperandReadUnaligned(operand_offset, MachineType::Int32());
     377             :   }
     378             : }
     379             : 
// Dispatches to the width-specific signed operand loader for
// |operand_size|. The trailing return is unreachable but keeps the compiler
// happy after the exhaustive switch.
Node* InterpreterAssembler::BytecodeSignedOperand(int operand_index,
                                                  OperandSize operand_size) {
  DCHECK(!Bytecodes::IsUnsignedOperandType(
      Bytecodes::GetOperandType(bytecode_, operand_index)));
  switch (operand_size) {
    case OperandSize::kByte:
      return BytecodeOperandSignedByte(operand_index);
    case OperandSize::kShort:
      return BytecodeOperandSignedShort(operand_index);
    case OperandSize::kQuad:
      return BytecodeOperandSignedQuad(operand_index);
    case OperandSize::kNone:
      UNREACHABLE();
  }
  return nullptr;
}
     396             : 
// Dispatches to the width-specific unsigned operand loader for
// |operand_size|. The trailing return is unreachable but keeps the compiler
// happy after the exhaustive switch.
Node* InterpreterAssembler::BytecodeUnsignedOperand(int operand_index,
                                                    OperandSize operand_size) {
  DCHECK(Bytecodes::IsUnsignedOperandType(
      Bytecodes::GetOperandType(bytecode_, operand_index)));
  switch (operand_size) {
    case OperandSize::kByte:
      return BytecodeOperandUnsignedByte(operand_index);
    case OperandSize::kShort:
      return BytecodeOperandUnsignedShort(operand_index);
    case OperandSize::kQuad:
      return BytecodeOperandUnsignedQuad(operand_index);
    case OperandSize::kNone:
      UNREACHABLE();
  }
  return nullptr;
}
     413             : 
     414        1452 : Node* InterpreterAssembler::BytecodeOperandCount(int operand_index) {
     415             :   DCHECK_EQ(OperandType::kRegCount,
     416             :             Bytecodes::GetOperandType(bytecode_, operand_index));
     417             :   OperandSize operand_size =
     418        1452 :       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
     419        1452 :   return BytecodeUnsignedOperand(operand_index, operand_size);
     420             : }
     421             : 
     422         838 : Node* InterpreterAssembler::BytecodeOperandFlag(int operand_index) {
     423             :   DCHECK_EQ(OperandType::kFlag8,
     424             :             Bytecodes::GetOperandType(bytecode_, operand_index));
     425             :   OperandSize operand_size =
     426         838 :       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
     427             :   DCHECK_EQ(operand_size, OperandSize::kByte);
     428         838 :   return BytecodeUnsignedOperand(operand_index, operand_size);
     429             : }
     430             : 
     431        2907 : Node* InterpreterAssembler::BytecodeOperandUImm(int operand_index) {
     432             :   DCHECK_EQ(OperandType::kUImm,
     433             :             Bytecodes::GetOperandType(bytecode_, operand_index));
     434             :   OperandSize operand_size =
     435        2907 :       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
     436        2907 :   return BytecodeUnsignedOperand(operand_index, operand_size);
     437             : }
     438             : 
     439        1548 : Node* InterpreterAssembler::BytecodeOperandUImmWord(int operand_index) {
     440        1548 :   return ChangeUint32ToWord(BytecodeOperandUImm(operand_index));
     441             : }
     442             : 
     443        2112 : Node* InterpreterAssembler::BytecodeOperandImm(int operand_index) {
     444             :   DCHECK_EQ(OperandType::kImm,
     445             :             Bytecodes::GetOperandType(bytecode_, operand_index));
     446             :   OperandSize operand_size =
     447        2112 :       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
     448        2112 :   return BytecodeSignedOperand(operand_index, operand_size);
     449             : }
     450             : 
     451         258 : Node* InterpreterAssembler::BytecodeOperandImmIntPtr(int operand_index) {
     452         258 :   return ChangeInt32ToIntPtr(BytecodeOperandImm(operand_index));
     453             : }
     454             : 
     455        1677 : Node* InterpreterAssembler::BytecodeOperandImmSmi(int operand_index) {
     456        1677 :   return SmiFromWord32(BytecodeOperandImm(operand_index));
     457             : }
     458             : 
     459       14637 : Node* InterpreterAssembler::BytecodeOperandIdxInt32(int operand_index) {
     460             :   DCHECK(OperandType::kIdx ==
     461             :          Bytecodes::GetOperandType(bytecode_, operand_index));
     462             :   OperandSize operand_size =
     463       14637 :       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
     464       14637 :   return BytecodeUnsignedOperand(operand_index, operand_size);
     465             : }
     466             : 
     467       13863 : Node* InterpreterAssembler::BytecodeOperandIdx(int operand_index) {
     468       13863 :   return ChangeUint32ToWord(BytecodeOperandIdxInt32(operand_index));
     469             : }
     470             : 
     471         387 : Node* InterpreterAssembler::BytecodeOperandIdxSmi(int operand_index) {
     472         387 :   return SmiTag(BytecodeOperandIdx(operand_index));
     473             : }
     474             : 
     475       13654 : Node* InterpreterAssembler::BytecodeOperandReg(int operand_index) {
     476             :   DCHECK(Bytecodes::IsRegisterOperandType(
     477             :       Bytecodes::GetOperandType(bytecode_, operand_index)));
     478             :   OperandSize operand_size =
     479       13654 :       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
     480             :   return ChangeInt32ToIntPtr(
     481       13654 :       BytecodeSignedOperand(operand_index, operand_size));
     482             : }
     483             : 
     484         270 : Node* InterpreterAssembler::BytecodeOperandRuntimeId(int operand_index) {
     485             :   DCHECK(OperandType::kRuntimeId ==
     486             :          Bytecodes::GetOperandType(bytecode_, operand_index));
     487             :   OperandSize operand_size =
     488         270 :       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
     489             :   DCHECK_EQ(operand_size, OperandSize::kShort);
     490         270 :   return BytecodeUnsignedOperand(operand_index, operand_size);
     491             : }
     492             : 
     493         132 : Node* InterpreterAssembler::BytecodeOperandIntrinsicId(int operand_index) {
     494             :   DCHECK(OperandType::kIntrinsicId ==
     495             :          Bytecodes::GetOperandType(bytecode_, operand_index));
     496             :   OperandSize operand_size =
     497         132 :       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
     498             :   DCHECK_EQ(operand_size, OperandSize::kByte);
     499         132 :   return BytecodeUnsignedOperand(operand_index, operand_size);
     500             : }
     501             : 
     502        5748 : Node* InterpreterAssembler::LoadConstantPoolEntry(Node* index) {
     503             :   Node* constant_pool = LoadObjectField(BytecodeArrayTaggedPointer(),
     504        5748 :                                         BytecodeArray::kConstantPoolOffset);
     505        5748 :   return LoadFixedArrayElement(constant_pool, index);
     506             : }
     507             : 
     508        1419 : Node* InterpreterAssembler::LoadAndUntagConstantPoolEntry(Node* index) {
     509        1419 :   return SmiUntag(LoadConstantPoolEntry(index));
     510             : }
     511             : 
     512        7905 : Node* InterpreterAssembler::LoadFeedbackVector() {
     513        7905 :   Node* function = LoadRegister(Register::function_closure());
     514        7905 :   Node* cell = LoadObjectField(function, JSFunction::kFeedbackVectorOffset);
     515        7905 :   Node* vector = LoadObjectField(cell, Cell::kValueOffset);
     516        7905 :   return vector;
     517             : }
     518             : 
     519        3044 : void InterpreterAssembler::SaveBytecodeOffset() {
     520             :   DCHECK(Bytecodes::MakesCallAlongCriticalPath(bytecode_));
     521        3044 :   StoreAndTagRegister(BytecodeOffset(), Register::bytecode_offset());
     522        3044 :   saved_bytecode_offset_ = true;
     523        3044 : }
     524             : 
// Runs before every outgoing call made by a bytecode handler: spills the
// current bytecode offset to the interpreter frame (unless an explicit
// SaveBytecodeOffset already dominates this call), and in debug builds
// snapshots the stack pointer so CallEpilogue can verify the callee
// restored it.
void InterpreterAssembler::CallPrologue() {
  if (!saved_bytecode_offset_) {
    // If there are multiple calls in the bytecode handler, you need to spill
    // before each of them, unless SaveBytecodeOffset has explicitly been called
    // in a path that dominates _all_ of those calls. Therefore don't set
    // saved_bytecode_offset_ to true or call SaveBytecodeOffset.
    StoreAndTagRegister(BytecodeOffset(), Register::bytecode_offset());
  }

  if (FLAG_debug_code && !disable_stack_check_across_call_) {
    // The previous snapshot must have been consumed by CallEpilogue.
    DCHECK(stack_pointer_before_call_ == nullptr);
    stack_pointer_before_call_ = LoadStackPointer();
  }
  // Record that this handler makes a call; used by later bookkeeping.
  made_call_ = true;
}
     540             : 
     541       41035 : void InterpreterAssembler::CallEpilogue() {
     542       41035 :   if (FLAG_debug_code && !disable_stack_check_across_call_) {
     543           0 :     Node* stack_pointer_after_call = LoadStackPointer();
     544           0 :     Node* stack_pointer_before_call = stack_pointer_before_call_;
     545           0 :     stack_pointer_before_call_ = nullptr;
     546             :     AbortIfWordNotEqual(stack_pointer_before_call, stack_pointer_after_call,
     547           0 :                         kUnexpectedStackPointer);
     548             :   }
     549       41035 : }
     550             : 
     551        1290 : Node* InterpreterAssembler::IncrementCallCount(Node* feedback_vector,
     552             :                                                Node* slot_id) {
     553        1290 :   Comment("increment call count");
     554        1290 :   Node* call_count_slot = IntPtrAdd(slot_id, IntPtrConstant(1));
     555        1290 :   Node* call_count = LoadFixedArrayElement(feedback_vector, call_count_slot);
     556        1290 :   Node* new_count = SmiAdd(call_count, SmiConstant(1));
     557             :   // Count is Smi, so we don't need a write barrier.
     558             :   return StoreFixedArrayElement(feedback_vector, call_count_slot, new_count,
     559        1290 :                                 SKIP_WRITE_BARRIER);
     560             : }
     561             : 
// Calls a JS callee |function| with |arg_count| arguments starting at
// |first_arg|, while collecting type feedback in |feedback_vector| at
// |slot_id|. Fast path: a monomorphic JSFunction hit dispatches through the
// kJSFunction push-args builtin. Slow paths handle megamorphic targets, the
// Array-function-with-AllocationSite case, and feedback initialization
// (weak cell / allocation site creation), degrading to megamorphic when the
// target doesn't qualify for monomorphic feedback. Returns the call result.
Node* InterpreterAssembler::CallJSWithFeedback(
    compiler::Node* function, compiler::Node* context,
    compiler::Node* first_arg, compiler::Node* arg_count,
    compiler::Node* slot_id, compiler::Node* feedback_vector,
    ConvertReceiverMode receiver_mode, TailCallMode tail_call_mode) {
  // Static checks to assert it is safe to examine the type feedback element.
  // We don't know that we have a weak cell. We might have a private symbol
  // or an AllocationSite, but the memory is safe to examine.
  // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
  // FixedArray.
  // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
  // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
  // computed, meaning that it can't appear to be a pointer. If the low bit is
  // 0, then hash is computed, but the 0 bit prevents the field from appearing
  // to be a pointer.
  DCHECK(Bytecodes::MakesCallAlongCriticalPath(bytecode_));
  DCHECK(Bytecodes::IsCallOrConstruct(bytecode_));
  DCHECK_EQ(Bytecodes::GetReceiverMode(bytecode_), receiver_mode);

  STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
  STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
                    WeakCell::kValueOffset &&
                WeakCell::kValueOffset == Symbol::kHashFieldSlot);

  Variable return_value(this, MachineRepresentation::kTagged);
  Label call_function(this), extra_checks(this, Label::kDeferred), call(this),
      end(this);

  // The checks. First, does function match the recorded monomorphic target?
  Node* feedback_element = LoadFixedArrayElement(feedback_vector, slot_id);
  Node* feedback_value = LoadWeakCellValueUnchecked(feedback_element);
  Node* is_monomorphic = WordEqual(function, feedback_value);
  GotoIfNot(is_monomorphic, &extra_checks);

  // The compare above could have been a SMI/SMI comparison. Guard against
  // this convincing us that we have a monomorphic JSFunction.
  Node* is_smi = TaggedIsSmi(function);
  Branch(is_smi, &extra_checks, &call_function);

  Bind(&call_function);
  {
    // Monomorphic JSFunction fast path.
    // Increment the call count.
    IncrementCallCount(feedback_vector, slot_id);

    // Call using call function builtin.
    Callable callable = CodeFactory::InterpreterPushArgsThenCall(
        isolate(), receiver_mode, tail_call_mode,
        InterpreterPushArgsMode::kJSFunction);
    Node* code_target = HeapConstant(callable.code());
    Node* ret_value = CallStub(callable.descriptor(), code_target, context,
                               arg_count, first_arg, function);
    return_value.Bind(ret_value);
    Goto(&end);
  }

  Bind(&extra_checks);
  {
    // Deferred: feedback is not a monomorphic match for |function|.
    Label check_initialized(this), mark_megamorphic(this),
        create_allocation_site(this);

    Comment("check if megamorphic");
    // Check if it is a megamorphic target.
    Node* is_megamorphic =
        WordEqual(feedback_element,
                  HeapConstant(FeedbackVector::MegamorphicSentinel(isolate())));
    GotoIf(is_megamorphic, &call);

    Comment("check if it is an allocation site");
    GotoIfNot(IsAllocationSiteMap(LoadMap(feedback_element)),
              &check_initialized);

    if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
      // For undefined receivers (mostly global calls), do an additional check
      // for the monomorphic Array function, which would otherwise appear
      // megamorphic.

      // If it is not the Array() function, mark megamorphic.
      Node* context_slot = LoadContextElement(LoadNativeContext(context),
                                              Context::ARRAY_FUNCTION_INDEX);
      Node* is_array_function = WordEqual(context_slot, function);
      GotoIfNot(is_array_function, &mark_megamorphic);

      // It is a monomorphic Array function. Increment the call count.
      IncrementCallCount(feedback_vector, slot_id);

      // Call ArrayConstructorStub, passing the AllocationSite as feedback.
      Callable callable_call =
          CodeFactory::InterpreterPushArgsThenConstructArray(isolate());
      Node* code_target_call = HeapConstant(callable_call.code());
      Node* ret_value =
          CallStub(callable_call.descriptor(), code_target_call, context,
                   arg_count, function, feedback_element, first_arg);
      return_value.Bind(ret_value);
      Goto(&end);

    } else {
      Goto(&mark_megamorphic);
    }

    Bind(&check_initialized);
    {
      Comment("check if uninitialized");
      // Check if it is uninitialized target first.
      Node* is_uninitialized = WordEqual(
          feedback_element,
          HeapConstant(FeedbackVector::UninitializedSentinel(isolate())));
      GotoIfNot(is_uninitialized, &mark_megamorphic);

      Comment("handle_uninitialized");
      // If it is not a JSFunction mark it as megamorphic.
      Node* is_smi = TaggedIsSmi(function);
      GotoIf(is_smi, &mark_megamorphic);

      // Check if function is an object of JSFunction type.
      Node* instance_type = LoadInstanceType(function);
      Node* is_js_function =
          Word32Equal(instance_type, Int32Constant(JS_FUNCTION_TYPE));
      GotoIfNot(is_js_function, &mark_megamorphic);

      // Check if it is the Array() function.
      Node* context_slot = LoadContextElement(LoadNativeContext(context),
                                              Context::ARRAY_FUNCTION_INDEX);
      Node* is_array_function = WordEqual(context_slot, function);
      GotoIf(is_array_function, &create_allocation_site);

      // Check if the function belongs to the same native context; cross-
      // context functions must not be recorded as monomorphic feedback.
      Node* native_context = LoadNativeContext(
          LoadObjectField(function, JSFunction::kContextOffset));
      Node* is_same_native_context =
          WordEqual(native_context, LoadNativeContext(context));
      GotoIfNot(is_same_native_context, &mark_megamorphic);

      CreateWeakCellInFeedbackVector(feedback_vector, SmiTag(slot_id),
                                     function);

      // Call using call function builtin.
      Goto(&call_function);
    }

    Bind(&create_allocation_site);
    {
      CreateAllocationSiteInFeedbackVector(feedback_vector, SmiTag(slot_id));

      // Call using CallFunction builtin. CallICs have a PREMONOMORPHIC state.
      // They start collecting feedback only when a call is executed the second
      // time. So, do not pass any feedback here.
      Goto(&call_function);
    }

    Bind(&mark_megamorphic);
    {
      // Mark it as a megamorphic.
      // MegamorphicSentinel is created as a part of Heap::InitialObjects
      // and will not move during a GC. So it is safe to skip write barrier.
      DCHECK(Heap::RootIsImmortalImmovable(Heap::kmegamorphic_symbolRootIndex));
      StoreFixedArrayElement(
          feedback_vector, slot_id,
          HeapConstant(FeedbackVector::MegamorphicSentinel(isolate())),
          SKIP_WRITE_BARRIER);
      Goto(&call);
    }
  }

  Bind(&call);
  {
    // Generic (megamorphic) call path: no per-target feedback.
    Comment("Increment call count and call using Call builtin");
    // Increment the call count.
    IncrementCallCount(feedback_vector, slot_id);

    // Call using call builtin.
    Callable callable_call = CodeFactory::InterpreterPushArgsThenCall(
        isolate(), receiver_mode, tail_call_mode,
        InterpreterPushArgsMode::kOther);
    Node* code_target_call = HeapConstant(callable_call.code());
    Node* ret_value = CallStub(callable_call.descriptor(), code_target_call,
                               context, arg_count, first_arg, function);
    return_value.Bind(ret_value);
    Goto(&end);
  }

  Bind(&end);
  return return_value.value();
}
     745             : 
     746         272 : Node* InterpreterAssembler::CallJS(Node* function, Node* context,
     747             :                                    Node* first_arg, Node* arg_count,
     748             :                                    ConvertReceiverMode receiver_mode,
     749             :                                    TailCallMode tail_call_mode) {
     750             :   DCHECK(Bytecodes::MakesCallAlongCriticalPath(bytecode_));
     751             :   DCHECK(Bytecodes::IsCallOrConstruct(bytecode_));
     752             :   DCHECK_EQ(Bytecodes::GetReceiverMode(bytecode_), receiver_mode);
     753             :   Callable callable = CodeFactory::InterpreterPushArgsThenCall(
     754             :       isolate(), receiver_mode, tail_call_mode,
     755         272 :       InterpreterPushArgsMode::kOther);
     756         272 :   Node* code_target = HeapConstant(callable.code());
     757             : 
     758             :   return CallStub(callable.descriptor(), code_target, context, arg_count,
     759         272 :                   first_arg, function);
     760             : }
     761             : 
     762         129 : Node* InterpreterAssembler::CallJSWithSpread(Node* function, Node* context,
     763             :                                              Node* first_arg, Node* arg_count) {
     764             :   DCHECK(Bytecodes::MakesCallAlongCriticalPath(bytecode_));
     765             :   DCHECK_EQ(Bytecodes::GetReceiverMode(bytecode_), ConvertReceiverMode::kAny);
     766             :   Callable callable = CodeFactory::InterpreterPushArgsThenCall(
     767             :       isolate(), ConvertReceiverMode::kAny, TailCallMode::kDisallow,
     768         129 :       InterpreterPushArgsMode::kWithFinalSpread);
     769         129 :   Node* code_target = HeapConstant(callable.code());
     770             : 
     771             :   return CallStub(callable.descriptor(), code_target, context, arg_count,
     772         129 :                   first_arg, function);
     773             : }
     774             : 
// Performs a [[Construct]] of |constructor| with |new_target| and |arg_count|
// arguments starting at |first_arg|, collecting construct feedback in
// |feedback_vector| at |slot_id|. A slot id of 0 means no feedback is
// available and the generic Construct builtin is used directly. Otherwise the
// feedback slot is inspected/updated: weak cell (monomorphic constructor),
// AllocationSite (Array function), uninitialized -> initialize, anything
// else -> megamorphic. Returns the construction result.
Node* InterpreterAssembler::Construct(Node* constructor, Node* context,
                                      Node* new_target, Node* first_arg,
                                      Node* arg_count, Node* slot_id,
                                      Node* feedback_vector) {
  DCHECK(Bytecodes::MakesCallAlongCriticalPath(bytecode_));
  Variable return_value(this, MachineRepresentation::kTagged);
  // Holds the AllocationSite (or undefined) passed to the construct stub.
  Variable allocation_feedback(this, MachineRepresentation::kTagged);
  Label call_construct_function(this, &allocation_feedback),
      extra_checks(this, Label::kDeferred), call_construct(this), end(this);

  // Slot id of 0 is used to indicate no type feedback is available.
  STATIC_ASSERT(FeedbackVector::kReservedIndexCount > 0);
  Node* is_feedback_unavailable = WordEqual(slot_id, IntPtrConstant(0));
  GotoIf(is_feedback_unavailable, &call_construct);

  // Check that the constructor is not a smi.
  Node* is_smi = TaggedIsSmi(constructor);
  GotoIf(is_smi, &call_construct);

  // Check that constructor is a JSFunction.
  Node* instance_type = LoadInstanceType(constructor);
  Node* is_js_function =
      Word32Equal(instance_type, Int32Constant(JS_FUNCTION_TYPE));
  GotoIfNot(is_js_function, &call_construct);

  // Check if it is a monomorphic constructor.
  Node* feedback_element = LoadFixedArrayElement(feedback_vector, slot_id);
  Node* feedback_value = LoadWeakCellValueUnchecked(feedback_element);
  Node* is_monomorphic = WordEqual(constructor, feedback_value);
  allocation_feedback.Bind(UndefinedConstant());
  Branch(is_monomorphic, &call_construct_function, &extra_checks);

  Bind(&call_construct_function);
  {
    // Known-JSFunction construct path; allocation_feedback carries either an
    // AllocationSite or undefined.
    Comment("call using ConstructFunction");
    IncrementCallCount(feedback_vector, slot_id);
    Callable callable_function = CodeFactory::InterpreterPushArgsThenConstruct(
        isolate(), InterpreterPushArgsMode::kJSFunction);
    return_value.Bind(CallStub(callable_function.descriptor(),
                               HeapConstant(callable_function.code()), context,
                               arg_count, new_target, constructor,
                               allocation_feedback.value(), first_arg));
    Goto(&end);
  }

  Bind(&extra_checks);
  {
    // Deferred: feedback is not a monomorphic weak-cell hit.
    Label check_allocation_site(this), check_initialized(this),
        initialize(this), mark_megamorphic(this);

    // Check if it is a megamorphic target.
    Comment("check if megamorphic");
    Node* is_megamorphic =
        WordEqual(feedback_element,
                  HeapConstant(FeedbackVector::MegamorphicSentinel(isolate())));
    GotoIf(is_megamorphic, &call_construct_function);

    Comment("check if weak cell");
    Node* is_weak_cell = WordEqual(LoadMap(feedback_element),
                                   LoadRoot(Heap::kWeakCellMapRootIndex));
    GotoIfNot(is_weak_cell, &check_allocation_site);

    // If the weak cell is cleared, we have a new chance to become
    // monomorphic.
    Comment("check if weak cell is cleared");
    Node* is_smi = TaggedIsSmi(feedback_value);
    Branch(is_smi, &initialize, &mark_megamorphic);

    Bind(&check_allocation_site);
    {
      Comment("check if it is an allocation site");
      Node* is_allocation_site =
          WordEqual(LoadObjectField(feedback_element, 0),
                    LoadRoot(Heap::kAllocationSiteMapRootIndex));
      GotoIfNot(is_allocation_site, &check_initialized);

      // Make sure the function is the Array() function.
      Node* context_slot = LoadContextElement(LoadNativeContext(context),
                                              Context::ARRAY_FUNCTION_INDEX);
      Node* is_array_function = WordEqual(context_slot, constructor);
      GotoIfNot(is_array_function, &mark_megamorphic);

      // Pass the AllocationSite along to the construct stub.
      allocation_feedback.Bind(feedback_element);
      Goto(&call_construct_function);
    }

    Bind(&check_initialized);
    {
      // Check if it is uninitialized.
      Comment("check if uninitialized");
      Node* is_uninitialized = WordEqual(
          feedback_element, LoadRoot(Heap::kuninitialized_symbolRootIndex));
      Branch(is_uninitialized, &initialize, &mark_megamorphic);
    }

    Bind(&initialize);
    {
      Label create_allocation_site(this), create_weak_cell(this);
      Comment("initialize the feedback element");
      // Create an allocation site if the function is an array function,
      // otherwise create a weak cell.
      Node* context_slot = LoadContextElement(LoadNativeContext(context),
                                              Context::ARRAY_FUNCTION_INDEX);
      Node* is_array_function = WordEqual(context_slot, constructor);
      Branch(is_array_function, &create_allocation_site, &create_weak_cell);

      Bind(&create_allocation_site);
      {
        Node* site = CreateAllocationSiteInFeedbackVector(feedback_vector,
                                                          SmiTag(slot_id));
        allocation_feedback.Bind(site);
        Goto(&call_construct_function);
      }

      Bind(&create_weak_cell);
      {
        CreateWeakCellInFeedbackVector(feedback_vector, SmiTag(slot_id),
                                       constructor);
        Goto(&call_construct_function);
      }
    }

    Bind(&mark_megamorphic);
    {
      // MegamorphicSentinel is an immortal immovable object so
      // write-barrier is not needed.
      Comment("transition to megamorphic");
      DCHECK(Heap::RootIsImmortalImmovable(Heap::kmegamorphic_symbolRootIndex));
      StoreFixedArrayElement(
          feedback_vector, slot_id,
          HeapConstant(FeedbackVector::MegamorphicSentinel(isolate())),
          SKIP_WRITE_BARRIER);
      Goto(&call_construct_function);
    }
  }

  Bind(&call_construct);
  {
    // Generic construct path: no feedback slot, smi, or non-JSFunction
    // constructor (e.g. a proxy).
    Comment("call using Construct builtin");
    Callable callable = CodeFactory::InterpreterPushArgsThenConstruct(
        isolate(), InterpreterPushArgsMode::kOther);
    Node* code_target = HeapConstant(callable.code());
    return_value.Bind(CallStub(callable.descriptor(), code_target, context,
                               arg_count, new_target, constructor,
                               UndefinedConstant(), first_arg));
    Goto(&end);
  }

  Bind(&end);
  return return_value.value();
}
     926             : 
     927         129 : Node* InterpreterAssembler::ConstructWithSpread(Node* constructor,
     928             :                                                 Node* context, Node* new_target,
     929             :                                                 Node* first_arg,
     930             :                                                 Node* arg_count) {
     931             :   DCHECK(Bytecodes::MakesCallAlongCriticalPath(bytecode_));
     932         129 :   Variable return_value(this, MachineRepresentation::kTagged);
     933         129 :   Comment("call using ConstructWithSpread");
     934             :   Callable callable = CodeFactory::InterpreterPushArgsThenConstruct(
     935         129 :       isolate(), InterpreterPushArgsMode::kWithFinalSpread);
     936         129 :   Node* code_target = HeapConstant(callable.code());
     937             :   return_value.Bind(CallStub(callable.descriptor(), code_target, context,
     938             :                              arg_count, new_target, constructor,
     939         387 :                              UndefinedConstant(), first_arg));
     940             : 
     941         258 :   return return_value.value();
     942             : }
     943             : 
// Calls the runtime function identified by |function_id| with |arg_count|
// arguments starting at |first_arg|, through the InterpreterCEntry stub.
// |result_size| is the number of return values expected by the caller.
Node* InterpreterAssembler::CallRuntimeN(Node* function_id, Node* context,
                                         Node* first_arg, Node* arg_count,
                                         int result_size) {
  DCHECK(Bytecodes::MakesCallAlongCriticalPath(bytecode_));
  DCHECK(Bytecodes::IsCallRuntime(bytecode_));
  Callable callable = CodeFactory::InterpreterCEntry(isolate(), result_size);
  Node* code_target = HeapConstant(callable.code());

  // Get the function entry from the function id.
  Node* function_table = ExternalConstant(
      ExternalReference::runtime_function_table_address(isolate()));
  // Scale the id to a byte offset into the Runtime::Function table.
  Node* function_offset =
      Int32Mul(function_id, Int32Constant(sizeof(Runtime::Function)));
  Node* function =
      IntPtrAdd(function_table, ChangeUint32ToWord(function_offset));
  // Load the C entry point out of the table record.
  Node* function_entry =
      Load(MachineType::Pointer(), function,
           IntPtrConstant(offsetof(Runtime::Function, entry)));

  return CallStubR(callable.descriptor(), result_size, code_target, context,
                   arg_count, first_arg, function_entry);
}
     966             : 
// Adjusts the interrupt budget stored in the BytecodeArray by |weight| plus
// the size of the current bytecode. Backward updates (|backward| == true,
// e.g. loop back-edges) subtract from the budget; forward updates add to it.
// When the budget drops below zero, Runtime::kInterrupt is called and the
// budget is reset to Interpreter::InterruptBudget().
void InterpreterAssembler::UpdateInterruptBudget(Node* weight, bool backward) {
  Label ok(this), interrupt_check(this, Label::kDeferred), end(this);
  Node* budget_offset =
      IntPtrConstant(BytecodeArray::kInterruptBudgetOffset - kHeapObjectTag);

  // Assert that the weight is positive (negative weights should be implemented
  // as backward updates).
  CSA_ASSERT(this, Int32GreaterThanOrEqual(weight, Int32Constant(0)));

  // Update budget by |weight| and check if it reaches zero.
  Variable new_budget(this, MachineRepresentation::kWord32);
  Node* old_budget =
      Load(MachineType::Int32(), BytecodeArrayTaggedPointer(), budget_offset);
  // Make sure we include the current bytecode in the budget calculation.
  Node* budget_after_bytecode =
      Int32Sub(old_budget, Int32Constant(CurrentBytecodeSize()));
  if (backward) {
    new_budget.Bind(Int32Sub(budget_after_bytecode, weight));
  } else {
    new_budget.Bind(Int32Add(budget_after_bytecode, weight));
  }
  Node* condition =
      Int32GreaterThanOrEqual(new_budget.value(), Int32Constant(0));
  Branch(condition, &ok, &interrupt_check);

  // Perform interrupt and reset budget.
  Bind(&interrupt_check);
  {
    CallRuntime(Runtime::kInterrupt, GetContext());
    new_budget.Bind(Int32Constant(Interpreter::InterruptBudget()));
    Goto(&ok);
  }

  // Update budget.
  Bind(&ok);
  // Budget is a raw int32 in the BytecodeArray header; no write barrier.
  StoreNoWriteBarrier(MachineRepresentation::kWord32,
                      BytecodeArrayTaggedPointer(), budget_offset,
                      new_budget.value());
}
    1006             : 
    1007       20984 : Node* InterpreterAssembler::Advance() { return Advance(CurrentBytecodeSize()); }
    1008             : 
    1009       21070 : Node* InterpreterAssembler::Advance(int delta) {
    1010       21070 :   return Advance(IntPtrConstant(delta));
    1011             : }
    1012             : 
                     : // Moves the bytecode offset by |delta| (subtracting when |backward| is
                     : // true), rebinds the bytecode_offset_ variable, and returns the new
                     : // offset node. Emits an exit trace first when --trace-ignition is set.
    1013       24166 : Node* InterpreterAssembler::Advance(Node* delta, bool backward) {
    1014       24166 :   if (FLAG_trace_ignition) {
    1015           0 :     TraceBytecode(Runtime::kInterpreterTraceBytecodeExit);
    1016             :   }
    1017         258 :   Node* next_offset = backward ? IntPtrSub(BytecodeOffset(), delta)
    1018       24424 :                                : IntPtrAdd(BytecodeOffset(), delta);
    1019       24166 :   bytecode_offset_.Bind(next_offset);
    1020       24166 :   return next_offset;
    1021             : }
    1022             : 
                     : // Performs an unconditional jump of |delta| bytes (backwards when
                     : // |backward| is true): charges the interrupt budget for the jump
                     : // distance, advances the offset, then tail-dispatches to the target
                     : // bytecode. The DCHECK asserts jump bytecodes do not take part in the
                     : // Star-lookahead optimization.
    1023        3096 : Node* InterpreterAssembler::Jump(Node* delta, bool backward) {
    1024             :   DCHECK(!Bytecodes::IsStarLookahead(bytecode_, operand_scale_));
    1025             : 
    1026        3096 :   UpdateInterruptBudget(TruncateWordToWord32(delta), backward);
    1027        3096 :   Node* new_bytecode_offset = Advance(delta, backward);
    1028        3096 :   Node* target_bytecode = LoadBytecode(new_bytecode_offset);
    1029        3096 :   return DispatchToBytecode(target_bytecode, new_bytecode_offset);
    1030             : }
    1031             : 
                     : // Forward jump by |delta| bytes.
    1032        2838 : Node* InterpreterAssembler::Jump(Node* delta) { return Jump(delta, false); }
    1033             : 
                     : // Backward jump by |delta| bytes (used for loop back-edges).
    1034         258 : Node* InterpreterAssembler::JumpBackward(Node* delta) {
    1035         258 :   return Jump(delta, true);
    1036             : }
    1037             : 
                     : // If |condition| holds, performs a forward jump of |delta| bytes;
                     : // otherwise falls through and dispatches to the next bytecode.
    1038        1806 : void InterpreterAssembler::JumpConditional(Node* condition, Node* delta) {
    1039        3612 :   Label match(this), no_match(this);
    1040             : 
    1041        1806 :   Branch(condition, &match, &no_match);
    1042        1806 :   Bind(&match);
    1043             :   Jump(delta);
    1044        1806 :   Bind(&no_match);
    1045        3612 :   Dispatch();
    1046        1806 : }
    1047             : 
                     : // Jumps by |delta| bytes when |lhs| == |rhs| (word comparison).
    1048        1032 : void InterpreterAssembler::JumpIfWordEqual(Node* lhs, Node* rhs, Node* delta) {
    1049        1032 :   JumpConditional(WordEqual(lhs, rhs), delta);
    1050        1032 : }
    1051             : 
                     : // Jumps by |delta| bytes when |lhs| != |rhs| (word comparison).
    1052         774 : void InterpreterAssembler::JumpIfWordNotEqual(Node* lhs, Node* rhs,
    1053             :                                               Node* delta) {
    1054         774 :   JumpConditional(WordNotEqual(lhs, rhs), delta);
    1055         774 : }
    1056             : 
                     : // Loads the raw bytecode byte at |bytecode_offset| from the bytecode
                     : // array and zero-extends it to a machine word.
    1057       24166 : Node* InterpreterAssembler::LoadBytecode(compiler::Node* bytecode_offset) {
    1058             :   Node* bytecode =
    1059       24166 :       Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(), bytecode_offset);
    1060       24166 :   return ChangeUint32ToWord(bytecode);
    1061             : }
    1062             : 
                     : // If the next bytecode is Star, executes it inline (see InlineStar) and
                     : // loads the bytecode that follows it; otherwise returns |target_bytecode|
                     : // unchanged. This lets the dispatcher skip a full dispatch for Star.
    1063        1333 : Node* InterpreterAssembler::StarDispatchLookahead(Node* target_bytecode) {
    1064        2666 :   Label do_inline_star(this), done(this);
    1065             : 
    1066        2666 :   Variable var_bytecode(this, MachineType::PointerRepresentation());
    1067        1333 :   var_bytecode.Bind(target_bytecode);
    1068             : 
    1069        1333 :   Node* star_bytecode = IntPtrConstant(static_cast<int>(Bytecode::kStar));
    1070        1333 :   Node* is_star = WordEqual(target_bytecode, star_bytecode);
    1071        1333 :   Branch(is_star, &do_inline_star, &done);
    1072             : 
    1073        1333 :   Bind(&do_inline_star);
    1074             :   {
    1075        1333 :     InlineStar();
                     :     // InlineStar advanced the offset past the Star bytecode; pick up the
                     :     // bytecode that follows it as the new dispatch target.
    1076        1333 :     var_bytecode.Bind(LoadBytecode(BytecodeOffset()));
    1077        1333 :     Goto(&done);
    1078             :   }
    1079        1333 :   Bind(&done);
    1080        2666 :   return var_bytecode.value();
    1081             : }
    1082             : 
                     : // Emits the body of the Star bytecode inline: temporarily switches
                     : // bytecode_/accumulator_use_ to Star's values, stores the accumulator
                     : // into Star's register operand, advances past the Star bytecode, then
                     : // restores the previous bytecode state.
    1083        1333 : void InterpreterAssembler::InlineStar() {
    1084        1333 :   Bytecode previous_bytecode = bytecode_;
    1085        1333 :   AccumulatorUse previous_acc_use = accumulator_use_;
    1086             : 
    1087        1333 :   bytecode_ = Bytecode::kStar;
    1088        1333 :   accumulator_use_ = AccumulatorUse::kNone;
    1089             : 
    1090        1333 :   if (FLAG_trace_ignition) {
    1091           0 :     TraceBytecode(Runtime::kInterpreterTraceBytecodeEntry);
    1092             :   }
    1093        2666 :   StoreRegister(GetAccumulator(), BytecodeOperandReg(0));
    1094             : 
    1095             :   DCHECK_EQ(accumulator_use_, Bytecodes::GetAccumulatorUse(bytecode_));
    1096             : 
    1097             :   Advance();
    1098        1333 :   bytecode_ = previous_bytecode;
    1099        1333 :   accumulator_use_ = previous_acc_use;
    1100        1333 : }
    1101             : 
                     : // Advances past the current bytecode, loads the next bytecode (with
                     : // optional inline Star lookahead), and tail-dispatches to its handler.
    1102       19651 : Node* InterpreterAssembler::Dispatch() {
    1103       19651 :   Comment("========= Dispatch");
    1104             :   DCHECK_IMPLIES(Bytecodes::MakesCallAlongCriticalPath(bytecode_), made_call_);
    1105             :   Node* target_offset = Advance();
    1106       19651 :   Node* target_bytecode = LoadBytecode(target_offset);
    1107             : 
    1108       19651 :   if (Bytecodes::IsStarLookahead(bytecode_, operand_scale_)) {
    1109        1333 :     target_bytecode = StarDispatchLookahead(target_bytecode);
    1110             :   }
    1111       19651 :   return DispatchToBytecode(target_bytecode, BytecodeOffset());
    1112             : }
    1113             : 
                     : // Loads the handler entry point for |target_bytecode| from the dispatch
                     : // table (indexed by bytecode value * kPointerSize) and tail-calls it.
                     : // Optionally bumps the per-pair dispatch counters when tracing.
    1114       22747 : Node* InterpreterAssembler::DispatchToBytecode(Node* target_bytecode,
    1115             :                                                Node* new_bytecode_offset) {
    1116       22747 :   if (FLAG_trace_ignition_dispatches) {
    1117           0 :     TraceBytecodeDispatch(target_bytecode);
    1118             :   }
    1119             : 
    1120             :   Node* target_code_entry =
    1121             :       Load(MachineType::Pointer(), DispatchTableRawPointer(),
    1122       22747 :            WordShl(target_bytecode, IntPtrConstant(kPointerSizeLog2)));
    1123             : 
    1124       22747 :   return DispatchToBytecodeHandlerEntry(target_code_entry, new_bytecode_offset);
    1125             : }
    1126             : 
                     : // Dispatches to a handler given as a tagged Code object: computes its
                     : // raw entry point (header size minus the heap-object tag) and jumps.
    1127         903 : Node* InterpreterAssembler::DispatchToBytecodeHandler(Node* handler,
    1128             :                                                       Node* bytecode_offset) {
    1129             :   // TODO(ishell): Add CSA::CodeEntryPoint(code).
    1130             :   Node* handler_entry =
    1131             :       IntPtrAdd(BitcastTaggedToWord(handler),
    1132         903 :                 IntPtrConstant(Code::kHeaderSize - kHeapObjectTag));
    1133         903 :   return DispatchToBytecodeHandlerEntry(handler_entry, bytecode_offset);
    1134             : }
    1135             : 
                     : // Tail-calls |handler_entry| with the interpreter's dispatch arguments
                     : // (accumulator, bytecode offset, bytecode array, dispatch table) as laid
                     : // out by InterpreterDispatchDescriptor.
    1136       23736 : Node* InterpreterAssembler::DispatchToBytecodeHandlerEntry(
    1137             :     Node* handler_entry, Node* bytecode_offset) {
    1138       23736 :   InterpreterDispatchDescriptor descriptor(isolate());
    1139             :   return TailCallBytecodeDispatch(
    1140             :       descriptor, handler_entry, GetAccumulatorUnchecked(), bytecode_offset,
    1141       47472 :       BytecodeArrayTaggedPointer(), DispatchTableRawPointer());
    1142             : }
    1143             : 
                     : // Dispatches the bytecode following a Wide/ExtraWide prefix, using the
                     : // dispatch-table sub-table selected by |operand_scale|.
    1144          86 : void InterpreterAssembler::DispatchWide(OperandScale operand_scale) {
    1145             :   // Dispatching a wide bytecode requires treating the prefix
    1146             :   // bytecode a base pointer into the dispatch table and dispatching
    1147             :   // the bytecode that follows relative to this base.
    1148             :   //
    1149             :   //   Indices 0-255 correspond to bytecodes with operand_scale == 0
    1150             :   //   Indices 256-511 correspond to bytecodes with operand_scale == 1
    1151             :   //   Indices 512-767 correspond to bytecodes with operand_scale == 2
    1152             :   DCHECK_IMPLIES(Bytecodes::MakesCallAlongCriticalPath(bytecode_), made_call_);
    1153          86 :   Node* next_bytecode_offset = Advance(1);
    1154          86 :   Node* next_bytecode = LoadBytecode(next_bytecode_offset);
    1155             : 
    1156          86 :   if (FLAG_trace_ignition_dispatches) {
    1157           0 :     TraceBytecodeDispatch(next_bytecode);
    1158             :   }
    1159             : 
                     :   // base_index selects the 256-entry sub-table for the operand scale.
    1160             :   Node* base_index;
    1161          86 :   switch (operand_scale) {
    1162             :     case OperandScale::kDouble:
    1163          43 :       base_index = IntPtrConstant(1 << kBitsPerByte);
    1164          43 :       break;
    1165             :     case OperandScale::kQuadruple:
    1166          43 :       base_index = IntPtrConstant(2 << kBitsPerByte);
    1167          43 :       break;
    1168             :     default:
    1169           0 :       UNREACHABLE();
    1170             :       base_index = nullptr;
    1171             :   }
    1172          86 :   Node* target_index = IntPtrAdd(base_index, next_bytecode);
    1173             :   Node* target_code_entry =
    1174             :       Load(MachineType::Pointer(), DispatchTableRawPointer(),
    1175          86 :            WordShl(target_index, kPointerSizeLog2));
    1176             : 
    1177          86 :   DispatchToBytecodeHandlerEntry(target_code_entry, next_bytecode_offset);
    1178          86 : }
    1179             : 
                     : // Truncates tagged |value| to a word32, recording BinaryOperationFeedback
                     : // in |var_type_feedback| (kSignedSmall, kNumber, kNumberOrOddball, or
                     : // kAny). Non-number inputs are converted via Oddball::to_number or
                     : // NonNumberToNumber and the loop is taken once more.
    1180        2322 : Node* InterpreterAssembler::TruncateTaggedToWord32WithFeedback(
    1181             :     Node* context, Node* value, Variable* var_type_feedback) {
    1182             :   // We might need to loop once due to ToNumber conversion.
    1183        2322 :   Variable var_value(this, MachineRepresentation::kTagged),
    1184        4644 :       var_result(this, MachineRepresentation::kWord32);
    1185        2322 :   Variable* loop_vars[] = {&var_value, var_type_feedback};
    1186        4644 :   Label loop(this, 2, loop_vars), done_loop(this, &var_result);
    1187        2322 :   var_value.Bind(value);
    1188        2322 :   var_type_feedback->Bind(SmiConstant(BinaryOperationFeedback::kNone));
    1189        2322 :   Goto(&loop);
    1190        2322 :   Bind(&loop);
    1191             :   {
    1192             :     // Load the current {value}.
    1193        2322 :     value = var_value.value();
    1194             : 
    1195             :     // Check if the {value} is a Smi or a HeapObject.
    1196        2322 :     Label if_valueissmi(this), if_valueisnotsmi(this);
    1197        2322 :     Branch(TaggedIsSmi(value), &if_valueissmi, &if_valueisnotsmi);
    1198             : 
    1199        2322 :     Bind(&if_valueissmi);
    1200             :     {
    1201             :       // Convert the Smi {value}.
    1202        2322 :       var_result.Bind(SmiToWord32(value));
    1203             :       var_type_feedback->Bind(
    1204             :           SmiOr(var_type_feedback->value(),
    1205        2322 :                 SmiConstant(BinaryOperationFeedback::kSignedSmall)));
    1206        2322 :       Goto(&done_loop);
    1207             :     }
    1208             : 
    1209        2322 :     Bind(&if_valueisnotsmi);
    1210             :     {
    1211             :       // Check if {value} is a HeapNumber.
    1212             :       Label if_valueisheapnumber(this),
    1213        2322 :           if_valueisnotheapnumber(this, Label::kDeferred);
    1214        2322 :       Node* value_map = LoadMap(value);
    1215             :       Branch(IsHeapNumberMap(value_map), &if_valueisheapnumber,
    1216        2322 :              &if_valueisnotheapnumber);
    1217             : 
    1218        2322 :       Bind(&if_valueisheapnumber);
    1219             :       {
    1220             :         // Truncate the floating point value.
    1221        2322 :         var_result.Bind(TruncateHeapNumberValueToWord32(value));
    1222             :         var_type_feedback->Bind(
    1223             :             SmiOr(var_type_feedback->value(),
    1224        2322 :                   SmiConstant(BinaryOperationFeedback::kNumber)));
    1225        2322 :         Goto(&done_loop);
    1226             :       }
    1227             : 
    1228        2322 :       Bind(&if_valueisnotheapnumber);
    1229             :       {
    1230             :         // We do not require an Or with earlier feedback here because once we
    1231             :         // convert the value to a number, we cannot reach this path. We can
    1232             :         // only reach this path on the first pass when the feedback is kNone.
    1233             :         CSA_ASSERT(this, SmiEqual(var_type_feedback->value(),
    1234             :                                   SmiConstant(BinaryOperationFeedback::kNone)));
    1235             : 
    1236             :         Label if_valueisoddball(this),
    1237        2322 :             if_valueisnotoddball(this, Label::kDeferred);
    1238             :         Node* is_oddball = Word32Equal(LoadMapInstanceType(value_map),
    1239        2322 :                                        Int32Constant(ODDBALL_TYPE));
    1240        2322 :         Branch(is_oddball, &if_valueisoddball, &if_valueisnotoddball);
    1241             : 
    1242        2322 :         Bind(&if_valueisoddball);
    1243             :         {
    1244             :           // Convert Oddball to a Number and perform checks again.
    1245        2322 :           var_value.Bind(LoadObjectField(value, Oddball::kToNumberOffset));
    1246             :           var_type_feedback->Bind(
    1247        2322 :               SmiConstant(BinaryOperationFeedback::kNumberOrOddball));
    1248        2322 :           Goto(&loop);
    1249             :         }
    1250             : 
    1251        2322 :         Bind(&if_valueisnotoddball);
    1252             :         {
    1253             :           // Convert the {value} to a Number first.
    1254        2322 :           Callable callable = CodeFactory::NonNumberToNumber(isolate());
    1255        2322 :           var_value.Bind(CallStub(callable, context, value));
    1256        2322 :           var_type_feedback->Bind(SmiConstant(BinaryOperationFeedback::kAny));
    1257        2322 :           Goto(&loop);
    1258        2322 :         }
    1259        2322 :       }
    1260        2322 :     }
    1261             :   }
    1262        2322 :   Bind(&done_loop);
    1263        4644 :   return var_result.value();
    1264             : }
    1265             : 
                     : // Charges the interrupt budget for a simulated back-edge from the
                     : // return bytecode to the start of the function. |backward| == true, so
                     : // the weight is subtracted from the budget.
    1266          43 : void InterpreterAssembler::UpdateInterruptBudgetOnReturn() {
    1267             :   // TODO(rmcilroy): Investigate whether it is worth supporting self
    1268             :   // optimization of primitive functions like FullCodegen.
    1269             : 
    1270             :   // Update profiling count by the number of bytes between the end of the
    1271             :   // current bytecode and the start of the first one, to simulate backedge to
    1272             :   // start of function.
    1273             :   //
    1274             :   // With headers and current offset, the bytecode array layout looks like:
    1275             :   //
    1276             :   //           <---------- simulated backedge ----------
    1277             :   // | header | first bytecode | .... | return bytecode |
    1278             :   //  |<------ current offset ------->
    1279             :   //  ^ tagged bytecode array pointer
    1280             :   //
    1281             :   // UpdateInterruptBudget already handles adding the bytecode size to the
    1282             :   // length of the back-edge, so we just have to correct for the non-zero offset
    1283             :   // of the first bytecode.
    1284             : 
    1285             :   const int kFirstBytecodeOffset = BytecodeArray::kHeaderSize - kHeapObjectTag;
    1286             :   Node* profiling_weight = Int32Sub(TruncateWordToWord32(BytecodeOffset()),
    1287          43 :                                     Int32Constant(kFirstBytecodeOffset));
    1288          43 :   UpdateInterruptBudget(profiling_weight, true);
    1289          43 : }
    1290             : 
                     : // Returns a boolean node that is true when the stack pointer is below
                     : // the isolate's stack limit (i.e. a stack-check interrupt is pending).
    1291          43 : Node* InterpreterAssembler::StackCheckTriggeredInterrupt() {
    1292          43 :   Node* sp = LoadStackPointer();
    1293             :   Node* stack_limit = Load(
    1294             :       MachineType::Pointer(),
    1295          43 :       ExternalConstant(ExternalReference::address_of_stack_limit(isolate())));
    1296          43 :   return UintPtrLessThan(sp, stack_limit);
    1297             : }
    1298             : 
                     : // Loads the bytecode array's OSR nesting level field (an int8).
    1299         129 : Node* InterpreterAssembler::LoadOSRNestingLevel() {
    1300             :   return LoadObjectField(BytecodeArrayTaggedPointer(),
    1301             :                          BytecodeArray::kOSRNestingLevelOffset,
    1302         129 :                          MachineType::Int8());
    1303             : }
    1304             : 
                     : // Calls Runtime::kAbort with |bailout_reason|. The stack check is
                     : // disabled around the call via disable_stack_check_across_call_.
    1305        1333 : void InterpreterAssembler::Abort(BailoutReason bailout_reason) {
    1306        1333 :   disable_stack_check_across_call_ = true;
    1307        1333 :   Node* abort_id = SmiTag(Int32Constant(bailout_reason));
    1308        1333 :   CallRuntime(Runtime::kAbort, GetContext(), abort_id);
    1309        1333 :   disable_stack_check_across_call_ = false;
    1310        1333 : }
    1311             : 
                     : // Aborts with |bailout_reason| unless |lhs| == |rhs| (word comparison).
                     : // The abort path is marked deferred so it stays off the hot path.
    1312           0 : void InterpreterAssembler::AbortIfWordNotEqual(Node* lhs, Node* rhs,
    1313             :                                                BailoutReason bailout_reason) {
    1314           0 :   Label ok(this), abort(this, Label::kDeferred);
    1315           0 :   Branch(WordEqual(lhs, rhs), &ok, &abort);
    1316             : 
    1317           0 :   Bind(&abort);
    1318           0 :   Abort(bailout_reason);
    1319           0 :   Goto(&ok);
    1320             : 
    1321           0 :   Bind(&ok);
    1322           0 : }
    1323             : 
                     : // If the debugger has requested a frame restart (restart_fp is non-null),
                     : // calls the frame-dropper trampoline; otherwise falls through.
    1324         903 : void InterpreterAssembler::MaybeDropFrames(Node* context) {
    1325             :   Node* restart_fp_address =
    1326         903 :       ExternalConstant(ExternalReference::debug_restart_fp_address(isolate()));
    1327             : 
    1328         903 :   Node* restart_fp = Load(MachineType::Pointer(), restart_fp_address);
    1329         903 :   Node* null = IntPtrConstant(0);
    1330             : 
    1331         903 :   Label ok(this), drop_frames(this);
    1332         903 :   Branch(IntPtrEqual(restart_fp, null), &ok, &drop_frames);
    1333             : 
    1334         903 :   Bind(&drop_frames);
    1335             :   // We don't expect this call to return since the frame dropper tears down
    1336             :   // the stack and jumps into the function on the target frame to restart it.
    1337        1806 :   CallStub(CodeFactory::FrameDropperTrampoline(isolate()), context, restart_fp);
    1338         903 :   Abort(kUnexpectedReturnFromFrameDropper);
    1339         903 :   Goto(&ok);
    1340             : 
    1341        1806 :   Bind(&ok);
    1342         903 : }
    1343             : 
                     : // Calls the given tracing runtime function with the bytecode array,
                     : // current offset (as a Smi), and the accumulator.
    1344           0 : void InterpreterAssembler::TraceBytecode(Runtime::FunctionId function_id) {
    1345             :   CallRuntime(function_id, GetContext(), BytecodeArrayTaggedPointer(),
    1346           0 :               SmiTag(BytecodeOffset()), GetAccumulatorUnchecked());
    1347           0 : }
    1348             : 
                     : // Increments the (current bytecode -> target bytecode) dispatch counter
                     : // in the external counters table, saturating at uintptr_t max.
    1349           0 : void InterpreterAssembler::TraceBytecodeDispatch(Node* target_bytecode) {
    1350             :   Node* counters_table = ExternalConstant(
    1351           0 :       ExternalReference::interpreter_dispatch_counters(isolate()));
                     :   // Row for the current bytecode; one row per source bytecode, each with
                     :   // (Bytecode::kLast + 1) entries.
    1352             :   Node* source_bytecode_table_index = IntPtrConstant(
    1353           0 :       static_cast<int>(bytecode_) * (static_cast<int>(Bytecode::kLast) + 1));
    1354             : 
    1355             :   Node* counter_offset =
    1356             :       WordShl(IntPtrAdd(source_bytecode_table_index, target_bytecode),
    1357           0 :               IntPtrConstant(kPointerSizeLog2));
    1358             :   Node* old_counter =
    1359           0 :       Load(MachineType::IntPtr(), counters_table, counter_offset);
    1360             : 
    1361           0 :   Label counter_ok(this), counter_saturated(this, Label::kDeferred);
    1362             : 
    1363             :   Node* counter_reached_max = WordEqual(
    1364           0 :       old_counter, IntPtrConstant(std::numeric_limits<uintptr_t>::max()));
    1365           0 :   Branch(counter_reached_max, &counter_saturated, &counter_ok);
    1366             : 
    1367           0 :   Bind(&counter_ok);
    1368             :   {
    1369           0 :     Node* new_counter = IntPtrAdd(old_counter, IntPtrConstant(1));
    1370             :     StoreNoWriteBarrier(MachineType::PointerRepresentation(), counters_table,
    1371           0 :                         counter_offset, new_counter);
    1372           0 :     Goto(&counter_saturated);
    1373             :   }
    1374             : 
    1375           0 :   Bind(&counter_saturated);
    1376           0 : }
    1377             : 
    1378             : // static
                     : // Whether the target architecture supports unaligned memory access
                     : // (compile-time per-architecture constant; MIPS does not).
    1379         540 : bool InterpreterAssembler::TargetSupportsUnalignedAccess() {
    1380             : #if V8_TARGET_ARCH_MIPS || V8_TARGET_ARCH_MIPS64
    1381             :   return false;
    1382             : #elif V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_X87 || \
    1383             :     V8_TARGET_ARCH_S390 || V8_TARGET_ARCH_ARM || V8_TARGET_ARCH_ARM64 || \
    1384             :     V8_TARGET_ARCH_PPC
    1385             :   return true;
    1386             : #else
    1387             : #error "Unknown Architecture"
    1388             : #endif
    1389             : }
    1390             : 
                     : // Returns the number of registers in the current frame: the bytecode
                     : // array's frame size in bytes divided by the pointer size.
    1391         258 : Node* InterpreterAssembler::RegisterCount() {
    1392         258 :   Node* bytecode_array = LoadRegister(Register::bytecode_array());
    1393             :   Node* frame_size = LoadObjectField(
    1394         258 :       bytecode_array, BytecodeArray::kFrameSizeOffset, MachineType::Uint32());
    1395             :   return WordShr(ChangeUint32ToWord(frame_size),
    1396         258 :                  IntPtrConstant(kPointerSizeLog2));
    1397             : }
    1398             : 
                     : // Copies every interpreter register into |array| (a FixedArray) and
                     : // returns |array|. Used when suspending a generator. In debug builds,
                     : // aborts if the array length does not match the register count.
    1399         129 : Node* InterpreterAssembler::ExportRegisterFile(Node* array) {
    1400         129 :   Node* register_count = RegisterCount();
    1401         129 :   if (FLAG_debug_code) {
    1402           0 :     Node* array_size = LoadAndUntagFixedArrayBaseLength(array);
    1403             :     AbortIfWordNotEqual(array_size, register_count,
    1404           0 :                         kInvalidRegisterFileInGenerator);
    1405             :   }
    1406             : 
    1407         129 :   Variable var_index(this, MachineType::PointerRepresentation());
    1408         129 :   var_index.Bind(IntPtrConstant(0));
    1409             : 
    1410             :   // Iterate over register file and write values into array.
    1411             :   // The mapping of register to array index must match that used in
    1412             :   // BytecodeGraphBuilder::VisitResumeGenerator.
    1413         129 :   Label loop(this, &var_index), done_loop(this);
    1414         129 :   Goto(&loop);
    1415         129 :   Bind(&loop);
    1416             :   {
    1417         129 :     Node* index = var_index.value();
    1418         129 :     GotoIfNot(UintPtrLessThan(index, register_count), &done_loop);
    1419             : 
                     :     // Register operands count down from Register(0).ToOperand().
    1420         129 :     Node* reg_index = IntPtrSub(IntPtrConstant(Register(0).ToOperand()), index);
    1421         129 :     Node* value = LoadRegister(reg_index);
    1422             : 
    1423         129 :     StoreFixedArrayElement(array, index, value);
    1424             : 
    1425         129 :     var_index.Bind(IntPtrAdd(index, IntPtrConstant(1)));
    1426         129 :     Goto(&loop);
    1427             :   }
    1428         129 :   Bind(&done_loop);
    1429             : 
    1430         129 :   return array;
    1431             : }
    1432             : 
                     : // Restores interpreter registers from |array| (a FixedArray), replacing
                     : // each consumed slot with the stale-register sentinel so the array does
                     : // not keep values alive. Returns |array|. Used when resuming a
                     : // generator. In debug builds, aborts on a length/register-count mismatch.
    1433         129 : Node* InterpreterAssembler::ImportRegisterFile(Node* array) {
    1434         129 :   Node* register_count = RegisterCount();
    1435         129 :   if (FLAG_debug_code) {
    1436           0 :     Node* array_size = LoadAndUntagFixedArrayBaseLength(array);
    1437             :     AbortIfWordNotEqual(array_size, register_count,
    1438           0 :                         kInvalidRegisterFileInGenerator);
    1439             :   }
    1440             : 
    1441         129 :   Variable var_index(this, MachineType::PointerRepresentation());
    1442         129 :   var_index.Bind(IntPtrConstant(0));
    1443             : 
    1444             :   // Iterate over array and write values into register file.  Also erase the
    1445             :   // array contents to not keep them alive artificially.
    1446         129 :   Label loop(this, &var_index), done_loop(this);
    1447         129 :   Goto(&loop);
    1448         129 :   Bind(&loop);
    1449             :   {
    1450         129 :     Node* index = var_index.value();
    1451         129 :     GotoIfNot(UintPtrLessThan(index, register_count), &done_loop);
    1452             : 
    1453         129 :     Node* value = LoadFixedArrayElement(array, index);
    1454             : 
    1455         129 :     Node* reg_index = IntPtrSub(IntPtrConstant(Register(0).ToOperand()), index);
    1456         129 :     StoreRegister(value, reg_index);
    1457             : 
    1458         129 :     StoreFixedArrayElement(array, index, StaleRegisterConstant());
    1459             : 
    1460         129 :     var_index.Bind(IntPtrAdd(index, IntPtrConstant(1)));
    1461         129 :     Goto(&loop);
    1462             :   }
    1463         129 :   Bind(&done_loop);
    1464             : 
    1465         129 :   return array;
    1466             : }
    1467             : 
                     : // Size in bytes of the current bytecode at the current operand scale.
    1468           0 : int InterpreterAssembler::CurrentBytecodeSize() const {
    1469       24123 :   return Bytecodes::Size(bytecode_, operand_scale_);
    1470             : }
    1471             : 
    1472             : }  // namespace interpreter
    1473             : }  // namespace internal
    1474             : }  // namespace v8

Generated by: LCOV version 1.10