LCOV - code coverage report
Current view: top level - src/interpreter - interpreter-assembler.cc (source / functions)
Test: app.info
Date: 2019-04-17
              Hit    Total    Coverage
Lines:        735      813      90.4 %
Functions:     99      110      90.0 %

          Line data    Source code
       1             : // Copyright 2015 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #include "src/interpreter/interpreter-assembler.h"
       6             : 
       7             : #include <limits>
       8             : #include <ostream>
       9             : 
      10             : #include "src/code-factory.h"
      11             : #include "src/frames.h"
      12             : #include "src/interface-descriptors.h"
      13             : #include "src/interpreter/bytecodes.h"
      14             : #include "src/interpreter/interpreter.h"
      15             : #include "src/machine-type.h"
      16             : #include "src/objects-inl.h"
      17             : #include "src/zone/zone.h"
      18             : 
      19             : namespace v8 {
      20             : namespace internal {
      21             : namespace interpreter {
      22             : 
      23             : using compiler::CodeAssemblerState;
      24             : using compiler::Node;
      25             : template <class T>
      26             : using TNode = compiler::TNode<T>;
      27             : 
      28       27990 : InterpreterAssembler::InterpreterAssembler(CodeAssemblerState* state,
      29             :                                            Bytecode bytecode,
      30             :                                            OperandScale operand_scale)
      31             :     : CodeStubAssembler(state),
      32             :       bytecode_(bytecode),
      33             :       operand_scale_(operand_scale),
      34             :       VARIABLE_CONSTRUCTOR(interpreted_frame_pointer_,
      35             :                            MachineType::PointerRepresentation()),
      36             :       VARIABLE_CONSTRUCTOR(
      37             :           bytecode_array_, MachineRepresentation::kTagged,
      38             :           Parameter(InterpreterDispatchDescriptor::kBytecodeArray)),
      39             :       VARIABLE_CONSTRUCTOR(
      40             :           bytecode_offset_, MachineType::PointerRepresentation(),
      41             :           Parameter(InterpreterDispatchDescriptor::kBytecodeOffset)),
      42             :       VARIABLE_CONSTRUCTOR(
      43             :           dispatch_table_, MachineType::PointerRepresentation(),
      44             :           Parameter(InterpreterDispatchDescriptor::kDispatchTable)),
      45             :       VARIABLE_CONSTRUCTOR(
      46             :           accumulator_, MachineRepresentation::kTagged,
      47             :           Parameter(InterpreterDispatchDescriptor::kAccumulator)),
      48             :       accumulator_use_(AccumulatorUse::kNone),
      49             :       made_call_(false),
      50             :       reloaded_frame_ptr_(false),
      51             :       bytecode_array_valid_(true),
      52             :       disable_stack_check_across_call_(false),
      53       27990 :       stack_pointer_before_call_(nullptr) {
      54             : #ifdef V8_TRACE_IGNITION
      55             :   TraceBytecode(Runtime::kInterpreterTraceBytecodeEntry);
      56             : #endif
      57      114014 :   RegisterCallGenerationCallbacks([this] { CallPrologue(); },
      58       86024 :                                   [this] { CallEpilogue(); });
      59             : 
       60             :   // Save the bytecode offset immediately if the bytecode will make a call
       61             :   // along the critical path or is a return bytecode.
      62       52278 :   if (Bytecodes::MakesCallAlongCriticalPath(bytecode) ||
      63             :       Bytecodes::Returns(bytecode)) {
      64        3942 :     SaveBytecodeOffset();
      65             :   }
      66       27990 : }
      67             : 
      68       83970 : InterpreterAssembler::~InterpreterAssembler() {
       69             :   // If the following check fails, the handler does not use the
       70             :   // accumulator in the way described in the bytecode definitions in
       71             :   // bytecodes.h.
      72             :   DCHECK_EQ(accumulator_use_, Bytecodes::GetAccumulatorUse(bytecode_));
      73       27990 :   UnregisterCallGenerationCallbacks();
      74       27990 : }
      75             : 
      76      147429 : Node* InterpreterAssembler::GetInterpretedFramePointer() {
      77      147429 :   if (!interpreted_frame_pointer_.IsBound()) {
      78       28832 :     interpreted_frame_pointer_.Bind(LoadParentFramePointer());
      79      122125 :   } else if (Bytecodes::MakesCallAlongCriticalPath(bytecode_) && made_call_ &&
      80        3528 :              !reloaded_frame_ptr_) {
      81        1512 :     interpreted_frame_pointer_.Bind(LoadParentFramePointer());
      82        1512 :     reloaded_frame_ptr_ = true;
      83             :   }
      84      147429 :   return interpreted_frame_pointer_.value();
      85             : }
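                      : 
                      : // Note: this reload-after-call pattern recurs in BytecodeOffset() and
                      : // DispatchTableRawPointer() below. Values cached in CSA variables from
                      : // the dispatch parameters may be stale once the handler has made a
                      : // call, so each accessor re-derives its value the first time it is
                      : // used after a call and rebinds the variable.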
      86             : 
      87      232864 : Node* InterpreterAssembler::BytecodeOffset() {
      88      236168 :   if (Bytecodes::MakesCallAlongCriticalPath(bytecode_) && made_call_ &&
      89        3304 :       (bytecode_offset_.value() ==
      90        3304 :        Parameter(InterpreterDispatchDescriptor::kBytecodeOffset))) {
      91        1512 :     bytecode_offset_.Bind(ReloadBytecodeOffset());
      92             :   }
      93      232864 :   return bytecode_offset_.value();
      94             : }
      95             : 
      96        1512 : Node* InterpreterAssembler::ReloadBytecodeOffset() {
      97        1512 :   Node* offset = LoadAndUntagRegister(Register::bytecode_offset());
      98        1512 :   if (operand_scale() != OperandScale::kSingle) {
      99             :     // Add one to the offset such that it points to the actual bytecode rather
     100             :     // than the Wide / ExtraWide prefix bytecode.
     101        3024 :     offset = IntPtrAdd(offset, IntPtrConstant(1));
     102             :   }
     103        1512 :   return offset;
     104             : }
     105             : 
     106       57077 : void InterpreterAssembler::SaveBytecodeOffset() {
     107       57077 :   Node* offset = BytecodeOffset();
     108       57077 :   if (operand_scale() != OperandScale::kSingle) {
     109             :     // Subtract one from the offset such that it points to the Wide / ExtraWide
     110             :     // prefix bytecode.
     111      147128 :     offset = IntPtrSub(BytecodeOffset(), IntPtrConstant(1));
     112             :   }
     113       57077 :   StoreAndTagRegister(offset, Register::bytecode_offset());
     114       57077 : }
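                      : 
                      : // A sketch of the prefix adjustment above, for a hypothetical
                      : // Wide-scaled bytecode stream:
                      : //
                      : //   offset:  N       N+1         N+2 ...
                      : //   bytes:   [Wide]  [LdaGlobal] [16-bit operands ...]
                      : //
                      : // The handler runs with BytecodeOffset() at N+1 (the actual bytecode),
                      : // SaveBytecodeOffset stores N so a later dispatch re-reads the prefix,
                      : // and ReloadBytecodeOffset adds one to get back to N+1.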
     115             : 
     116      148605 : Node* InterpreterAssembler::BytecodeArrayTaggedPointer() {
     117             :   // Force a re-load of the bytecode array after every call in case the debugger
     118             :   // has been activated.
     119      148605 :   if (!bytecode_array_valid_) {
     120       26208 :     bytecode_array_.Bind(LoadRegister(Register::bytecode_array()));
     121       26208 :     bytecode_array_valid_ = true;
     122             :   }
     123      148605 :   return bytecode_array_.value();
     124             : }
     125             : 
     126       87360 : Node* InterpreterAssembler::DispatchTableRawPointer() {
     127       90384 :   if (Bytecodes::MakesCallAlongCriticalPath(bytecode_) && made_call_ &&
     128        3024 :       (dispatch_table_.value() ==
     129        3024 :        Parameter(InterpreterDispatchDescriptor::kDispatchTable))) {
     130        4536 :     dispatch_table_.Bind(ExternalConstant(
     131        3024 :         ExternalReference::interpreter_dispatch_table_address(isolate())));
     132             :   }
     133       87360 :   return dispatch_table_.value();
     134             : }
     135             : 
     136           0 : Node* InterpreterAssembler::GetAccumulatorUnchecked() {
     137       62736 :   return accumulator_.value();
     138             : }
     139             : 
     140       19056 : Node* InterpreterAssembler::GetAccumulator() {
     141             :   DCHECK(Bytecodes::ReadsAccumulator(bytecode_));
     142       38112 :   accumulator_use_ = accumulator_use_ | AccumulatorUse::kRead;
     143       38112 :   return TaggedPoisonOnSpeculation(GetAccumulatorUnchecked());
     144             : }
     145             : 
     146       34068 : void InterpreterAssembler::SetAccumulator(Node* value) {
     147             :   DCHECK(Bytecodes::WritesAccumulator(bytecode_));
     148       68808 :   accumulator_use_ = accumulator_use_ | AccumulatorUse::kWrite;
     149       34404 :   accumulator_.Bind(value);
     150       34068 : }
     151             : 
     152       21348 : Node* InterpreterAssembler::GetContext() {
     153       24204 :   return LoadRegister(Register::current_context());
     154             : }
     155             : 
     156         504 : void InterpreterAssembler::SetContext(Node* value) {
     157         504 :   StoreRegister(value, Register::current_context());
     158         504 : }
     159             : 
     160        1176 : Node* InterpreterAssembler::GetContextAtDepth(Node* context, Node* depth) {
     161        2352 :   Variable cur_context(this, MachineRepresentation::kTaggedPointer);
     162        1176 :   cur_context.Bind(context);
     163             : 
     164        2352 :   Variable cur_depth(this, MachineRepresentation::kWord32);
     165        1176 :   cur_depth.Bind(depth);
     166             : 
     167        1176 :   Label context_found(this);
     168             : 
     169        1176 :   Variable* context_search_loop_variables[2] = {&cur_depth, &cur_context};
     170        2352 :   Label context_search(this, 2, context_search_loop_variables);
     171             : 
     172             :   // Fast path if the depth is 0.
     173        3528 :   Branch(Word32Equal(depth, Int32Constant(0)), &context_found, &context_search);
     174             : 
     175             :   // Loop until the depth is 0.
     176        1176 :   BIND(&context_search);
     177             :   {
     178        4704 :     cur_depth.Bind(Int32Sub(cur_depth.value(), Int32Constant(1)));
     179             :     cur_context.Bind(
     180        3528 :         LoadContextElement(cur_context.value(), Context::PREVIOUS_INDEX));
     181             : 
     182        4704 :     Branch(Word32Equal(cur_depth.value(), Int32Constant(0)), &context_found,
     183        1176 :            &context_search);
     184             :   }
     185             : 
     186        1176 :   BIND(&context_found);
     187        2352 :   return cur_context.value();
     188             : }
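                      : 
                      : // Roughly, the CSA loop above lowers the C++-level idea
                      : //
                      : //   while (depth-- > 0) context = context->previous();
                      : //
                      : // into variables and labels, so the walk up the context chain happens
                      : // in the generated handler rather than at code-generation time.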
     189             : 
     190         672 : void InterpreterAssembler::GotoIfHasContextExtensionUpToDepth(Node* context,
     191             :                                                               Node* depth,
     192             :                                                               Label* target) {
     193        1344 :   Variable cur_context(this, MachineRepresentation::kTaggedPointer);
     194         672 :   cur_context.Bind(context);
     195             : 
     196        1344 :   Variable cur_depth(this, MachineRepresentation::kWord32);
     197         672 :   cur_depth.Bind(depth);
     198             : 
     199         672 :   Variable* context_search_loop_variables[2] = {&cur_depth, &cur_context};
     200        1344 :   Label context_search(this, 2, context_search_loop_variables);
     201             : 
     202             :   // Loop until the depth is 0.
     203         672 :   Goto(&context_search);
     204         672 :   BIND(&context_search);
     205             :   {
      206             :     // TODO(leszeks): We only need to do this check if the context had a
      207             :     // sloppy eval; we could pass in a context chain bitmask to figure out
      208             :     // which contexts actually need to be checked.
     209             : 
     210             :     Node* extension_slot =
     211        2016 :         LoadContextElement(cur_context.value(), Context::EXTENSION_INDEX);
     212             : 
     213             :     // Jump to the target if the extension slot is not a hole.
     214        1344 :     GotoIf(WordNotEqual(extension_slot, TheHoleConstant()), target);
     215             : 
     216        2688 :     cur_depth.Bind(Int32Sub(cur_depth.value(), Int32Constant(1)));
     217             :     cur_context.Bind(
     218        2016 :         LoadContextElement(cur_context.value(), Context::PREVIOUS_INDEX));
     219             : 
     220        2688 :     GotoIf(Word32NotEqual(cur_depth.value(), Int32Constant(0)),
     221         672 :            &context_search);
     222             :   }
     223         672 : }
     224             : 
     225        2184 : Node* InterpreterAssembler::RegisterLocation(Node* reg_index) {
     226        4368 :   return WordPoisonOnSpeculation(
     227        6552 :       IntPtrAdd(GetInterpretedFramePointer(), RegisterFrameOffset(reg_index)));
     228             : }
     229             : 
     230           0 : Node* InterpreterAssembler::RegisterLocation(Register reg) {
     231           0 :   return RegisterLocation(IntPtrConstant(reg.ToOperand()));
     232             : }
     233             : 
     234           0 : Node* InterpreterAssembler::RegisterFrameOffset(Node* index) {
     235       55216 :   return TimesSystemPointerSize(index);
     236             : }
     237             : 
     238       13608 : Node* InterpreterAssembler::LoadRegister(Node* reg_index) {
     239       13608 :   return LoadFullTagged(GetInterpretedFramePointer(),
     240             :                         RegisterFrameOffset(reg_index),
     241       13608 :                         LoadSensitivity::kCritical);
     242             : }
     243             : 
     244       67784 : Node* InterpreterAssembler::LoadRegister(Register reg) {
     245       67784 :   return LoadFullTagged(GetInterpretedFramePointer(),
     246      135568 :                         IntPtrConstant(reg.ToOperand() * kSystemPointerSize));
     247             : }
     248             : 
     249        1512 : Node* InterpreterAssembler::LoadAndUntagRegister(Register reg) {
     250        3024 :   return LoadAndUntagSmi(GetInterpretedFramePointer(),
     251        3024 :                          reg.ToOperand() * kSystemPointerSize);
     252             : }
     253             : 
     254       12936 : Node* InterpreterAssembler::LoadRegisterAtOperandIndex(int operand_index) {
     255       12936 :   return LoadRegister(
     256       12936 :       BytecodeOperandReg(operand_index, LoadSensitivity::kSafe));
     257             : }
     258             : 
     259         168 : std::pair<Node*, Node*> InterpreterAssembler::LoadRegisterPairAtOperandIndex(
     260             :     int operand_index) {
     261             :   DCHECK_EQ(OperandType::kRegPair,
     262             :             Bytecodes::GetOperandType(bytecode_, operand_index));
     263             :   Node* first_reg_index =
     264         168 :       BytecodeOperandReg(operand_index, LoadSensitivity::kSafe);
     265         168 :   Node* second_reg_index = NextRegister(first_reg_index);
     266         168 :   return std::make_pair(LoadRegister(first_reg_index),
     267         336 :                         LoadRegister(second_reg_index));
     268             : }
     269             : 
     270             : InterpreterAssembler::RegListNodePair
     271        2184 : InterpreterAssembler::GetRegisterListAtOperandIndex(int operand_index) {
     272             :   DCHECK(Bytecodes::IsRegisterListOperandType(
     273             :       Bytecodes::GetOperandType(bytecode_, operand_index)));
     274             :   DCHECK_EQ(OperandType::kRegCount,
     275             :             Bytecodes::GetOperandType(bytecode_, operand_index + 1));
     276        2184 :   Node* base_reg = RegisterLocation(
     277        2184 :       BytecodeOperandReg(operand_index, LoadSensitivity::kSafe));
     278        2184 :   Node* reg_count = BytecodeOperandCount(operand_index + 1);
     279        2184 :   return RegListNodePair(base_reg, reg_count);
     280             : }
     281             : 
     282        6888 : Node* InterpreterAssembler::LoadRegisterFromRegisterList(
     283             :     const RegListNodePair& reg_list, int index) {
     284        6888 :   Node* location = RegisterLocationInRegisterList(reg_list, index);
     285             :   // Location is already poisoned on speculation, so no need to poison here.
     286        6888 :   return LoadFullTagged(location);
     287             : }
     288             : 
     289        7056 : Node* InterpreterAssembler::RegisterLocationInRegisterList(
     290             :     const RegListNodePair& reg_list, int index) {
     291             :   CSA_ASSERT(this,
     292             :              Uint32GreaterThan(reg_list.reg_count(), Int32Constant(index)));
     293       14112 :   Node* offset = RegisterFrameOffset(IntPtrConstant(index));
      294             :   // Register indexes are negative, so subtract the index from the base
      295             :   // location to get the location.
     296       14112 :   return IntPtrSub(reg_list.base_reg_location(), offset);
     297             : }
     298             : 
     299         504 : void InterpreterAssembler::StoreRegister(Node* value, Register reg) {
     300         504 :   StoreFullTaggedNoWriteBarrier(
     301             :       GetInterpretedFramePointer(),
     302        1008 :       IntPtrConstant(reg.ToOperand() * kSystemPointerSize), value);
     303         504 : }
     304             : 
     305        4760 : void InterpreterAssembler::StoreRegister(Node* value, Node* reg_index) {
     306        4760 :   StoreFullTaggedNoWriteBarrier(GetInterpretedFramePointer(),
     307        4760 :                                 RegisterFrameOffset(reg_index), value);
     308        4760 : }
     309             : 
     310       57077 : void InterpreterAssembler::StoreAndTagRegister(Node* value, Register reg) {
     311       57077 :   int offset = reg.ToOperand() * kSystemPointerSize;
     312       57077 :   StoreAndTagSmi(GetInterpretedFramePointer(), offset, value);
     313       57077 : }
     314             : 
     315        1008 : void InterpreterAssembler::StoreRegisterAtOperandIndex(Node* value,
     316             :                                                        int operand_index) {
     317        1008 :   StoreRegister(value,
     318        1008 :                 BytecodeOperandReg(operand_index, LoadSensitivity::kSafe));
     319        1008 : }
     320             : 
     321         168 : void InterpreterAssembler::StoreRegisterPairAtOperandIndex(Node* value1,
     322             :                                                            Node* value2,
     323             :                                                            int operand_index) {
     324             :   DCHECK_EQ(OperandType::kRegOutPair,
     325             :             Bytecodes::GetOperandType(bytecode_, operand_index));
     326             :   Node* first_reg_index =
     327         168 :       BytecodeOperandReg(operand_index, LoadSensitivity::kSafe);
     328         168 :   StoreRegister(value1, first_reg_index);
     329         168 :   Node* second_reg_index = NextRegister(first_reg_index);
     330         168 :   StoreRegister(value2, second_reg_index);
     331         168 : }
     332             : 
     333         336 : void InterpreterAssembler::StoreRegisterTripleAtOperandIndex(
     334             :     Node* value1, Node* value2, Node* value3, int operand_index) {
     335             :   DCHECK_EQ(OperandType::kRegOutTriple,
     336             :             Bytecodes::GetOperandType(bytecode_, operand_index));
     337             :   Node* first_reg_index =
     338         336 :       BytecodeOperandReg(operand_index, LoadSensitivity::kSafe);
     339         336 :   StoreRegister(value1, first_reg_index);
     340         336 :   Node* second_reg_index = NextRegister(first_reg_index);
     341         336 :   StoreRegister(value2, second_reg_index);
     342         336 :   Node* third_reg_index = NextRegister(second_reg_index);
     343         336 :   StoreRegister(value3, third_reg_index);
     344         336 : }
     345             : 
     346        1008 : Node* InterpreterAssembler::NextRegister(Node* reg_index) {
      347             :   // Register indexes are negative, so the next register is at index - 1.
     348        3024 :   return IntPtrAdd(reg_index, IntPtrConstant(-1));
     349             : }
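                      : 
                      : // The register arithmetic above assumes the interpreter's register file
                      : // grows downwards from the frame pointer:
                      : //
                      : //   location(reg) = frame_pointer + reg_index * kSystemPointerSize
                      : //
                      : // with negative reg_index values, so the "next" register is one slot
                      : // further down, i.e. reg_index - 1.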
     350             : 
     351       19695 : Node* InterpreterAssembler::OperandOffset(int operand_index) {
     352       39390 :   return IntPtrConstant(
     353       39390 :       Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale()));
     354             : }
     355             : 
     356       10829 : Node* InterpreterAssembler::BytecodeOperandUnsignedByte(
     357             :     int operand_index, LoadSensitivity needs_poisoning) {
     358             :   DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
     359             :   DCHECK_EQ(OperandSize::kByte, Bytecodes::GetOperandSize(
     360             :                                     bytecode_, operand_index, operand_scale()));
     361       10829 :   Node* operand_offset = OperandOffset(operand_index);
     362       10829 :   return Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
     363       32487 :               IntPtrAdd(BytecodeOffset(), operand_offset), needs_poisoning);
     364             : }
     365             : 
     366        8866 : Node* InterpreterAssembler::BytecodeOperandSignedByte(
     367             :     int operand_index, LoadSensitivity needs_poisoning) {
     368             :   DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
     369             :   DCHECK_EQ(OperandSize::kByte, Bytecodes::GetOperandSize(
     370             :                                     bytecode_, operand_index, operand_scale()));
     371        8866 :   Node* operand_offset = OperandOffset(operand_index);
     372        8866 :   return Load(MachineType::Int8(), BytecodeArrayTaggedPointer(),
     373       26598 :               IntPtrAdd(BytecodeOffset(), operand_offset), needs_poisoning);
     374             : }
     375             : 
     376           0 : Node* InterpreterAssembler::BytecodeOperandReadUnaligned(
     377             :     int relative_offset, MachineType result_type,
     378             :     LoadSensitivity needs_poisoning) {
     379             :   static const int kMaxCount = 4;
     380             :   DCHECK(!TargetSupportsUnalignedAccess());
     381             : 
     382             :   int count;
     383           0 :   switch (result_type.representation()) {
     384             :     case MachineRepresentation::kWord16:
     385             :       count = 2;
     386             :       break;
     387             :     case MachineRepresentation::kWord32:
     388             :       count = 4;
     389           0 :       break;
     390             :     default:
     391           0 :       UNREACHABLE();
     392             :       break;
     393             :   }
     394             :   MachineType msb_type =
     395           0 :       result_type.IsSigned() ? MachineType::Int8() : MachineType::Uint8();
     396             : 
     397             : #if V8_TARGET_LITTLE_ENDIAN
     398             :   const int kStep = -1;
     399           0 :   int msb_offset = count - 1;
     400             : #elif V8_TARGET_BIG_ENDIAN
     401             :   const int kStep = 1;
     402             :   int msb_offset = 0;
     403             : #else
     404             : #error "Unknown Architecture"
     405             : #endif
     406             : 
      407             :   // Read the most significant byte into bytes[0] and then in order
      408             :   // down to the least significant byte in bytes[count - 1].
     409             :   DCHECK_LE(count, kMaxCount);
     410             :   Node* bytes[kMaxCount];
     411           0 :   for (int i = 0; i < count; i++) {
     412           0 :     MachineType machine_type = (i == 0) ? msb_type : MachineType::Uint8();
     413           0 :     Node* offset = IntPtrConstant(relative_offset + msb_offset + i * kStep);
     414           0 :     Node* array_offset = IntPtrAdd(BytecodeOffset(), offset);
     415           0 :     bytes[i] = Load(machine_type, BytecodeArrayTaggedPointer(), array_offset,
     416           0 :                     needs_poisoning);
     417             :   }
     418             : 
     419             :   // Pack LSB to MSB.
     420           0 :   Node* result = bytes[--count];
     421           0 :   for (int i = 1; --count >= 0; i++) {
     422           0 :     Node* shift = Int32Constant(i * kBitsPerByte);
     423           0 :     Node* value = Word32Shl(bytes[count], shift);
     424           0 :     result = Word32Or(value, result);
     425             :   }
     426           0 :   return result;
     427             : }
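                      : 
                      : // A worked example of the packing loop above: for an unaligned 16-bit
                      : // operand, bytes[0] holds the (possibly sign-extended) MSB and bytes[1]
                      : // the LSB, and the loop computes
                      : //
                      : //   result = Word32Or(Word32Shl(bytes[0], 8), bytes[1])
                      : //
                      : // i.e. (msb << 8) | lsb; the 32-bit case adds two more shifted ORs.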
     428             : 
     429        9747 : Node* InterpreterAssembler::BytecodeOperandUnsignedShort(
     430             :     int operand_index, LoadSensitivity needs_poisoning) {
     431             :   DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
     432             :   DCHECK_EQ(
     433             :       OperandSize::kShort,
     434             :       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()));
     435             :   int operand_offset =
     436        9747 :       Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
     437             :   if (TargetSupportsUnalignedAccess()) {
     438        9747 :     return Load(MachineType::Uint16(), BytecodeArrayTaggedPointer(),
     439       38988 :                 IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)),
     440        9747 :                 needs_poisoning);
     441             :   } else {
     442             :     return BytecodeOperandReadUnaligned(operand_offset, MachineType::Uint16(),
     443             :                                         needs_poisoning);
     444             :   }
     445             : }
     446             : 
     447        6626 : Node* InterpreterAssembler::BytecodeOperandSignedShort(
     448             :     int operand_index, LoadSensitivity needs_poisoning) {
     449             :   DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
     450             :   DCHECK_EQ(
     451             :       OperandSize::kShort,
     452             :       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()));
     453             :   int operand_offset =
     454        6626 :       Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
     455             :   if (TargetSupportsUnalignedAccess()) {
     456        6626 :     return Load(MachineType::Int16(), BytecodeArrayTaggedPointer(),
     457       26504 :                 IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)),
     458        6626 :                 needs_poisoning);
     459             :   } else {
     460             :     return BytecodeOperandReadUnaligned(operand_offset, MachineType::Int16(),
     461             :                                         needs_poisoning);
     462             :   }
     463             : }
     464             : 
     465        9399 : Node* InterpreterAssembler::BytecodeOperandUnsignedQuad(
     466             :     int operand_index, LoadSensitivity needs_poisoning) {
     467             :   DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
     468             :   DCHECK_EQ(OperandSize::kQuad, Bytecodes::GetOperandSize(
     469             :                                     bytecode_, operand_index, operand_scale()));
     470             :   int operand_offset =
     471        9399 :       Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
     472             :   if (TargetSupportsUnalignedAccess()) {
     473        9399 :     return Load(MachineType::Uint32(), BytecodeArrayTaggedPointer(),
     474       37596 :                 IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)),
     475        9399 :                 needs_poisoning);
     476             :   } else {
     477             :     return BytecodeOperandReadUnaligned(operand_offset, MachineType::Uint32(),
     478             :                                         needs_poisoning);
     479             :   }
     480             : }
     481             : 
     482        6626 : Node* InterpreterAssembler::BytecodeOperandSignedQuad(
     483             :     int operand_index, LoadSensitivity needs_poisoning) {
     484             :   DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
     485             :   DCHECK_EQ(OperandSize::kQuad, Bytecodes::GetOperandSize(
     486             :                                     bytecode_, operand_index, operand_scale()));
     487             :   int operand_offset =
     488        6626 :       Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
     489             :   if (TargetSupportsUnalignedAccess()) {
     490        6626 :     return Load(MachineType::Int32(), BytecodeArrayTaggedPointer(),
     491       19878 :                 IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)),
     492        6626 :                 needs_poisoning);
     493             :   } else {
     494             :     return BytecodeOperandReadUnaligned(operand_offset, MachineType::Int32(),
     495             :                                         needs_poisoning);
     496             :   }
     497             : }
     498             : 
     499       22118 : Node* InterpreterAssembler::BytecodeSignedOperand(
     500             :     int operand_index, OperandSize operand_size,
     501             :     LoadSensitivity needs_poisoning) {
     502             :   DCHECK(!Bytecodes::IsUnsignedOperandType(
     503             :       Bytecodes::GetOperandType(bytecode_, operand_index)));
     504       22118 :   switch (operand_size) {
     505             :     case OperandSize::kByte:
     506        8866 :       return BytecodeOperandSignedByte(operand_index, needs_poisoning);
     507             :     case OperandSize::kShort:
     508        6626 :       return BytecodeOperandSignedShort(operand_index, needs_poisoning);
     509             :     case OperandSize::kQuad:
     510        6626 :       return BytecodeOperandSignedQuad(operand_index, needs_poisoning);
     511             :     case OperandSize::kNone:
     512           0 :       UNREACHABLE();
     513             :   }
     514             :   return nullptr;
     515             : }
     516             : 
     517       29975 : Node* InterpreterAssembler::BytecodeUnsignedOperand(
     518             :     int operand_index, OperandSize operand_size,
     519             :     LoadSensitivity needs_poisoning) {
     520             :   DCHECK(Bytecodes::IsUnsignedOperandType(
     521             :       Bytecodes::GetOperandType(bytecode_, operand_index)));
     522       29975 :   switch (operand_size) {
     523             :     case OperandSize::kByte:
     524       10829 :       return BytecodeOperandUnsignedByte(operand_index, needs_poisoning);
     525             :     case OperandSize::kShort:
     526        9747 :       return BytecodeOperandUnsignedShort(operand_index, needs_poisoning);
     527             :     case OperandSize::kQuad:
     528        9399 :       return BytecodeOperandUnsignedQuad(operand_index, needs_poisoning);
     529             :     case OperandSize::kNone:
     530           0 :       UNREACHABLE();
     531             :   }
     532             :   return nullptr;
     533             : }
     534             : 
     535        2223 : Node* InterpreterAssembler::BytecodeOperandCount(int operand_index) {
     536             :   DCHECK_EQ(OperandType::kRegCount,
     537             :             Bytecodes::GetOperandType(bytecode_, operand_index));
     538             :   OperandSize operand_size =
     539        2223 :       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
     540        2223 :   return BytecodeUnsignedOperand(operand_index, operand_size);
     541             : }
     542             : 
     543        1259 : Node* InterpreterAssembler::BytecodeOperandFlag(int operand_index) {
     544             :   DCHECK_EQ(OperandType::kFlag8,
     545             :             Bytecodes::GetOperandType(bytecode_, operand_index));
     546             :   OperandSize operand_size =
     547        1259 :       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
     548             :   DCHECK_EQ(operand_size, OperandSize::kByte);
     549        1259 :   return BytecodeUnsignedOperand(operand_index, operand_size);
     550             : }
     551             : 
     552        4275 : Node* InterpreterAssembler::BytecodeOperandUImm(int operand_index) {
     553             :   DCHECK_EQ(OperandType::kUImm,
     554             :             Bytecodes::GetOperandType(bytecode_, operand_index));
     555             :   OperandSize operand_size =
     556        4275 :       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
     557        4275 :   return BytecodeUnsignedOperand(operand_index, operand_size);
     558             : }
     559             : 
     560        2184 : Node* InterpreterAssembler::BytecodeOperandUImmWord(int operand_index) {
     561        6552 :   return ChangeUint32ToWord(BytecodeOperandUImm(operand_index));
     562             : }
     563             : 
     564         168 : Node* InterpreterAssembler::BytecodeOperandUImmSmi(int operand_index) {
     565         504 :   return SmiFromInt32(BytecodeOperandUImm(operand_index));
     566             : }
     567             : 
     568        3078 : Node* InterpreterAssembler::BytecodeOperandImm(int operand_index) {
     569             :   DCHECK_EQ(OperandType::kImm,
     570             :             Bytecodes::GetOperandType(bytecode_, operand_index));
     571             :   OperandSize operand_size =
     572        3078 :       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
     573        3078 :   return BytecodeSignedOperand(operand_index, operand_size);
     574             : }
     575             : 
     576         504 : Node* InterpreterAssembler::BytecodeOperandImmIntPtr(int operand_index) {
     577        1512 :   return ChangeInt32ToIntPtr(BytecodeOperandImm(operand_index));
     578             : }
     579             : 
     580        2352 : Node* InterpreterAssembler::BytecodeOperandImmSmi(int operand_index) {
     581        7056 :   return SmiFromInt32(BytecodeOperandImm(operand_index));
     582             : }
     583             : 
     584       14640 : Node* InterpreterAssembler::BytecodeOperandIdxInt32(int operand_index) {
     585             :   DCHECK_EQ(OperandType::kIdx,
     586             :             Bytecodes::GetOperandType(bytecode_, operand_index));
     587             :   OperandSize operand_size =
     588       14640 :       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
     589       14640 :   return BytecodeUnsignedOperand(operand_index, operand_size);
     590             : }
     591             : 
     592       14640 : Node* InterpreterAssembler::BytecodeOperandIdx(int operand_index) {
     593       43920 :   return ChangeUint32ToWord(BytecodeOperandIdxInt32(operand_index));
     594             : }
     595             : 
     596         168 : Node* InterpreterAssembler::BytecodeOperandIdxSmi(int operand_index) {
     597         504 :   return SmiTag(BytecodeOperandIdx(operand_index));
     598             : }
     599             : 
     600        6888 : Node* InterpreterAssembler::BytecodeOperandConstantPoolIdx(
     601             :     int operand_index, LoadSensitivity needs_poisoning) {
     602             :   DCHECK_EQ(OperandType::kIdx,
     603             :             Bytecodes::GetOperandType(bytecode_, operand_index));
     604             :   OperandSize operand_size =
     605        6888 :       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
     606       13776 :   return ChangeUint32ToWord(
     607       20664 :       BytecodeUnsignedOperand(operand_index, operand_size, needs_poisoning));
     608             : }
     609             : 
     610       19040 : Node* InterpreterAssembler::BytecodeOperandReg(
     611             :     int operand_index, LoadSensitivity needs_poisoning) {
     612             :   DCHECK(Bytecodes::IsRegisterOperandType(
     613             :       Bytecodes::GetOperandType(bytecode_, operand_index)));
     614             :   OperandSize operand_size =
     615       19040 :       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
     616       38080 :   return ChangeInt32ToIntPtr(
     617       57120 :       BytecodeSignedOperand(operand_index, operand_size, needs_poisoning));
     618             : }
     619             : 
     620         348 : Node* InterpreterAssembler::BytecodeOperandRuntimeId(int operand_index) {
     621             :   DCHECK_EQ(OperandType::kRuntimeId,
     622             :             Bytecodes::GetOperandType(bytecode_, operand_index));
     623             :   OperandSize operand_size =
     624         348 :       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
     625             :   DCHECK_EQ(operand_size, OperandSize::kShort);
     626         348 :   return BytecodeUnsignedOperand(operand_index, operand_size);
     627             : }
     628             : 
     629         171 : Node* InterpreterAssembler::BytecodeOperandNativeContextIndex(
     630             :     int operand_index) {
     631             :   DCHECK_EQ(OperandType::kNativeContextIndex,
     632             :             Bytecodes::GetOperandType(bytecode_, operand_index));
     633             :   OperandSize operand_size =
     634         171 :       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
     635         342 :   return ChangeUint32ToWord(
     636         513 :       BytecodeUnsignedOperand(operand_index, operand_size));
     637             : }
     638             : 
     639         171 : Node* InterpreterAssembler::BytecodeOperandIntrinsicId(int operand_index) {
     640             :   DCHECK_EQ(OperandType::kIntrinsicId,
     641             :             Bytecodes::GetOperandType(bytecode_, operand_index));
     642             :   OperandSize operand_size =
     643         171 :       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
     644             :   DCHECK_EQ(operand_size, OperandSize::kByte);
     645         171 :   return BytecodeUnsignedOperand(operand_index, operand_size);
     646             : }
     647             : 
     648        7920 : Node* InterpreterAssembler::LoadConstantPoolEntry(Node* index) {
     649        7920 :   TNode<FixedArray> constant_pool = CAST(LoadObjectField(
     650             :       BytecodeArrayTaggedPointer(), BytecodeArray::kConstantPoolOffset));
     651             :   return UnsafeLoadFixedArrayElement(
     652        7920 :       constant_pool, UncheckedCast<IntPtrT>(index), LoadSensitivity::kCritical);
     653             : }
     654             : 
     655         336 : Node* InterpreterAssembler::LoadAndUntagConstantPoolEntry(Node* index) {
     656        1008 :   return SmiUntag(LoadConstantPoolEntry(index));
     657             : }
     658             : 
     659        6888 : Node* InterpreterAssembler::LoadConstantPoolEntryAtOperandIndex(
     660             :     int operand_index) {
     661             :   Node* index =
     662        6888 :       BytecodeOperandConstantPoolIdx(operand_index, LoadSensitivity::kSafe);
     663        6888 :   return LoadConstantPoolEntry(index);
     664             : }
     665             : 
     666        1680 : Node* InterpreterAssembler::LoadAndUntagConstantPoolEntryAtOperandIndex(
     667             :     int operand_index) {
     668        5040 :   return SmiUntag(LoadConstantPoolEntryAtOperandIndex(operand_index));
     669             : }
     670             : 
     671       12108 : TNode<HeapObject> InterpreterAssembler::LoadFeedbackVector() {
     672       12108 :   TNode<JSFunction> function = CAST(LoadRegister(Register::function_closure()));
     673       12108 :   return CodeStubAssembler::LoadFeedbackVector(function);
     674             : }
     675             : 
     676       58034 : void InterpreterAssembler::CallPrologue() {
     677       58034 :   if (!Bytecodes::MakesCallAlongCriticalPath(bytecode_)) {
     678             :     // Bytecodes that make a call along the critical path save the bytecode
     679             :     // offset in the bytecode handler's prologue. For other bytecodes, if
     680             :     // there are multiple calls in the bytecode handler, you need to spill
     681             :     // before each of them, unless SaveBytecodeOffset has explicitly been called
     682             :     // in a path that dominates _all_ of those calls (which we don't track).
     683       53135 :     SaveBytecodeOffset();
     684             :   }
     685             : 
     686       58034 :   if (FLAG_debug_code && !disable_stack_check_across_call_) {
     687             :     DCHECK_NULL(stack_pointer_before_call_);
     688           0 :     stack_pointer_before_call_ = LoadStackPointer();
     689             :   }
     690       58034 :   bytecode_array_valid_ = false;
     691       58034 :   made_call_ = true;
     692       58034 : }
     693             : 
     694       58034 : void InterpreterAssembler::CallEpilogue() {
     695       58034 :   if (FLAG_debug_code && !disable_stack_check_across_call_) {
     696           0 :     Node* stack_pointer_after_call = LoadStackPointer();
     697           0 :     Node* stack_pointer_before_call = stack_pointer_before_call_;
     698           0 :     stack_pointer_before_call_ = nullptr;
     699             :     AbortIfWordNotEqual(stack_pointer_before_call, stack_pointer_after_call,
     700           0 :                         AbortReason::kUnexpectedStackPointer);
     701             :   }
     702       58034 : }
     703             : 
     704        2016 : void InterpreterAssembler::IncrementCallCount(Node* feedback_vector,
     705             :                                               Node* slot_id) {
     706        2016 :   Comment("increment call count");
     707             :   TNode<Smi> call_count =
     708        2016 :       CAST(LoadFeedbackVectorSlot(feedback_vector, slot_id, kTaggedSize));
      709             :   // The lowest {FeedbackNexus::CallCountField::kShift} bits of the call
      710             :   // count are used as flags, so to increment the call count by 1 we
      711             :   // have to add 1 << {FeedbackNexus::CallCountField::kShift}.
     712        4032 :   Node* new_count = SmiAdd(
     713        2016 :       call_count, SmiConstant(1 << FeedbackNexus::CallCountField::kShift));
      714             :   // The count is a Smi, so we don't need a write barrier.
     715             :   StoreFeedbackVectorSlot(feedback_vector, slot_id, new_count,
     716        2016 :                           SKIP_WRITE_BARRIER, kTaggedSize);
     717        2016 : }
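                      : 
                      : // A small sketch of the flag-preserving increment above: if, purely for
                      : // illustration, kShift were 1, a stored count of 0b101 (logical count 2,
                      : // flag bit 1) becomes 0b111 (logical count 3, flag bit 1) after adding
                      : // 1 << 1; the low flag bits pass through untouched.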
     718             : 
     719        1848 : void InterpreterAssembler::CollectCallableFeedback(Node* target, Node* context,
     720             :                                                    Node* feedback_vector,
     721             :                                                    Node* slot_id) {
     722        3696 :   Label extra_checks(this, Label::kDeferred), done(this);
     723             : 
     724             :   // Check if we have monomorphic {target} feedback already.
     725             :   TNode<MaybeObject> feedback =
     726        1848 :       LoadFeedbackVectorSlot(feedback_vector, slot_id);
     727        1848 :   Comment("check if monomorphic");
     728        1848 :   TNode<BoolT> is_monomorphic = IsWeakReferenceTo(feedback, CAST(target));
     729        1848 :   GotoIf(is_monomorphic, &done);
     730             : 
     731             :   // Check if it is a megamorphic {target}.
     732        1848 :   Comment("check if megamorphic");
     733             :   Node* is_megamorphic = WordEqual(
     734        1848 :       feedback, HeapConstant(FeedbackVector::MegamorphicSentinel(isolate())));
     735        1848 :   Branch(is_megamorphic, &done, &extra_checks);
     736             : 
     737        1848 :   BIND(&extra_checks);
     738             :   {
     739        1848 :     Label initialize(this), mark_megamorphic(this);
     740             : 
     741        1848 :     Comment("check if weak reference");
     742             :     Node* is_uninitialized = WordEqual(
     743             :         feedback,
     744        1848 :         HeapConstant(FeedbackVector::UninitializedSentinel(isolate())));
     745        1848 :     GotoIf(is_uninitialized, &initialize);
     746             :     CSA_ASSERT(this, IsWeakOrCleared(feedback));
     747             : 
     748             :     // If the weak reference is cleared, we have a new chance to become
     749             :     // monomorphic.
     750        1848 :     Comment("check if weak reference is cleared");
     751        3696 :     Branch(IsCleared(feedback), &initialize, &mark_megamorphic);
     752             : 
     753        1848 :     BIND(&initialize);
     754             :     {
     755             :       // Check if {target} is a JSFunction in the current native context.
     756        1848 :       Comment("check if function in same native context");
     757        3696 :       GotoIf(TaggedIsSmi(target), &mark_megamorphic);
     758             :       // Check if the {target} is a JSFunction or JSBoundFunction
     759             :       // in the current native context.
     760        3696 :       VARIABLE(var_current, MachineRepresentation::kTagged, target);
     761        1848 :       Label loop(this, &var_current), done_loop(this);
     762        1848 :       Goto(&loop);
     763        1848 :       BIND(&loop);
     764             :       {
     765        1848 :         Label if_boundfunction(this), if_function(this);
     766        1848 :         Node* current = var_current.value();
     767             :         CSA_ASSERT(this, TaggedIsNotSmi(current));
     768        3696 :         Node* current_instance_type = LoadInstanceType(current);
     769        3696 :         GotoIf(InstanceTypeEqual(current_instance_type, JS_BOUND_FUNCTION_TYPE),
     770        1848 :                &if_boundfunction);
     771        3696 :         Branch(InstanceTypeEqual(current_instance_type, JS_FUNCTION_TYPE),
     772        1848 :                &if_function, &mark_megamorphic);
     773             : 
     774        1848 :         BIND(&if_function);
     775             :         {
     776             :           // Check that the JSFunction {current} is in the current native
     777             :           // context.
     778             :           Node* current_context =
     779             :               LoadObjectField(current, JSFunction::kContextOffset);
     780        3696 :           Node* current_native_context = LoadNativeContext(current_context);
     781        3696 :           Branch(WordEqual(LoadNativeContext(context), current_native_context),
     782        1848 :                  &done_loop, &mark_megamorphic);
     783             :         }
     784             : 
     785        1848 :         BIND(&if_boundfunction);
     786             :         {
     787             :           // Continue with the [[BoundTargetFunction]] of {target}.
     788             :           var_current.Bind(LoadObjectField(
     789        1848 :               current, JSBoundFunction::kBoundTargetFunctionOffset));
     790        1848 :           Goto(&loop);
     791             :         }
     792             :       }
     793        1848 :       BIND(&done_loop);
     794             :       StoreWeakReferenceInFeedbackVector(feedback_vector, slot_id,
     795        1848 :                                          CAST(target));
     796        1848 :       ReportFeedbackUpdate(feedback_vector, slot_id, "Call:Initialize");
     797        1848 :       Goto(&done);
     798             :     }
     799             : 
     800        1848 :     BIND(&mark_megamorphic);
     801             :     {
      802             :       // MegamorphicSentinel is an immortal, immovable object, so a
      803             :       // write barrier is not needed.
     804        1848 :       Comment("transition to megamorphic");
     805             :       DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kmegamorphic_symbol));
     806             :       StoreFeedbackVectorSlot(
     807             :           feedback_vector, slot_id,
     808        1848 :           HeapConstant(FeedbackVector::MegamorphicSentinel(isolate())),
     809        1848 :           SKIP_WRITE_BARRIER);
     810        1848 :       ReportFeedbackUpdate(feedback_vector, slot_id,
     811        1848 :                            "Call:TransitionMegamorphic");
     812        1848 :       Goto(&done);
     813             :     }
     814             :   }
     815             : 
     816        1848 :   BIND(&done);
     817        1848 : }
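                      : 
                      : // In summary, the feedback slot above moves through a simple lattice:
                      : //
                      : //   Uninitialized -> Monomorphic (weak ref to target) -> Megamorphic
                      : //
                      : // A cleared weak reference may re-initialize to monomorphic; a target
                      : // mismatch, a cross-native-context function, or a non-function all fall
                      : // through to the megamorphic sentinel, which is terminal.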
     818             : 
     819        1680 : void InterpreterAssembler::CollectCallFeedback(Node* target, Node* context,
     820             :                                                Node* maybe_feedback_vector,
     821             :                                                Node* slot_id) {
     822        3360 :   Label feedback_done(this);
     823             :   // If feedback_vector is not valid, then nothing to do.
     824        3360 :   GotoIf(IsUndefined(maybe_feedback_vector), &feedback_done);
     825             : 
     826             :   CSA_SLOW_ASSERT(this, IsFeedbackVector(maybe_feedback_vector));
     827             : 
     828             :   // Increment the call count.
     829        1680 :   IncrementCallCount(maybe_feedback_vector, slot_id);
     830             : 
     831             :   // Collect the callable {target} feedback.
     832        1680 :   CollectCallableFeedback(target, context, maybe_feedback_vector, slot_id);
     833        1680 :   Goto(&feedback_done);
     834             : 
     835        1680 :   BIND(&feedback_done);
     836        1680 : }
     837             : 
     838        1008 : void InterpreterAssembler::CallJSAndDispatch(
     839             :     Node* function, Node* context, const RegListNodePair& args,
     840             :     ConvertReceiverMode receiver_mode) {
     841             :   DCHECK(Bytecodes::MakesCallAlongCriticalPath(bytecode_));
     842             :   DCHECK(Bytecodes::IsCallOrConstruct(bytecode_) ||
     843             :          bytecode_ == Bytecode::kInvokeIntrinsic);
     844             :   DCHECK_EQ(Bytecodes::GetReceiverMode(bytecode_), receiver_mode);
     845             : 
     846             :   Node* args_count;
     847        1008 :   if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
     848             :     // The receiver is implied, so it is not in the argument list.
     849             :     args_count = args.reg_count();
     850             :   } else {
     851             :     // Subtract the receiver from the argument count.
     852        1344 :     Node* receiver_count = Int32Constant(1);
     853        1344 :     args_count = Int32Sub(args.reg_count(), receiver_count);
     854             :   }
     855             : 
     856             :   Callable callable = CodeFactory::InterpreterPushArgsThenCall(
     857        1008 :       isolate(), receiver_mode, InterpreterPushArgsMode::kOther);
     858             :   Node* code_target = HeapConstant(callable.code());
     859             : 
     860        1008 :   TailCallStubThenBytecodeDispatch(callable.descriptor(), code_target, context,
     861             :                                    args_count, args.base_reg_location(),
     862             :                                    function);
      863             :   // TailCallStubThenBytecodeDispatch updates the accumulator with the result.
     864        2016 :   accumulator_use_ = accumulator_use_ | AccumulatorUse::kWrite;
     865        1008 : }
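                      : 
                      : // For example (hypothetical call): f(a, b) with an explicit receiver
                      : // passes the register list [receiver, a, b], so args.reg_count() is 3
                      : // and args_count above is 2; with kNullOrUndefined the receiver is not
                      : // in the list and the two counts coincide.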
     866             : 
     867             : template <class... TArgs>
     868        1008 : void InterpreterAssembler::CallJSAndDispatch(Node* function, Node* context,
     869             :                                              Node* arg_count,
     870             :                                              ConvertReceiverMode receiver_mode,
     871             :                                              TArgs... args) {
     872             :   DCHECK(Bytecodes::MakesCallAlongCriticalPath(bytecode_));
     873             :   DCHECK(Bytecodes::IsCallOrConstruct(bytecode_) ||
     874             :          bytecode_ == Bytecode::kInvokeIntrinsic);
     875             :   DCHECK_EQ(Bytecodes::GetReceiverMode(bytecode_), receiver_mode);
     876        1008 :   Callable callable = CodeFactory::Call(isolate());
     877             :   Node* code_target = HeapConstant(callable.code());
     878             : 
     879        1008 :   if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
      880             :     // The first argument (the receiver) is implied to be undefined.
     881        1008 :     TailCallStubThenBytecodeDispatch(
     882             :         callable.descriptor(), code_target, context, function, arg_count,
     883         504 :         static_cast<Node*>(UndefinedConstant()), args...);
     884             :   } else {
     885         504 :     TailCallStubThenBytecodeDispatch(callable.descriptor(), code_target,
     886             :                                      context, function, arg_count, args...);
     887             :   }
      888             :   // TailCallStubThenBytecodeDispatch updates the accumulator with the result.
     889        2016 :   accumulator_use_ = accumulator_use_ | AccumulatorUse::kWrite;
     890        1008 : }
     891             : 
     892             : // Instantiate CallJSAndDispatch() for argument counts used by interpreter
     893             : // generator.
     894             : template V8_EXPORT_PRIVATE void InterpreterAssembler::CallJSAndDispatch(
     895             :     Node* function, Node* context, Node* arg_count,
     896             :     ConvertReceiverMode receiver_mode);
     897             : template V8_EXPORT_PRIVATE void InterpreterAssembler::CallJSAndDispatch(
     898             :     Node* function, Node* context, Node* arg_count,
     899             :     ConvertReceiverMode receiver_mode, Node*);
     900             : template V8_EXPORT_PRIVATE void InterpreterAssembler::CallJSAndDispatch(
     901             :     Node* function, Node* context, Node* arg_count,
     902             :     ConvertReceiverMode receiver_mode, Node*, Node*);
     903             : template V8_EXPORT_PRIVATE void InterpreterAssembler::CallJSAndDispatch(
     904             :     Node* function, Node* context, Node* arg_count,
     905             :     ConvertReceiverMode receiver_mode, Node*, Node*, Node*);
     906             : 
     907         168 : void InterpreterAssembler::CallJSWithSpreadAndDispatch(
     908             :     Node* function, Node* context, const RegListNodePair& args, Node* slot_id,
     909             :     Node* maybe_feedback_vector) {
     910             :   DCHECK(Bytecodes::MakesCallAlongCriticalPath(bytecode_));
     911             :   DCHECK_EQ(Bytecodes::GetReceiverMode(bytecode_), ConvertReceiverMode::kAny);
     912         168 :   CollectCallFeedback(function, context, maybe_feedback_vector, slot_id);
     913         168 :   Comment("call using CallWithSpread builtin");
     914             :   Callable callable = CodeFactory::InterpreterPushArgsThenCall(
     915             :       isolate(), ConvertReceiverMode::kAny,
     916         168 :       InterpreterPushArgsMode::kWithFinalSpread);
     917             :   Node* code_target = HeapConstant(callable.code());
     918             : 
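                      :   // Note: the register list still includes the receiver; the builtin
                      :   // expects the argument count without it, hence the subtraction below.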
     919         336 :   Node* receiver_count = Int32Constant(1);
     920         336 :   Node* args_count = Int32Sub(args.reg_count(), receiver_count);
     921         168 :   TailCallStubThenBytecodeDispatch(callable.descriptor(), code_target, context,
     922             :                                    args_count, args.base_reg_location(),
     923             :                                    function);
      924             :   // TailCallStubThenBytecodeDispatch updates the accumulator with the result.
     925         336 :   accumulator_use_ = accumulator_use_ | AccumulatorUse::kWrite;
     926         168 : }
     927             : 
     928         168 : Node* InterpreterAssembler::Construct(Node* target, Node* context,
     929             :                                       Node* new_target,
     930             :                                       const RegListNodePair& args,
     931             :                                       Node* slot_id, Node* feedback_vector) {
     932             :   DCHECK(Bytecodes::MakesCallAlongCriticalPath(bytecode_));
     933         336 :   VARIABLE(var_result, MachineRepresentation::kTagged);
     934         336 :   VARIABLE(var_site, MachineRepresentation::kTagged);
     935         168 :   Label extra_checks(this, Label::kDeferred), return_result(this, &var_result),
     936         168 :       construct(this), construct_array(this, &var_site);
     937         336 :   GotoIf(IsUndefined(feedback_vector), &construct);
     938             : 
     939             :   // Increment the call count.
     940         168 :   IncrementCallCount(feedback_vector, slot_id);
     941             : 
     942             :   // Check if we have monomorphic {new_target} feedback already.
     943             :   TNode<MaybeObject> feedback =
     944         168 :       LoadFeedbackVectorSlot(feedback_vector, slot_id);
     945         336 :   Branch(IsWeakReferenceTo(feedback, CAST(new_target)), &construct,
     946         168 :          &extra_checks);
     947             : 
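                      :   // Feedback transitions handled below: uninitialized feedback is
                      :   // initialized to a weak {new_target} reference (or an AllocationSite
                      :   // when {target} is the Array constructor); a cleared weak reference
                      :   // may be re-initialized; any other mismatch goes megamorphic.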
     948         168 :   BIND(&extra_checks);
     949             :   {
     950         168 :     Label check_allocation_site(this), check_initialized(this),
     951         168 :         initialize(this), mark_megamorphic(this);
     952             : 
      953             :     // Check if it is a megamorphic {new_target}.
     954         168 :     Comment("check if megamorphic");
     955             :     Node* is_megamorphic = WordEqual(
     956         168 :         feedback, HeapConstant(FeedbackVector::MegamorphicSentinel(isolate())));
     957         168 :     GotoIf(is_megamorphic, &construct);
     958             : 
     959         168 :     Comment("check if weak reference");
     960         336 :     GotoIfNot(IsWeakOrCleared(feedback), &check_allocation_site);
     961             : 
     962             :     // If the weak reference is cleared, we have a new chance to become
     963             :     // monomorphic.
     964         168 :     Comment("check if weak reference is cleared");
     965         336 :     Branch(IsCleared(feedback), &initialize, &mark_megamorphic);
     966             : 
     967         168 :     BIND(&check_allocation_site);
     968             :     {
     969             :       // Check if it is an AllocationSite.
     970         168 :       Comment("check if allocation site");
     971             :       TNode<HeapObject> strong_feedback = CAST(feedback);
     972         336 :       GotoIfNot(IsAllocationSite(strong_feedback), &check_initialized);
     973             : 
     974             :       // Make sure that {target} and {new_target} are the Array constructor.
     975         672 :       Node* array_function = LoadContextElement(LoadNativeContext(context),
     976         168 :                                                 Context::ARRAY_FUNCTION_INDEX);
     977         336 :       GotoIfNot(WordEqual(target, array_function), &mark_megamorphic);
     978         336 :       GotoIfNot(WordEqual(new_target, array_function), &mark_megamorphic);
     979         168 :       var_site.Bind(strong_feedback);
     980         168 :       Goto(&construct_array);
     981             :     }
     982             : 
     983         168 :     BIND(&check_initialized);
     984             :     {
     985             :       // Check if it is uninitialized.
     986         168 :       Comment("check if uninitialized");
     987             :       Node* is_uninitialized =
     988         168 :           WordEqual(feedback, LoadRoot(RootIndex::kuninitialized_symbol));
     989         168 :       Branch(is_uninitialized, &initialize, &mark_megamorphic);
     990             :     }
     991             : 
     992         168 :     BIND(&initialize);
     993             :     {
     994         168 :       Comment("check if function in same native context");
     995         336 :       GotoIf(TaggedIsSmi(new_target), &mark_megamorphic);
     996             :       // Check if the {new_target} is a JSFunction or JSBoundFunction
     997             :       // in the current native context.
     998         336 :       VARIABLE(var_current, MachineRepresentation::kTagged, new_target);
     999         168 :       Label loop(this, &var_current), done_loop(this);
    1000         168 :       Goto(&loop);
    1001         168 :       BIND(&loop);
    1002             :       {
    1003         168 :         Label if_boundfunction(this), if_function(this);
    1004         168 :         Node* current = var_current.value();
    1005             :         CSA_ASSERT(this, TaggedIsNotSmi(current));
    1006         336 :         Node* current_instance_type = LoadInstanceType(current);
    1007         336 :         GotoIf(InstanceTypeEqual(current_instance_type, JS_BOUND_FUNCTION_TYPE),
    1008         168 :                &if_boundfunction);
    1009         336 :         Branch(InstanceTypeEqual(current_instance_type, JS_FUNCTION_TYPE),
    1010         168 :                &if_function, &mark_megamorphic);
    1011             : 
    1012         168 :         BIND(&if_function);
    1013             :         {
    1014             :           // Check that the JSFunction {current} is in the current native
    1015             :           // context.
    1016             :           Node* current_context =
    1017             :               LoadObjectField(current, JSFunction::kContextOffset);
    1018         336 :           Node* current_native_context = LoadNativeContext(current_context);
    1019         336 :           Branch(WordEqual(LoadNativeContext(context), current_native_context),
    1020         168 :                  &done_loop, &mark_megamorphic);
    1021             :         }
    1022             : 
    1023         168 :         BIND(&if_boundfunction);
    1024             :         {
    1025             :           // Continue with the [[BoundTargetFunction]] of {current}.
    1026             :           var_current.Bind(LoadObjectField(
    1027         168 :               current, JSBoundFunction::kBoundTargetFunctionOffset));
    1028         168 :           Goto(&loop);
    1029             :         }
    1030             :       }
    1031         168 :       BIND(&done_loop);
    1032             : 
    1033             :       // Create an AllocationSite if {target} and {new_target} refer
    1034             :       // to the current native context's Array constructor.
    1035         168 :       Label create_allocation_site(this), store_weak_reference(this);
    1036         336 :       GotoIfNot(WordEqual(target, new_target), &store_weak_reference);
    1037         672 :       Node* array_function = LoadContextElement(LoadNativeContext(context),
    1038         168 :                                                 Context::ARRAY_FUNCTION_INDEX);
    1039         336 :       Branch(WordEqual(target, array_function), &create_allocation_site,
    1040         168 :              &store_weak_reference);
    1041             : 
    1042         168 :       BIND(&create_allocation_site);
    1043             :       {
    1044         336 :         var_site.Bind(CreateAllocationSiteInFeedbackVector(feedback_vector,
    1045         504 :                                                            SmiTag(slot_id)));
    1046         168 :         ReportFeedbackUpdate(feedback_vector, slot_id,
    1047         168 :                              "Construct:CreateAllocationSite");
    1048         168 :         Goto(&construct_array);
    1049             :       }
    1050             : 
    1051         168 :       BIND(&store_weak_reference);
    1052             :       {
    1053             :         StoreWeakReferenceInFeedbackVector(feedback_vector, slot_id,
    1054         168 :                                            CAST(new_target));
    1055         168 :         ReportFeedbackUpdate(feedback_vector, slot_id,
    1056         168 :                              "Construct:StoreWeakReference");
    1057         168 :         Goto(&construct);
    1058             :       }
    1059             :     }
    1060             : 
    1061         168 :     BIND(&mark_megamorphic);
    1062             :     {
    1063             :       // MegamorphicSentinel is an immortal immovable object so
     1064             :       // a write barrier is not needed.
    1065         168 :       Comment("transition to megamorphic");
    1066             :       DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kmegamorphic_symbol));
    1067             :       StoreFeedbackVectorSlot(
    1068             :           feedback_vector, slot_id,
    1069         168 :           HeapConstant(FeedbackVector::MegamorphicSentinel(isolate())),
    1070         168 :           SKIP_WRITE_BARRIER);
    1071         168 :       ReportFeedbackUpdate(feedback_vector, slot_id,
    1072         168 :                            "Construct:TransitionMegamorphic");
    1073         168 :       Goto(&construct);
    1074             :     }
    1075             :   }
    1076             : 
    1077         168 :   BIND(&construct_array);
    1078             :   {
    1079             :     // TODO(bmeurer): Introduce a dedicated builtin to deal with the Array
    1080             :     // constructor feedback collection inside of Ignition.
    1081         168 :     Comment("call using ConstructArray builtin");
    1082             :     Callable callable = CodeFactory::InterpreterPushArgsThenConstruct(
    1083         168 :         isolate(), InterpreterPushArgsMode::kArrayFunction);
    1084             :     Node* code_target = HeapConstant(callable.code());
    1085         504 :     var_result.Bind(CallStub(callable.descriptor(), code_target, context,
    1086             :                              args.reg_count(), args.base_reg_location(), target,
    1087         504 :                              new_target, var_site.value()));
    1088         168 :     Goto(&return_result);
    1089             :   }
    1090             : 
    1091         168 :   BIND(&construct);
    1092             :   {
    1093             :     // TODO(bmeurer): Remove the generic type_info parameter from the Construct.
    1094         168 :     Comment("call using Construct builtin");
    1095             :     Callable callable = CodeFactory::InterpreterPushArgsThenConstruct(
    1096         168 :         isolate(), InterpreterPushArgsMode::kOther);
    1097             :     Node* code_target = HeapConstant(callable.code());
    1098         504 :     var_result.Bind(CallStub(callable.descriptor(), code_target, context,
    1099             :                              args.reg_count(), args.base_reg_location(), target,
    1100         504 :                              new_target, UndefinedConstant()));
    1101         168 :     Goto(&return_result);
    1102             :   }
    1103             : 
    1104         168 :   BIND(&return_result);
    1105         336 :   return var_result.value();
    1106             : }
    1107             : 
    1108         168 : Node* InterpreterAssembler::ConstructWithSpread(Node* target, Node* context,
    1109             :                                                 Node* new_target,
    1110             :                                                 const RegListNodePair& args,
    1111             :                                                 Node* slot_id,
    1112             :                                                 Node* feedback_vector) {
    1113             :   // TODO(bmeurer): Unify this with the Construct bytecode feedback
    1114             :   // above once we have a way to pass the AllocationSite to the Array
    1115             :   // constructor _and_ spread the last argument at the same time.
    1116             :   DCHECK(Bytecodes::MakesCallAlongCriticalPath(bytecode_));
    1117         336 :   Label extra_checks(this, Label::kDeferred), construct(this);
    1118         336 :   GotoIf(IsUndefined(feedback_vector), &construct);
    1119             : 
    1120             :   // Increment the call count.
    1121         168 :   IncrementCallCount(feedback_vector, slot_id);
    1122             : 
    1123             :   // Check if we have monomorphic {new_target} feedback already.
    1124             :   TNode<MaybeObject> feedback =
    1125         168 :       LoadFeedbackVectorSlot(feedback_vector, slot_id);
    1126         336 :   Branch(IsWeakReferenceTo(feedback, CAST(new_target)), &construct,
    1127         168 :          &extra_checks);
    1128             : 
    1129         168 :   BIND(&extra_checks);
    1130             :   {
    1131         168 :     Label check_initialized(this), initialize(this), mark_megamorphic(this);
    1132             : 
    1133             :     // Check if it is a megamorphic {new_target}.
    1134         168 :     Comment("check if megamorphic");
    1135             :     Node* is_megamorphic = WordEqual(
    1136         168 :         feedback, HeapConstant(FeedbackVector::MegamorphicSentinel(isolate())));
    1137         168 :     GotoIf(is_megamorphic, &construct);
    1138             : 
    1139         168 :     Comment("check if weak reference");
    1140         336 :     GotoIfNot(IsWeakOrCleared(feedback), &check_initialized);
    1141             : 
    1142             :     // If the weak reference is cleared, we have a new chance to become
    1143             :     // monomorphic.
    1144         168 :     Comment("check if weak reference is cleared");
    1145         336 :     Branch(IsCleared(feedback), &initialize, &mark_megamorphic);
    1146             : 
    1147         168 :     BIND(&check_initialized);
    1148             :     {
    1149             :       // Check if it is uninitialized.
    1150         168 :       Comment("check if uninitialized");
    1151             :       Node* is_uninitialized =
    1152         168 :           WordEqual(feedback, LoadRoot(RootIndex::kuninitialized_symbol));
    1153         168 :       Branch(is_uninitialized, &initialize, &mark_megamorphic);
    1154             :     }
    1155             : 
    1156         168 :     BIND(&initialize);
    1157             :     {
    1158         168 :       Comment("check if function in same native context");
    1159         336 :       GotoIf(TaggedIsSmi(new_target), &mark_megamorphic);
    1160             :       // Check if the {new_target} is a JSFunction or JSBoundFunction
    1161             :       // in the current native context.
    1162         336 :       VARIABLE(var_current, MachineRepresentation::kTagged, new_target);
    1163         168 :       Label loop(this, &var_current), done_loop(this);
    1164         168 :       Goto(&loop);
    1165         168 :       BIND(&loop);
    1166             :       {
    1167         168 :         Label if_boundfunction(this), if_function(this);
    1168         168 :         Node* current = var_current.value();
    1169             :         CSA_ASSERT(this, TaggedIsNotSmi(current));
    1170         336 :         Node* current_instance_type = LoadInstanceType(current);
    1171         336 :         GotoIf(InstanceTypeEqual(current_instance_type, JS_BOUND_FUNCTION_TYPE),
    1172         168 :                &if_boundfunction);
    1173         336 :         Branch(InstanceTypeEqual(current_instance_type, JS_FUNCTION_TYPE),
    1174         168 :                &if_function, &mark_megamorphic);
    1175             : 
    1176         168 :         BIND(&if_function);
    1177             :         {
    1178             :           // Check that the JSFunction {current} is in the current native
    1179             :           // context.
    1180             :           Node* current_context =
    1181             :               LoadObjectField(current, JSFunction::kContextOffset);
    1182         336 :           Node* current_native_context = LoadNativeContext(current_context);
    1183         336 :           Branch(WordEqual(LoadNativeContext(context), current_native_context),
    1184         168 :                  &done_loop, &mark_megamorphic);
    1185             :         }
    1186             : 
    1187         168 :         BIND(&if_boundfunction);
    1188             :         {
    1189             :           // Continue with the [[BoundTargetFunction]] of {current}.
    1190             :           var_current.Bind(LoadObjectField(
    1191         168 :               current, JSBoundFunction::kBoundTargetFunctionOffset));
    1192         168 :           Goto(&loop);
    1193             :         }
    1194             :       }
    1195         168 :       BIND(&done_loop);
    1196             :       StoreWeakReferenceInFeedbackVector(feedback_vector, slot_id,
    1197         168 :                                          CAST(new_target));
    1198         168 :       ReportFeedbackUpdate(feedback_vector, slot_id,
    1199         168 :                            "ConstructWithSpread:Initialize");
    1200         168 :       Goto(&construct);
    1201             :     }
    1202             : 
    1203         168 :     BIND(&mark_megamorphic);
    1204             :     {
    1205             :       // MegamorphicSentinel is an immortal immovable object so
     1206             :       // a write barrier is not needed.
    1207         168 :       Comment("transition to megamorphic");
    1208             :       DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kmegamorphic_symbol));
    1209             :       StoreFeedbackVectorSlot(
    1210             :           feedback_vector, slot_id,
    1211         168 :           HeapConstant(FeedbackVector::MegamorphicSentinel(isolate())),
    1212         168 :           SKIP_WRITE_BARRIER);
    1213         168 :       ReportFeedbackUpdate(feedback_vector, slot_id,
    1214         168 :                            "ConstructWithSpread:TransitionMegamorphic");
    1215         168 :       Goto(&construct);
    1216             :     }
    1217             :   }
    1218             : 
    1219         168 :   BIND(&construct);
    1220         168 :   Comment("call using ConstructWithSpread builtin");
    1221             :   Callable callable = CodeFactory::InterpreterPushArgsThenConstruct(
    1222         168 :       isolate(), InterpreterPushArgsMode::kWithFinalSpread);
    1223             :   Node* code_target = HeapConstant(callable.code());
    1224         504 :   return CallStub(callable.descriptor(), code_target, context, args.reg_count(),
    1225             :                   args.base_reg_location(), target, new_target,
    1226         504 :                   UndefinedConstant());
    1227             : }
    1228             : 
    1229         342 : Node* InterpreterAssembler::CallRuntimeN(Node* function_id, Node* context,
    1230             :                                          const RegListNodePair& args,
    1231             :                                          int result_size) {
    1232             :   DCHECK(Bytecodes::MakesCallAlongCriticalPath(bytecode_));
    1233             :   DCHECK(Bytecodes::IsCallRuntime(bytecode_));
    1234         342 :   Callable callable = CodeFactory::InterpreterCEntry(isolate(), result_size);
    1235             :   Node* code_target = HeapConstant(callable.code());
    1236             : 
    1237             :   // Get the function entry from the function id.
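                      :   // (Sketch of the address computation below:
                      :   //    entry = function_table
                      :   //          + function_id * sizeof(Runtime::Function)
                      :   //          + offsetof(Runtime::Function, entry),
                      :   //  i.e. a load of runtime_function_table[function_id].entry.)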
    1238         684 :   Node* function_table = ExternalConstant(
    1239         342 :       ExternalReference::runtime_function_table_address(isolate()));
    1240             :   Node* function_offset =
    1241        1026 :       Int32Mul(function_id, Int32Constant(sizeof(Runtime::Function)));
    1242             :   Node* function =
    1243        1026 :       IntPtrAdd(function_table, ChangeUint32ToWord(function_offset));
    1244             :   Node* function_entry =
    1245             :       Load(MachineType::Pointer(), function,
    1246         684 :            IntPtrConstant(offsetof(Runtime::Function, entry)));
    1247             : 
    1248         684 :   return CallStubR(StubCallMode::kCallCodeObject, callable.descriptor(),
    1249             :                    result_size, code_target, context, args.reg_count(),
    1250         342 :                    args.base_reg_location(), function_entry);
    1251             : }
    1252             : 
    1253        4256 : void InterpreterAssembler::UpdateInterruptBudget(Node* weight, bool backward) {
    1254        4256 :   Comment("[ UpdateInterruptBudget");
    1255             : 
    1256             :   // Assert that the weight is positive (negative weights should be implemented
    1257             :   // as backward updates).
    1258             :   CSA_ASSERT(this, Int32GreaterThanOrEqual(weight, Int32Constant(0)));
    1259             : 
    1260        4256 :   Label load_budget_from_bytecode(this), load_budget_done(this);
    1261        4256 :   TNode<JSFunction> function = CAST(LoadRegister(Register::function_closure()));
    1262             :   TNode<FeedbackCell> feedback_cell =
    1263        4256 :       CAST(LoadObjectField(function, JSFunction::kFeedbackCellOffset));
    1264             :   TNode<Int32T> old_budget = LoadObjectField<Int32T>(
    1265             :       feedback_cell, FeedbackCell::kInterruptBudgetOffset);
    1266             : 
    1267             :   // Make sure we include the current bytecode in the budget calculation.
    1268             :   TNode<Int32T> budget_after_bytecode =
    1269        8512 :       Signed(Int32Sub(old_budget, Int32Constant(CurrentBytecodeSize())));
    1270             : 
    1271        4256 :   Label done(this);
    1272             :   TVARIABLE(Int32T, new_budget);
    1273        4256 :   if (backward) {
     1274             :     // Decrease the budget by |weight| and check if it drops below zero.
    1275        1120 :     new_budget = Signed(Int32Sub(budget_after_bytecode, weight));
    1276             :     Node* condition =
    1277        1680 :         Int32GreaterThanOrEqual(new_budget.value(), Int32Constant(0));
    1278         560 :     Label ok(this), interrupt_check(this, Label::kDeferred);
    1279         560 :     Branch(condition, &ok, &interrupt_check);
    1280             : 
    1281         560 :     BIND(&interrupt_check);
    1282             :     CallRuntime(Runtime::kBytecodeBudgetInterrupt, GetContext(), function);
    1283         560 :     Goto(&done);
    1284             : 
    1285         560 :     BIND(&ok);
    1286             :   } else {
    1287             :     // For a forward jump, we know we only increase the interrupt budget, so
    1288             :     // no need to check if it's below zero.
    1289        7392 :     new_budget = Signed(Int32Add(budget_after_bytecode, weight));
    1290             :   }
    1291             : 
    1292             :   // Update budget.
    1293             :   StoreObjectFieldNoWriteBarrier(
    1294             :       feedback_cell, FeedbackCell::kInterruptBudgetOffset, new_budget.value(),
    1295        4256 :       MachineRepresentation::kWord32);
    1296        4256 :   Goto(&done);
    1297        4256 :   BIND(&done);
    1298        4256 :   Comment("] UpdateInterruptBudget");
    1299        4256 : }
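                      : 
                      : // A minimal plain-C++ sketch of the budget logic emitted above (names
                      : // here are illustrative, not real V8 API):
                      : //
                      : //   int32_t budget = feedback_cell.interrupt_budget - CurrentBytecodeSize();
                      : //   if (backward) {
                      : //     budget -= weight;
                      : //     if (budget < 0) {
                      : //       BytecodeBudgetInterrupt(function);  // runtime resets the budget
                      : //       return;
                      : //     }
                      : //   } else {
                      : //     budget += weight;  // forward jumps only replenish the budget
                      : //   }
                      : //   feedback_cell.interrupt_budget = budget;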
    1300             : 
    1301       40600 : Node* InterpreterAssembler::Advance() { return Advance(CurrentBytecodeSize()); }
    1302             : 
    1303       40712 : Node* InterpreterAssembler::Advance(int delta) {
    1304       81424 :   return Advance(IntPtrConstant(delta));
    1305             : }
    1306             : 
    1307       44744 : Node* InterpreterAssembler::Advance(Node* delta, bool backward) {
    1308             : #ifdef V8_TRACE_IGNITION
    1309             :   TraceBytecode(Runtime::kInterpreterTraceBytecodeExit);
    1310             : #endif
    1311         672 :   Node* next_offset = backward ? IntPtrSub(BytecodeOffset(), delta)
    1312      178640 :                                : IntPtrAdd(BytecodeOffset(), delta);
    1313       44744 :   bytecode_offset_.Bind(next_offset);
    1314       44744 :   return next_offset;
    1315             : }
    1316             : 
    1317        4032 : Node* InterpreterAssembler::Jump(Node* delta, bool backward) {
    1318             :   DCHECK(!Bytecodes::IsStarLookahead(bytecode_, operand_scale_));
    1319             : 
    1320       12096 :   UpdateInterruptBudget(TruncateIntPtrToInt32(delta), backward);
    1321        4032 :   Node* new_bytecode_offset = Advance(delta, backward);
    1322        4032 :   Node* target_bytecode = LoadBytecode(new_bytecode_offset);
    1323        4032 :   return DispatchToBytecode(target_bytecode, new_bytecode_offset);
    1324             : }
    1325             : 
    1326        3696 : Node* InterpreterAssembler::Jump(Node* delta) { return Jump(delta, false); }
    1327             : 
    1328         336 : Node* InterpreterAssembler::JumpBackward(Node* delta) {
    1329         336 :   return Jump(delta, true);
    1330             : }
    1331             : 
    1332        2016 : void InterpreterAssembler::JumpConditional(Node* condition, Node* delta) {
    1333        4032 :   Label match(this), no_match(this);
    1334             : 
    1335        2016 :   Branch(condition, &match, &no_match);
    1336        2016 :   BIND(&match);
    1337             :   Jump(delta);
    1338        2016 :   BIND(&no_match);
    1339        2016 :   Dispatch();
    1340        2016 : }
    1341             : 
    1342        1344 : void InterpreterAssembler::JumpIfWordEqual(Node* lhs, Node* rhs, Node* delta) {
    1343        2688 :   JumpConditional(WordEqual(lhs, rhs), delta);
    1344        1344 : }
    1345             : 
    1346         672 : void InterpreterAssembler::JumpIfWordNotEqual(Node* lhs, Node* rhs,
    1347             :                                               Node* delta) {
    1348        1344 :   JumpConditional(WordNotEqual(lhs, rhs), delta);
    1349         672 : }
    1350             : 
    1351       44744 : Node* InterpreterAssembler::LoadBytecode(Node* bytecode_offset) {
    1352             :   Node* bytecode =
    1353       44744 :       Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(), bytecode_offset);
    1354       89488 :   return ChangeUint32ToWord(bytecode);
    1355             : }
    1356             : 
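                      : // If the bytecode at the new offset is Star, the dispatch is
                      : // short-circuited: the Star is executed inline (see InlineStar) and the
                      : // bytecode following it becomes the dispatch target, saving one full
                      : // dispatch round trip for the common "<op>; Star r" sequence.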
    1357        2240 : Node* InterpreterAssembler::StarDispatchLookahead(Node* target_bytecode) {
    1358        4480 :   Label do_inline_star(this), done(this);
    1359             : 
    1360        4480 :   Variable var_bytecode(this, MachineType::PointerRepresentation());
    1361        2240 :   var_bytecode.Bind(target_bytecode);
    1362             : 
    1363        4480 :   Node* star_bytecode = IntPtrConstant(static_cast<int>(Bytecode::kStar));
    1364        4480 :   Node* is_star = WordEqual(target_bytecode, star_bytecode);
    1365        2240 :   Branch(is_star, &do_inline_star, &done);
    1366             : 
    1367        2240 :   BIND(&do_inline_star);
    1368             :   {
    1369        2240 :     InlineStar();
    1370        2240 :     var_bytecode.Bind(LoadBytecode(BytecodeOffset()));
    1371        2240 :     Goto(&done);
    1372             :   }
    1373        2240 :   BIND(&done);
    1374        4480 :   return var_bytecode.value();
    1375             : }
    1376             : 
    1377        2240 : void InterpreterAssembler::InlineStar() {
    1378        2240 :   Bytecode previous_bytecode = bytecode_;
    1379        2240 :   AccumulatorUse previous_acc_use = accumulator_use_;
    1380             : 
    1381        2240 :   bytecode_ = Bytecode::kStar;
    1382        2240 :   accumulator_use_ = AccumulatorUse::kNone;
    1383             : 
    1384             : #ifdef V8_TRACE_IGNITION
    1385             :   TraceBytecode(Runtime::kInterpreterTraceBytecodeEntry);
    1386             : #endif
    1387        2240 :   StoreRegister(GetAccumulator(),
    1388        2240 :                 BytecodeOperandReg(0, LoadSensitivity::kSafe));
    1389             : 
    1390             :   DCHECK_EQ(accumulator_use_, Bytecodes::GetAccumulatorUse(bytecode_));
    1391             : 
    1392             :   Advance();
    1393        2240 :   bytecode_ = previous_bytecode;
    1394        2240 :   accumulator_use_ = previous_acc_use;
    1395        2240 : }
    1396             : 
    1397       38360 : Node* InterpreterAssembler::Dispatch() {
    1398       38360 :   Comment("========= Dispatch");
    1399             :   DCHECK_IMPLIES(Bytecodes::MakesCallAlongCriticalPath(bytecode_), made_call_);
    1400             :   Node* target_offset = Advance();
    1401       38360 :   Node* target_bytecode = LoadBytecode(target_offset);
    1402             : 
    1403       38360 :   if (Bytecodes::IsStarLookahead(bytecode_, operand_scale_)) {
    1404        2240 :     target_bytecode = StarDispatchLookahead(target_bytecode);
    1405             :   }
    1406       38360 :   return DispatchToBytecode(target_bytecode, BytecodeOffset());
    1407             : }
    1408             : 
    1409       43568 : Node* InterpreterAssembler::DispatchToBytecode(Node* target_bytecode,
    1410             :                                                Node* new_bytecode_offset) {
    1411       43568 :   if (FLAG_trace_ignition_dispatches) {
    1412           0 :     TraceBytecodeDispatch(target_bytecode);
    1413             :   }
    1414             : 
    1415             :   Node* target_code_entry =
    1416       43568 :       Load(MachineType::Pointer(), DispatchTableRawPointer(),
    1417       87136 :            TimesSystemPointerSize(target_bytecode));
    1418             : 
    1419             :   return DispatchToBytecodeHandlerEntry(target_code_entry, new_bytecode_offset,
    1420       43568 :                                         target_bytecode);
    1421             : }
    1422             : 
    1423           0 : Node* InterpreterAssembler::DispatchToBytecodeHandler(Node* handler,
    1424             :                                                       Node* bytecode_offset,
    1425             :                                                       Node* target_bytecode) {
    1426             :   // TODO(ishell): Add CSA::CodeEntryPoint(code).
    1427             :   Node* handler_entry =
    1428             :       IntPtrAdd(BitcastTaggedToWord(handler),
    1429           0 :                 IntPtrConstant(Code::kHeaderSize - kHeapObjectTag));
    1430             :   return DispatchToBytecodeHandlerEntry(handler_entry, bytecode_offset,
    1431           0 :                                         target_bytecode);
    1432             : }
    1433             : 
    1434       43680 : Node* InterpreterAssembler::DispatchToBytecodeHandlerEntry(
    1435             :     Node* handler_entry, Node* bytecode_offset, Node* target_bytecode) {
    1436             :   // Propagate speculation poisoning.
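                      :   // (The poison is a Spectre-style mitigation: on a mispredicted
                      :   //  dispatch path the mask zeroes the entry, so speculative execution
                      :   //  cannot load through an attacker-controlled handler address.)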
    1437       87360 :   Node* poisoned_handler_entry = WordPoisonOnSpeculation(handler_entry);
    1438       87360 :   return TailCallBytecodeDispatch(
    1439             :       InterpreterDispatchDescriptor{}, poisoned_handler_entry,
    1440             :       GetAccumulatorUnchecked(), bytecode_offset, BytecodeArrayTaggedPointer(),
    1441       87360 :       DispatchTableRawPointer());
    1442             : }
    1443             : 
    1444         112 : void InterpreterAssembler::DispatchWide(OperandScale operand_scale) {
    1445             :   // Dispatching a wide bytecode requires treating the prefix
     1446             :   // bytecode as a base pointer into the dispatch table and dispatching
    1447             :   // the bytecode that follows relative to this base.
    1448             :   //
    1449             :   //   Indices 0-255 correspond to bytecodes with operand_scale == 0
    1450             :   //   Indices 256-511 correspond to bytecodes with operand_scale == 1
    1451             :   //   Indices 512-767 correspond to bytecodes with operand_scale == 2
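                      :   //
                      :   //   For example (assuming kBitsPerByte == 8): a Wide-prefixed bytecode
                      :   //   0x0b dispatches through index (1 << 8) + 0x0b == 267, and an
                      :   //   ExtraWide-prefixed one through (2 << 8) + 0x0b == 523.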
    1452             :   DCHECK_IMPLIES(Bytecodes::MakesCallAlongCriticalPath(bytecode_), made_call_);
    1453         112 :   Node* next_bytecode_offset = Advance(1);
    1454         112 :   Node* next_bytecode = LoadBytecode(next_bytecode_offset);
    1455             : 
    1456         112 :   if (FLAG_trace_ignition_dispatches) {
    1457           0 :     TraceBytecodeDispatch(next_bytecode);
    1458             :   }
    1459             : 
    1460             :   Node* base_index;
    1461         112 :   switch (operand_scale) {
    1462             :     case OperandScale::kDouble:
    1463         112 :       base_index = IntPtrConstant(1 << kBitsPerByte);
    1464          56 :       break;
    1465             :     case OperandScale::kQuadruple:
    1466         112 :       base_index = IntPtrConstant(2 << kBitsPerByte);
    1467          56 :       break;
    1468             :     default:
    1469           0 :       UNREACHABLE();
    1470             :   }
    1471         224 :   Node* target_index = IntPtrAdd(base_index, next_bytecode);
    1472             :   Node* target_code_entry =
    1473         112 :       Load(MachineType::Pointer(), DispatchTableRawPointer(),
    1474         224 :            TimesSystemPointerSize(target_index));
    1475             : 
    1476             :   DispatchToBytecodeHandlerEntry(target_code_entry, next_bytecode_offset,
    1477         112 :                                  next_bytecode);
    1478         112 : }
    1479             : 
    1480         224 : void InterpreterAssembler::UpdateInterruptBudgetOnReturn() {
    1481             :   // TODO(rmcilroy): Investigate whether it is worth supporting self
    1482             :   // optimization of primitive functions like FullCodegen.
    1483             : 
    1484             :   // Update profiling count by the number of bytes between the end of the
     1485             :   // current bytecode and the start of the first one, to simulate a backedge
     1486             :   // to the start of the function.
    1487             :   //
    1488             :   // With headers and current offset, the bytecode array layout looks like:
    1489             :   //
    1490             :   //           <---------- simulated backedge ----------
    1491             :   // | header | first bytecode | .... | return bytecode |
    1492             :   //  |<------ current offset ------->
    1493             :   //  ^ tagged bytecode array pointer
    1494             :   //
    1495             :   // UpdateInterruptBudget already handles adding the bytecode size to the
    1496             :   // length of the back-edge, so we just have to correct for the non-zero offset
    1497             :   // of the first bytecode.
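                      :   //
                      :   //   E.g. (illustrative numbers): a return bytecode at offset
                      :   //   kFirstBytecodeOffset + 24 produces a profiling weight of 24, as if
                      :   //   a 24-byte backward jump to the first bytecode had been taken.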
    1498             : 
    1499             :   const int kFirstBytecodeOffset = BytecodeArray::kHeaderSize - kHeapObjectTag;
    1500        1120 :   Node* profiling_weight = Int32Sub(TruncateIntPtrToInt32(BytecodeOffset()),
    1501         672 :                                     Int32Constant(kFirstBytecodeOffset));
    1502         224 :   UpdateInterruptBudget(profiling_weight, true);
    1503         224 : }
    1504             : 
    1505         168 : Node* InterpreterAssembler::LoadOSRNestingLevel() {
    1506         504 :   return LoadObjectField(BytecodeArrayTaggedPointer(),
    1507             :                          BytecodeArray::kOSRNestingLevelOffset,
    1508         336 :                          MachineType::Int8());
    1509             : }
    1510             : 
    1511        1960 : void InterpreterAssembler::Abort(AbortReason abort_reason) {
    1512        1960 :   disable_stack_check_across_call_ = true;
    1513        1960 :   Node* abort_id = SmiConstant(abort_reason);
    1514             :   CallRuntime(Runtime::kAbort, GetContext(), abort_id);
    1515        1960 :   disable_stack_check_across_call_ = false;
    1516        1960 : }
    1517             : 
    1518           0 : void InterpreterAssembler::AbortIfWordNotEqual(Node* lhs, Node* rhs,
    1519             :                                                AbortReason abort_reason) {
    1520           0 :   Label ok(this), abort(this, Label::kDeferred);
    1521           0 :   Branch(WordEqual(lhs, rhs), &ok, &abort);
    1522             : 
    1523           0 :   BIND(&abort);
    1524           0 :   Abort(abort_reason);
    1525           0 :   Goto(&ok);
    1526             : 
    1527           0 :   BIND(&ok);
    1528           0 : }
    1529             : 
    1530        1176 : void InterpreterAssembler::MaybeDropFrames(Node* context) {
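                      :   // (restart_fp holds the frame pointer of a frame the debugger has
                      :   //  requested to restart; a null value means no restart is pending.)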
    1531             :   Node* restart_fp_address =
    1532        2352 :       ExternalConstant(ExternalReference::debug_restart_fp_address(isolate()));
    1533             : 
    1534        1176 :   Node* restart_fp = Load(MachineType::Pointer(), restart_fp_address);
    1535        2352 :   Node* null = IntPtrConstant(0);
    1536             : 
    1537        1176 :   Label ok(this), drop_frames(this);
    1538        2352 :   Branch(IntPtrEqual(restart_fp, null), &ok, &drop_frames);
    1539             : 
    1540        1176 :   BIND(&drop_frames);
    1541             :   // We don't expect this call to return since the frame dropper tears down
    1542             :   // the stack and jumps into the function on the target frame to restart it.
    1543        2352 :   CallStub(CodeFactory::FrameDropperTrampoline(isolate()), context, restart_fp);
    1544        1176 :   Abort(AbortReason::kUnexpectedReturnFromFrameDropper);
    1545        1176 :   Goto(&ok);
    1546             : 
    1547        1176 :   BIND(&ok);
    1548        1176 : }
    1549             : 
    1550           0 : void InterpreterAssembler::TraceBytecode(Runtime::FunctionId function_id) {
    1551             :   CallRuntime(function_id, GetContext(), BytecodeArrayTaggedPointer(),
    1552           0 :               SmiTag(BytecodeOffset()), GetAccumulatorUnchecked());
    1553           0 : }
    1554             : 
    1555           0 : void InterpreterAssembler::TraceBytecodeDispatch(Node* target_bytecode) {
    1556           0 :   Node* counters_table = ExternalConstant(
    1557           0 :       ExternalReference::interpreter_dispatch_counters(isolate()));
    1558           0 :   Node* source_bytecode_table_index = IntPtrConstant(
    1559           0 :       static_cast<int>(bytecode_) * (static_cast<int>(Bytecode::kLast) + 1));
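                      :   // The counters table is a flattened (kLast + 1) x (kLast + 1) matrix
                      :   // indexed as counters[source_bytecode][target_bytecode], hence the
                      :   // row stride of (Bytecode::kLast + 1) above.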
    1560             : 
    1561           0 :   Node* counter_offset = TimesSystemPointerSize(
    1562           0 :       IntPtrAdd(source_bytecode_table_index, target_bytecode));
    1563             :   Node* old_counter =
    1564           0 :       Load(MachineType::IntPtr(), counters_table, counter_offset);
    1565             : 
    1566           0 :   Label counter_ok(this), counter_saturated(this, Label::kDeferred);
    1567             : 
    1568           0 :   Node* counter_reached_max = WordEqual(
    1569           0 :       old_counter, IntPtrConstant(std::numeric_limits<uintptr_t>::max()));
    1570           0 :   Branch(counter_reached_max, &counter_saturated, &counter_ok);
    1571             : 
    1572           0 :   BIND(&counter_ok);
    1573             :   {
    1574           0 :     Node* new_counter = IntPtrAdd(old_counter, IntPtrConstant(1));
    1575             :     StoreNoWriteBarrier(MachineType::PointerRepresentation(), counters_table,
    1576           0 :                         counter_offset, new_counter);
    1577           0 :     Goto(&counter_saturated);
    1578             :   }
    1579             : 
    1580           0 :   BIND(&counter_saturated);
    1581           0 : }
    1582             : 
    1583             : // static
    1584         590 : bool InterpreterAssembler::TargetSupportsUnalignedAccess() {
    1585             : #if V8_TARGET_ARCH_MIPS || V8_TARGET_ARCH_MIPS64
    1586             :   return false;
    1587             : #elif V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_S390 || \
    1588             :     V8_TARGET_ARCH_ARM || V8_TARGET_ARCH_ARM64 || V8_TARGET_ARCH_PPC
    1589         590 :   return true;
    1590             : #else
    1591             : #error "Unknown Architecture"
    1592             : #endif
    1593             : }
    1594             : 
    1595           0 : void InterpreterAssembler::AbortIfRegisterCountInvalid(
    1596             :     Node* parameters_and_registers, Node* formal_parameter_count,
    1597             :     Node* register_count) {
    1598           0 :   Node* array_size = LoadAndUntagFixedArrayBaseLength(parameters_and_registers);
    1599             : 
    1600           0 :   Label ok(this), abort(this, Label::kDeferred);
    1601           0 :   Branch(UintPtrLessThanOrEqual(
    1602           0 :              IntPtrAdd(formal_parameter_count, register_count), array_size),
    1603           0 :          &ok, &abort);
    1604             : 
    1605           0 :   BIND(&abort);
    1606           0 :   Abort(AbortReason::kInvalidParametersAndRegistersInGenerator);
    1607           0 :   Goto(&ok);
    1608             : 
    1609           0 :   BIND(&ok);
    1610           0 : }
    1611             : 
    1612         168 : Node* InterpreterAssembler::ExportParametersAndRegisterFile(
    1613             :     TNode<FixedArray> array, const RegListNodePair& registers,
    1614             :     TNode<Int32T> formal_parameter_count) {
    1615             :   // Store the formal parameters (without receiver) followed by the
    1616             :   // registers into the generator's internal parameters_and_registers field.
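                      :   // Layout sketch: array[0 .. P-1] receive the parameters a0 .. aP-1 and
                      :   // array[P .. P+R-1] receive the interpreter registers r0 .. rR-1, where
                      :   // P is the formal parameter count and R the register count.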
    1617             :   TNode<IntPtrT> formal_parameter_count_intptr =
    1618         168 :       ChangeInt32ToIntPtr(formal_parameter_count);
    1619         336 :   Node* register_count = ChangeUint32ToWord(registers.reg_count());
    1620         168 :   if (FLAG_debug_code) {
    1621             :     CSA_ASSERT(this, IntPtrEqual(registers.base_reg_location(),
    1622             :                                  RegisterLocation(Register(0))));
    1623             :     AbortIfRegisterCountInvalid(array, formal_parameter_count_intptr,
    1624           0 :                                 register_count);
    1625             :   }
    1626             : 
    1627             :   {
    1628         336 :     Variable var_index(this, MachineType::PointerRepresentation());
    1629         336 :     var_index.Bind(IntPtrConstant(0));
    1630             : 
    1631             :     // Iterate over parameters and write them into the array.
    1632         168 :     Label loop(this, &var_index), done_loop(this);
    1633             : 
    1634             :     Node* reg_base = IntPtrAdd(
    1635         504 :         IntPtrConstant(Register::FromParameterIndex(0, 1).ToOperand() - 1),
    1636         168 :         formal_parameter_count_intptr);
    1637             : 
    1638         168 :     Goto(&loop);
    1639         168 :     BIND(&loop);
    1640             :     {
    1641         168 :       Node* index = var_index.value();
    1642         336 :       GotoIfNot(UintPtrLessThan(index, formal_parameter_count_intptr),
    1643         168 :                 &done_loop);
    1644             : 
    1645         336 :       Node* reg_index = IntPtrSub(reg_base, index);
    1646         168 :       Node* value = LoadRegister(reg_index);
    1647             : 
    1648         168 :       StoreFixedArrayElement(array, index, value);
    1649             : 
    1650         504 :       var_index.Bind(IntPtrAdd(index, IntPtrConstant(1)));
    1651         168 :       Goto(&loop);
    1652             :     }
    1653         168 :     BIND(&done_loop);
    1654             :   }
    1655             : 
    1656             :   {
    1657             :     // Iterate over register file and write values into array.
     1658             :     // Iterate over the register file and write its values into the array.
    1659             :     // BytecodeGraphBuilder::VisitResumeGenerator.
    1660         336 :     Variable var_index(this, MachineType::PointerRepresentation());
    1661         336 :     var_index.Bind(IntPtrConstant(0));
    1662             : 
    1663         168 :     Label loop(this, &var_index), done_loop(this);
    1664         168 :     Goto(&loop);
    1665         168 :     BIND(&loop);
    1666             :     {
    1667         168 :       Node* index = var_index.value();
    1668         336 :       GotoIfNot(UintPtrLessThan(index, register_count), &done_loop);
    1669             : 
    1670             :       Node* reg_index =
    1671         504 :           IntPtrSub(IntPtrConstant(Register(0).ToOperand()), index);
    1672         168 :       Node* value = LoadRegister(reg_index);
    1673             : 
    1674         336 :       Node* array_index = IntPtrAdd(formal_parameter_count_intptr, index);
    1675         168 :       StoreFixedArrayElement(array, array_index, value);
    1676             : 
    1677         504 :       var_index.Bind(IntPtrAdd(index, IntPtrConstant(1)));
    1678         168 :       Goto(&loop);
    1679             :     }
    1680         168 :     BIND(&done_loop);
    1681             :   }
    1682             : 
    1683         168 :   return array;
    1684             : }
    1685             : 
    1686         168 : Node* InterpreterAssembler::ImportRegisterFile(
    1687             :     TNode<FixedArray> array, const RegListNodePair& registers,
    1688             :     TNode<Int32T> formal_parameter_count) {
    1689             :   TNode<IntPtrT> formal_parameter_count_intptr =
    1690         168 :       ChangeInt32ToIntPtr(formal_parameter_count);
    1691         168 :   TNode<UintPtrT> register_count = ChangeUint32ToWord(registers.reg_count());
    1692         168 :   if (FLAG_debug_code) {
    1693             :     CSA_ASSERT(this, IntPtrEqual(registers.base_reg_location(),
    1694             :                                  RegisterLocation(Register(0))));
    1695             :     AbortIfRegisterCountInvalid(array, formal_parameter_count_intptr,
    1696           0 :                                 register_count);
    1697             :   }
    1698             : 
    1699         168 :   TVARIABLE(IntPtrT, var_index, IntPtrConstant(0));
    1700             : 
     1701             :   // Iterate over the array and write its values into the register file.
     1702             :   // Also erase the array contents so they are not kept alive artificially.
    1703         168 :   Label loop(this, &var_index), done_loop(this);
    1704         168 :   Goto(&loop);
    1705         168 :   BIND(&loop);
    1706             :   {
    1707             :     TNode<IntPtrT> index = var_index.value();
    1708         336 :     GotoIfNot(UintPtrLessThan(index, register_count), &done_loop);
    1709             : 
    1710             :     TNode<IntPtrT> array_index =
    1711             :         IntPtrAdd(formal_parameter_count_intptr, index);
    1712             :     TNode<Object> value = LoadFixedArrayElement(array, array_index);
    1713             : 
    1714             :     TNode<IntPtrT> reg_index =
    1715         168 :         IntPtrSub(IntPtrConstant(Register(0).ToOperand()), index);
    1716         168 :     StoreRegister(value, reg_index);
    1717             : 
    1718         168 :     StoreFixedArrayElement(array, array_index,
    1719         504 :                            LoadRoot(RootIndex::kStaleRegister));
    1720             : 
    1721         168 :     var_index = IntPtrAdd(index, IntPtrConstant(1));
    1722         168 :     Goto(&loop);
    1723             :   }
    1724         168 :   BIND(&done_loop);
    1725             : 
    1726         168 :   return array;
    1727             : }
    1728             : 
    1729           0 : int InterpreterAssembler::CurrentBytecodeSize() const {
    1730       44856 :   return Bytecodes::Size(bytecode_, operand_scale_);
    1731             : }
    1732             : 
    1733         336 : void InterpreterAssembler::ToNumberOrNumeric(Object::Conversion mode) {
    1734         336 :   Node* object = GetAccumulator();
    1735             :   Node* context = GetContext();
    1736             : 
    1737         672 :   Variable var_type_feedback(this, MachineRepresentation::kTaggedSigned);
    1738         672 :   Variable var_result(this, MachineRepresentation::kTagged);
    1739         336 :   Label if_done(this), if_objectissmi(this), if_objectisheapnumber(this),
    1740         336 :       if_objectisother(this, Label::kDeferred);
    1741             : 
    1742         672 :   GotoIf(TaggedIsSmi(object), &if_objectissmi);
    1743         672 :   Branch(IsHeapNumber(object), &if_objectisheapnumber, &if_objectisother);
    1744             : 
    1745         336 :   BIND(&if_objectissmi);
    1746             :   {
    1747         336 :     var_result.Bind(object);
    1748         336 :     var_type_feedback.Bind(SmiConstant(BinaryOperationFeedback::kSignedSmall));
    1749         336 :     Goto(&if_done);
    1750             :   }
    1751             : 
    1752         336 :   BIND(&if_objectisheapnumber);
    1753             :   {
    1754         336 :     var_result.Bind(object);
    1755         336 :     var_type_feedback.Bind(SmiConstant(BinaryOperationFeedback::kNumber));
    1756         336 :     Goto(&if_done);
    1757             :   }
    1758             : 
    1759         336 :   BIND(&if_objectisother);
    1760             :   {
    1761             :     auto builtin = Builtins::kNonNumberToNumber;
    1762         336 :     if (mode == Object::Conversion::kToNumeric) {
    1763             :       builtin = Builtins::kNonNumberToNumeric;
    1764             :       // Special case for collecting BigInt feedback.
    1765         168 :       Label not_bigint(this);
    1766         336 :       GotoIfNot(IsBigInt(object), &not_bigint);
    1767             :       {
    1768         168 :         var_result.Bind(object);
    1769         168 :         var_type_feedback.Bind(SmiConstant(BinaryOperationFeedback::kBigInt));
    1770         168 :         Goto(&if_done);
    1771             :       }
    1772         168 :       BIND(&not_bigint);
    1773             :     }
    1774             : 
    1775             :     // Convert {object} by calling out to the appropriate builtin.
    1776         672 :     var_result.Bind(CallBuiltin(builtin, context, object));
    1777         336 :     var_type_feedback.Bind(SmiConstant(BinaryOperationFeedback::kAny));
    1778         336 :     Goto(&if_done);
    1779             :   }
    1780             : 
    1781         336 :   BIND(&if_done);
    1782             : 
    1783             :   // Record the type feedback collected for {object}.
    1784         336 :   Node* slot_index = BytecodeOperandIdx(0);
    1785         672 :   Node* maybe_feedback_vector = LoadFeedbackVector();
    1786             : 
    1787         336 :   UpdateFeedback(var_type_feedback.value(), maybe_feedback_vector, slot_index);
    1788             : 
    1789         336 :   SetAccumulator(var_result.value());
    1790         336 :   Dispatch();
    1791         336 : }
    1792             : 
    1793             : }  // namespace interpreter
    1794             : }  // namespace internal
    1795       59456 : }  // namespace v8

Generated by: LCOV version 1.10