LCOV - code coverage report
Current view: top level - src/builtins/x64 - builtins-x64.cc (source / functions)
Test: app.info
Date: 2019-01-20
                   Hit     Total   Coverage
Lines:            1203      1264     95.2 %
Functions:          65        65    100.0 %

          Line data    Source code
       1             : // Copyright 2012 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #if V8_TARGET_ARCH_X64
       6             : 
       7             : #include "src/api-arguments.h"
       8             : #include "src/base/adapters.h"
       9             : #include "src/code-factory.h"
      10             : #include "src/counters.h"
      11             : #include "src/deoptimizer.h"
      12             : #include "src/frame-constants.h"
      13             : #include "src/frames.h"
      14             : #include "src/macro-assembler-inl.h"
      15             : #include "src/objects-inl.h"
      16             : #include "src/objects/cell.h"
      17             : #include "src/objects/debug-objects.h"
      18             : #include "src/objects/foreign.h"
      19             : #include "src/objects/heap-number.h"
      20             : #include "src/objects/js-generator.h"
      21             : #include "src/objects/smi.h"
      22             : #include "src/register-configuration.h"
      23             : #include "src/wasm/wasm-linkage.h"
      24             : #include "src/wasm/wasm-objects.h"
      25             : 
      26             : namespace v8 {
      27             : namespace internal {
      28             : 
      29             : #define __ ACCESS_MASM(masm)
      30             : 
      31       15624 : void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
      32             :                                 ExitFrameType exit_frame_type) {
      33             :   __ LoadAddress(kJavaScriptCallExtraArg1Register,
      34       15624 :                  ExternalReference::Create(address));
      35       15624 :   if (exit_frame_type == BUILTIN_EXIT) {
      36             :     __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
      37       15456 :             RelocInfo::CODE_TARGET);
      38             :   } else {
      39             :     DCHECK(exit_frame_type == EXIT);
      40             :     __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithExitFrame),
      41         168 :             RelocInfo::CODE_TARGET);
      42             :   }
      43       15624 : }
      44             : 
      45         280 : static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
      46             :                                            Runtime::FunctionId function_id) {
      47             :   // ----------- S t a t e -------------
      48             :   //  -- rax : argument count (preserved for callee)
      49             :   //  -- rdx : new target (preserved for callee)
      50             :   //  -- rdi : target function (preserved for callee)
      51             :   // -----------------------------------
      52             :   {
      53         280 :     FrameScope scope(masm, StackFrame::INTERNAL);
      54             :     // Push the number of arguments to the callee.
      55         280 :     __ SmiTag(rax, rax);
      56         280 :     __ Push(rax);
      57             :     // Push a copy of the target function and the new target.
      58         280 :     __ Push(rdi);
      59         280 :     __ Push(rdx);
      60             :     // Function is also the parameter to the runtime call.
      61         280 :     __ Push(rdi);
      62             : 
      63             :     __ CallRuntime(function_id, 1);
      64         280 :     __ movp(rcx, rax);
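                     :     // The runtime call leaves the Code object to tail-call in rax; keep a
                     :     // copy in rcx so it survives while rax, rdi and rdx are restored below.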
      65             : 
      66             :     // Restore target function and new target.
      67         280 :     __ Pop(rdx);
      68         280 :     __ Pop(rdi);
      69         280 :     __ Pop(rax);
      70         280 :     __ SmiUntag(rax, rax);
      71             :   }
      72             :   static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
      73         280 :   __ JumpCodeObject(rcx);
      74         280 : }
      75             : 
      76             : namespace {
      77             : 
      78          56 : void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
      79             :   // ----------- S t a t e -------------
      80             :   //  -- rax: number of arguments
      81             :   //  -- rdi: constructor function
      82             :   //  -- rdx: new target
      83             :   //  -- rsi: context
      84             :   // -----------------------------------
      85             : 
      86             :   // Enter a construct frame.
      87             :   {
      88          56 :     FrameScope scope(masm, StackFrame::CONSTRUCT);
      89             : 
      90             :     // Preserve the incoming parameters on the stack.
      91          56 :     __ SmiTag(rcx, rax);
      92          56 :     __ Push(rsi);
      93          56 :     __ Push(rcx);
      94             : 
      95             :     // The receiver for the builtin/api call.
      96          56 :     __ PushRoot(RootIndex::kTheHoleValue);
      97             : 
      98             :     // Set up pointer to last argument.
      99         112 :     __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));
     100             : 
     101             :     // Copy arguments and receiver to the expression stack.
     102          56 :     Label loop, entry;
     103             :     __ movp(rcx, rax);
     104             :     // ----------- S t a t e -------------
     105             :     //  --                      rax: number of arguments (untagged)
     106             :     //  --                      rdi: constructor function
     107             :     //  --                      rdx: new target
     108             :     //  --                      rbx: pointer to last argument
     109             :     //  --                      rcx: counter
     110             :     //  -- sp[0*kSystemPointerSize]: the hole (receiver)
     111             :     //  -- sp[1*kSystemPointerSize]: number of arguments (tagged)
     112             :     //  -- sp[2*kSystemPointerSize]: context
     113             :     // -----------------------------------
     114          56 :     __ jmp(&entry);
     115          56 :     __ bind(&loop);
     116          56 :     __ Push(Operand(rbx, rcx, times_pointer_size, 0));
     117          56 :     __ bind(&entry);
     118             :     __ decp(rcx);
     119          56 :     __ j(greater_equal, &loop, Label::kNear);
     120             : 
     121             :     // Call the function.
     122             :     // rax: number of arguments (untagged)
     123             :     // rdi: constructor function
     124             :     // rdx: new target
     125             :     ParameterCount actual(rax);
     126          56 :     __ InvokeFunction(rdi, rdx, actual, CALL_FUNCTION);
     127             : 
     128             :     // Restore context from the frame.
     129         112 :     __ movp(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset));
     130             :     // Restore smi-tagged arguments count from the frame.
     131         112 :     __ movp(rbx, Operand(rbp, ConstructFrameConstants::kLengthOffset));
     132             : 
     133             :     // Leave construct frame.
     134             :   }
     135             : 
     136             :   // Remove caller arguments from the stack and return.
     137             :   __ PopReturnAddressTo(rcx);
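                     :   // rbx holds the smi-tagged argument count; drop that many arguments plus
                     :   // one extra slot for the receiver (the 1 * kSystemPointerSize below).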
     138          56 :   SmiIndex index = masm->SmiToIndex(rbx, rbx, kSystemPointerSizeLog2);
     139         112 :   __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kSystemPointerSize));
     140             :   __ PushReturnAddressFrom(rcx);
     141             : 
     142          56 :   __ ret(0);
     143          56 : }
     144             : 
     145         952 : void Generate_StackOverflowCheck(
     146             :     MacroAssembler* masm, Register num_args, Register scratch,
     147             :     Label* stack_overflow,
     148             :     Label::Distance stack_overflow_distance = Label::kFar) {
     149             :   // Check the stack for overflow. We are not trying to catch
     150             :   // interruptions (e.g. debug break and preemption) here, so the "real stack
     151             :   // limit" is checked.
     152         952 :   __ LoadRoot(kScratchRegister, RootIndex::kRealStackLimit);
     153         952 :   __ movp(scratch, rsp);
     154             :   // Make scratch the space we have left. The stack might already be overflowed
     155             :   // here which will cause scratch to become negative.
     156         952 :   __ subp(scratch, kScratchRegister);
     157             :   __ sarp(scratch, Immediate(kSystemPointerSizeLog2));
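                     :   // scratch now holds the remaining stack space in pointer-sized slots, so
                     :   // it can be compared directly against the argument count.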
     158             :   // Check if the arguments will overflow the stack.
     159         952 :   __ cmpp(scratch, num_args);
     160             :   // Signed comparison.
     161         952 :   __ j(less_equal, stack_overflow, stack_overflow_distance);
     162         952 : }
     163             : 
     164             : }  // namespace
     165             : 
     166             : // The construct stub for ES5 constructor functions and ES6 class constructors.
     167          56 : void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
     168             :   // ----------- S t a t e -------------
     169             :   //  -- rax: number of arguments (untagged)
     170             :   //  -- rdi: constructor function
     171             :   //  -- rdx: new target
     172             :   //  -- rsi: context
     173             :   //  -- sp[...]: constructor arguments
     174             :   // -----------------------------------
     175             : 
     176             :   // Enter a construct frame.
     177             :   {
     178          56 :     FrameScope scope(masm, StackFrame::CONSTRUCT);
     179          56 :     Label post_instantiation_deopt_entry, not_create_implicit_receiver;
     180             : 
     181             :     // Preserve the incoming parameters on the stack.
     182          56 :     __ SmiTag(rcx, rax);
     183          56 :     __ Push(rsi);
     184          56 :     __ Push(rcx);
     185          56 :     __ Push(rdi);
     186          56 :     __ PushRoot(RootIndex::kTheHoleValue);
     187          56 :     __ Push(rdx);
     188             : 
     189             :     // ----------- S t a t e -------------
     190             :     //  --         sp[0*kSystemPointerSize]: new target
     191             :     //  --         sp[1*kSystemPointerSize]: padding
     192             :     //  -- rdi and sp[2*kSystemPointerSize]: constructor function
     193             :     //  --         sp[3*kSystemPointerSize]: argument count
     194             :     //  --         sp[4*kSystemPointerSize]: context
     195             :     // -----------------------------------
     196             : 
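                     :     // Scratch register for the tagged-field loads below; only needed when
                     :     // pointer compression is enabled, no_reg otherwise.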
     197             :     Register decompr_scratch_for_debug =
     198          56 :         COMPRESS_POINTERS_BOOL ? kScratchRegister : no_reg;
     199             : 
     200             :     __ LoadTaggedPointerField(
     201             :         rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset),
     202          56 :         decompr_scratch_for_debug);
     203             :     __ testl(FieldOperand(rbx, SharedFunctionInfo::kFlagsOffset),
     204          56 :              Immediate(SharedFunctionInfo::IsDerivedConstructorBit::kMask));
     205          56 :     __ j(not_zero, &not_create_implicit_receiver, Label::kNear);
     206             : 
     207             :     // If not derived class constructor: Allocate the new receiver object.
     208         224 :     __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1);
     209             :     __ Call(BUILTIN_CODE(masm->isolate(), FastNewObject),
     210          56 :             RelocInfo::CODE_TARGET);
     211          56 :     __ jmp(&post_instantiation_deopt_entry, Label::kNear);
     212             : 
     213             :     // Else: use TheHoleValue as receiver for constructor call
     214          56 :     __ bind(&not_create_implicit_receiver);
     215          56 :     __ LoadRoot(rax, RootIndex::kTheHoleValue);
     216             : 
     217             :     // ----------- S t a t e -------------
     218             :     //  -- rax                          implicit receiver
     219             :     //  -- Slot 4 / sp[0*kSystemPointerSize]  new target
     220             :     //  -- Slot 3 / sp[1*kSystemPointerSize]  padding
     221             :     //  -- Slot 2 / sp[2*kSystemPointerSize]  constructor function
     222             :     //  -- Slot 1 / sp[3*kSystemPointerSize]  number of arguments (tagged)
     223             :     //  -- Slot 0 / sp[4*kSystemPointerSize]  context
     224             :     // -----------------------------------
     225             :     // Deoptimizer enters here.
     226             :     masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
     227         168 :         masm->pc_offset());
     228          56 :     __ bind(&post_instantiation_deopt_entry);
     229             : 
     230             :     // Restore new target.
     231          56 :     __ Pop(rdx);
     232             : 
     233             :     // Push the allocated receiver to the stack. We need two copies
     234             :     // because we may have to return the original one and the calling
     235             :     // conventions dictate that the called function pops the receiver.
     236          56 :     __ Push(rax);
     237          56 :     __ Push(rax);
     238             : 
     239             :     // ----------- S t a t e -------------
     240             :     //  -- sp[0*kSystemPointerSize]  implicit receiver
     241             :     //  -- sp[1*kSystemPointerSize]  implicit receiver
     242             :     //  -- sp[2*kSystemPointerSize]  padding
     243             :     //  -- sp[3*kSystemPointerSize]  constructor function
     244             :     //  -- sp[4*kSystemPointerSize]  number of arguments (tagged)
     245             :     //  -- sp[5*kSystemPointerSize]  context
     246             :     // -----------------------------------
     247             : 
     248             :     // Restore constructor function and argument count.
     249         112 :     __ movp(rdi, Operand(rbp, ConstructFrameConstants::kConstructorOffset));
     250          56 :     __ SmiUntag(rax, Operand(rbp, ConstructFrameConstants::kLengthOffset));
     251             : 
     252             :     // Set up pointer to last argument.
     253         112 :     __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));
     254             : 
     255             :     // Check if we have enough stack space to push all arguments.
     256             :     // Argument count in rax. Clobbers rcx.
     257          56 :     Label enough_stack_space, stack_overflow;
     258          56 :     Generate_StackOverflowCheck(masm, rax, rcx, &stack_overflow, Label::kNear);
     259          56 :     __ jmp(&enough_stack_space, Label::kNear);
     260             : 
     261          56 :     __ bind(&stack_overflow);
     262             :     // Restore context from the frame.
     263         112 :     __ movp(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset));
     264          56 :     __ CallRuntime(Runtime::kThrowStackOverflow);
     265             :     // This should be unreachable.
     266          56 :     __ int3();
     267             : 
     268          56 :     __ bind(&enough_stack_space);
     269             : 
     270             :     // Copy arguments and receiver to the expression stack.
     271          56 :     Label loop, entry;
     272             :     __ movp(rcx, rax);
     273             :     // ----------- S t a t e -------------
     274             :     //  --                              rax: number of arguments (untagged)
     275             :     //  --                              rdx: new target
     276             :     //  --                              rbx: pointer to last argument
     277             :     //  --                              rcx: counter (tagged)
     278             :     //  --         sp[0*kSystemPointerSize]: implicit receiver
     279             :     //  --         sp[1*kSystemPointerSize]: implicit receiver
     280             :     //  --         sp[2*kSystemPointerSize]: padding
     281             :     //  -- rdi and sp[3*kSystemPointerSize]: constructor function
     282             :     //  --         sp[4*kSystemPointerSize]: number of arguments (tagged)
     283             :     //  --         sp[5*kSystemPointerSize]: context
     284             :     // -----------------------------------
     285          56 :     __ jmp(&entry, Label::kNear);
     286          56 :     __ bind(&loop);
     287          56 :     __ Push(Operand(rbx, rcx, times_pointer_size, 0));
     288          56 :     __ bind(&entry);
     289             :     __ decp(rcx);
     290          56 :     __ j(greater_equal, &loop, Label::kNear);
     291             : 
     292             :     // Call the function.
     293             :     ParameterCount actual(rax);
     294          56 :     __ InvokeFunction(rdi, rdx, actual, CALL_FUNCTION);
     295             : 
     296             :     // ----------- S t a t e -------------
     297             :     //  -- rax                 constructor result
     298             :     //  -- sp[0*kSystemPointerSize]  implicit receiver
     299             :     //  -- sp[1*kSystemPointerSize]  padding
     300             :     //  -- sp[2*kSystemPointerSize]  constructor function
     301             :     //  -- sp[3*kSystemPointerSize]  number of arguments
     302             :     //  -- sp[4*kSystemPointerSize]  context
     303             :     // -----------------------------------
     304             : 
     305             :     // Store offset of return address for deoptimizer.
     306             :     masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
     307          56 :         masm->pc_offset());
     308             : 
     309             :     // Restore context from the frame.
     310         112 :     __ movp(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset));
     311             : 
     312             :     // If the result is an object (in the ECMA sense), we should get rid
     313             :     // of the receiver and use the result; see ECMA-262 section 13.2.2-7
     314             :     // on page 74.
     315          56 :     Label use_receiver, do_throw, leave_frame;
     316             : 
     317             :     // If the result is undefined, we jump out to using the implicit receiver.
     318             :     __ JumpIfRoot(rax, RootIndex::kUndefinedValue, &use_receiver, Label::kNear);
     319             : 
     320             :     // Otherwise we do a smi check and fall through to check if the return value
     321             :     // is a valid receiver.
     322             : 
     323             :     // If the result is a smi, it is *not* an object in the ECMA sense.
     324          56 :     __ JumpIfSmi(rax, &use_receiver, Label::kNear);
     325             : 
     326             :     // If the type of the result (stored in its map) is less than
     327             :     // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
     328             :     STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
     329          56 :     __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
     330          56 :     __ j(above_equal, &leave_frame, Label::kNear);
     331          56 :     __ jmp(&use_receiver, Label::kNear);
     332             : 
     333          56 :     __ bind(&do_throw);
     334          56 :     __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
     335             : 
     336             :     // Throw away the result of the constructor invocation and use the
     337             :     // on-stack receiver as the result.
     338          56 :     __ bind(&use_receiver);
     339         112 :     __ movp(rax, Operand(rsp, 0 * kSystemPointerSize));
     340             :     __ JumpIfRoot(rax, RootIndex::kTheHoleValue, &do_throw, Label::kNear);
     341             : 
     342          56 :     __ bind(&leave_frame);
     343             :     // Restore the arguments count.
     344         112 :     __ movp(rbx, Operand(rbp, ConstructFrameConstants::kLengthOffset));
     345             :     // Leave construct frame.
     346             :   }
     347             :   // Remove caller arguments from the stack and return.
     348             :   __ PopReturnAddressTo(rcx);
     349          56 :   SmiIndex index = masm->SmiToIndex(rbx, rbx, kSystemPointerSizeLog2);
     350         112 :   __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kSystemPointerSize));
     351             :   __ PushReturnAddressFrom(rcx);
     352          56 :   __ ret(0);
     353          56 : }
     354             : 
     355          56 : void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
     356          56 :   Generate_JSBuiltinsConstructStubHelper(masm);
     357          56 : }
     358             : 
     359          56 : void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
     360          56 :   FrameScope scope(masm, StackFrame::INTERNAL);
     361          56 :   __ Push(rdi);
     362          56 :   __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
     363          56 : }
     364             : 
     365             : namespace {
     366             : 
     367             : // Called with the native C calling convention. The corresponding function
     368             : // signature is either:
     369             : //   using JSEntryFunction = GeneratedCode<Address(
     370             : //       Address root_register_value, Address new_target, Address target,
     371             : //       Address receiver, intptr_t argc, Address** argv)>;
     372             : // or
     373             : //   using JSEntryFunction = GeneratedCode<Address(
     374             : //       Address root_register_value, MicrotaskQueue* microtask_queue)>;
     375         168 : void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
     376             :                              Builtins::Name entry_trampoline) {
     377         168 :   Label invoke, handler_entry, exit;
     378         168 :   Label not_outermost_js, not_outermost_js_2;
     379             : 
     380             :   {  // NOLINT. Scope block confuses linter.
     381             :     NoRootArrayScope uninitialized_root_register(masm);
     382             :     // Set up frame.
     383         168 :     __ pushq(rbp);
     384             :     __ movp(rbp, rsp);
     385             : 
     386             :     // Push the stack frame type.
     387         168 :     __ Push(Immediate(StackFrame::TypeToMarker(type)));
     388             :     // Reserve a slot for the context. It is filled after the root register has
     389             :     // been set up.
     390         168 :     __ subp(rsp, Immediate(kSystemPointerSize));
     391             :     // Save callee-saved registers (X64/X32/Win64 calling conventions).
     392         168 :     __ pushq(r12);
     393         168 :     __ pushq(r13);
     394         168 :     __ pushq(r14);
     395         168 :     __ pushq(r15);
     396             : #ifdef _WIN64
     397             :     __ pushq(rdi);  // Only callee save in Win64 ABI, argument in AMD64 ABI.
     398             :     __ pushq(rsi);  // Only callee save in Win64 ABI, argument in AMD64 ABI.
     399             : #endif
     400         168 :     __ pushq(rbx);
     401             : 
     402             : #ifdef _WIN64
     403             :     // On Win64 XMM6-XMM15 are callee-save.
     404             :     __ subp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
     405             :     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0), xmm6);
     406             :     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1), xmm7);
     407             :     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2), xmm8);
     408             :     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3), xmm9);
     409             :     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4), xmm10);
     410             :     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5), xmm11);
     411             :     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6), xmm12);
     412             :     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7), xmm13);
     413             :     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8), xmm14);
     414             :     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9), xmm15);
     415             :     STATIC_ASSERT(EntryFrameConstants::kCalleeSaveXMMRegisters == 10);
     416             :     STATIC_ASSERT(EntryFrameConstants::kXMMRegistersBlockSize ==
     417             :                   EntryFrameConstants::kXMMRegisterSize *
     418             :                       EntryFrameConstants::kCalleeSaveXMMRegisters);
     419             : #endif
     420             : 
     421             :     // Initialize the root register.
     422             :     // C calling convention. The first argument is passed in arg_reg_1.
     423             :     __ movp(kRootRegister, arg_reg_1);
     424             :   }
     425             : 
     426             :   // Save copies of the top frame descriptor on the stack.
     427             :   ExternalReference c_entry_fp = ExternalReference::Create(
     428        1008 :       IsolateAddressId::kCEntryFPAddress, masm->isolate());
     429             :   {
     430         168 :     Operand c_entry_fp_operand = masm->ExternalReferenceAsOperand(c_entry_fp);
     431         168 :     __ Push(c_entry_fp_operand);
     432             :   }
     433             : 
     434             :   // Store the context address in the previously-reserved slot.
     435             :   ExternalReference context_address = ExternalReference::Create(
     436         168 :       IsolateAddressId::kContextAddress, masm->isolate());
     437         168 :   __ Load(kScratchRegister, context_address);
     438             :   static constexpr int kOffsetToContextSlot = -2 * kSystemPointerSize;
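                     :   // Slot -1 below rbp holds the frame-type marker pushed above; slot -2 is
                     :   // the one reserved for the context.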
     439         336 :   __ movp(Operand(rbp, kOffsetToContextSlot), kScratchRegister);
     440             : 
     441             :   // If this is the outermost JS call, set js_entry_sp value.
     442             :   ExternalReference js_entry_sp = ExternalReference::Create(
     443         168 :       IsolateAddressId::kJSEntrySPAddress, masm->isolate());
     444         168 :   __ Load(rax, js_entry_sp);
     445             :   __ testp(rax, rax);
     446         168 :   __ j(not_zero, &not_outermost_js);
     447         168 :   __ Push(Immediate(StackFrame::OUTERMOST_JSENTRY_FRAME));
     448             :   __ movp(rax, rbp);
     449         168 :   __ Store(js_entry_sp, rax);
     450         168 :   Label cont;
     451         168 :   __ jmp(&cont);
     452         168 :   __ bind(&not_outermost_js);
     453         168 :   __ Push(Immediate(StackFrame::INNER_JSENTRY_FRAME));
     454         168 :   __ bind(&cont);
     455             : 
     456             :   // Jump to a faked try block that does the invoke, with a faked catch
     457             :   // block that sets the pending exception.
     458         168 :   __ jmp(&invoke);
     459         168 :   __ bind(&handler_entry);
     460             : 
     461             :   // Store the current pc as the handler offset. It's used later to create the
     462             :   // handler table.
     463         336 :   masm->isolate()->builtins()->SetJSEntryHandlerOffset(handler_entry.pos());
     464             : 
     465             :   // Caught exception: Store result (exception) in the pending exception
     466             :   // field in the JSEnv and return a failure sentinel.
     467             :   ExternalReference pending_exception = ExternalReference::Create(
     468         168 :       IsolateAddressId::kPendingExceptionAddress, masm->isolate());
     469         168 :   __ Store(pending_exception, rax);
     470         168 :   __ LoadRoot(rax, RootIndex::kException);
     471         168 :   __ jmp(&exit);
     472             : 
     473             :   // Invoke: Link this frame into the handler chain.
     474         168 :   __ bind(&invoke);
     475         168 :   __ PushStackHandler();
     476             : 
     477             :   // Invoke the function by calling through JS entry trampoline builtin and
     478             :   // pop the faked function when we return.
     479             :   Handle<Code> trampoline_code =
     480         168 :       masm->isolate()->builtins()->builtin_handle(entry_trampoline);
     481         168 :   __ Call(trampoline_code, RelocInfo::CODE_TARGET);
     482             : 
     483             :   // Unlink this frame from the handler chain.
     484         168 :   __ PopStackHandler();
     485             : 
     486         168 :   __ bind(&exit);
     487             :   // Check if the current stack frame is marked as the outermost JS frame.
     488         168 :   __ Pop(rbx);
     489         168 :   __ cmpp(rbx, Immediate(StackFrame::OUTERMOST_JSENTRY_FRAME));
     490         168 :   __ j(not_equal, &not_outermost_js_2);
     491         168 :   __ Move(kScratchRegister, js_entry_sp);
     492         336 :   __ movp(Operand(kScratchRegister, 0), Immediate(0));
     493         168 :   __ bind(&not_outermost_js_2);
     494             : 
     495             :   // Restore the top frame descriptor from the stack.
     496             :   {
     497         168 :     Operand c_entry_fp_operand = masm->ExternalReferenceAsOperand(c_entry_fp);
     498         168 :     __ Pop(c_entry_fp_operand);
     499             :   }
     500             : 
     501             :   // Restore callee-saved registers (X64 conventions).
     502             : #ifdef _WIN64
     503             :   // On Win64 XMM6-XMM15 are callee-save
     504             :   __ movdqu(xmm6, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0));
     505             :   __ movdqu(xmm7, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1));
     506             :   __ movdqu(xmm8, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2));
     507             :   __ movdqu(xmm9, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3));
     508             :   __ movdqu(xmm10, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4));
     509             :   __ movdqu(xmm11, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5));
     510             :   __ movdqu(xmm12, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6));
     511             :   __ movdqu(xmm13, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7));
     512             :   __ movdqu(xmm14, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8));
     513             :   __ movdqu(xmm15, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9));
     514             :   __ addp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
     515             : #endif
     516             : 
     517         168 :   __ popq(rbx);
     518             : #ifdef _WIN64
      519             :   // Callee-saved in Win64 ABI, arguments/volatile in AMD64 ABI.
     520             :   __ popq(rsi);
     521             :   __ popq(rdi);
     522             : #endif
     523         168 :   __ popq(r15);
     524         168 :   __ popq(r14);
     525         168 :   __ popq(r13);
     526         168 :   __ popq(r12);
     527         168 :   __ addp(rsp, Immediate(2 * kSystemPointerSize));  // remove markers
     528             : 
     529             :   // Restore frame pointer and return.
     530         168 :   __ popq(rbp);
     531         168 :   __ ret(0);
     532         168 : }
     533             : 
     534             : }  // namespace
     535             : 
     536          56 : void Builtins::Generate_JSEntry(MacroAssembler* masm) {
     537             :   Generate_JSEntryVariant(masm, StackFrame::ENTRY,
     538          56 :                           Builtins::kJSEntryTrampoline);
     539          56 : }
     540             : 
     541          56 : void Builtins::Generate_JSConstructEntry(MacroAssembler* masm) {
     542             :   Generate_JSEntryVariant(masm, StackFrame::CONSTRUCT_ENTRY,
     543          56 :                           Builtins::kJSConstructEntryTrampoline);
     544          56 : }
     545             : 
     546          56 : void Builtins::Generate_JSRunMicrotasksEntry(MacroAssembler* masm) {
     547             :   Generate_JSEntryVariant(masm, StackFrame::ENTRY,
     548          56 :                           Builtins::kRunMicrotasksTrampoline);
     549          56 : }
     550             : 
     551         112 : static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
     552             :                                              bool is_construct) {
     553             :   // Expects six C++ function parameters.
     554             :   // - Address root_register_value
     555             :   // - Address new_target (tagged Object pointer)
     556             :   // - Address function (tagged JSFunction pointer)
     557             :   // - Address receiver (tagged Object pointer)
     558             :   // - intptr_t argc
     559             :   // - Address** argv (pointer to array of tagged Object pointers)
     560             :   // (see Handle::Invoke in execution.cc).
     561             : 
     562             :   // Open a C++ scope for the FrameScope.
     563             :   {
     564             :     // Platform specific argument handling. After this, the stack contains
     565             :     // an internal frame and the pushed function and receiver, and
      566             :     // registers rax and rbx hold the argument count and argument array,
     567             :     // while rdi holds the function pointer, rsi the context, and rdx the
     568             :     // new.target.
     569             : 
     570             :     // MSVC parameters in:
     571             :     // rcx        : root_register_value
     572             :     // rdx        : new_target
     573             :     // r8         : function
     574             :     // r9         : receiver
     575             :     // [rsp+0x20] : argc
     576             :     // [rsp+0x28] : argv
     577             :     //
     578             :     // GCC parameters in:
     579             :     // rdi : root_register_value
     580             :     // rsi : new_target
     581             :     // rdx : function
     582             :     // rcx : receiver
     583             :     // r8  : argc
     584             :     // r9  : argv
     585             : 
     586         112 :     __ movp(rdi, arg_reg_3);
     587         112 :     __ Move(rdx, arg_reg_2);
     588             :     // rdi : function
     589             :     // rdx : new_target
     590             : 
     591             :     // Clear the context before we push it when entering the internal frame.
     592         112 :     __ Set(rsi, 0);
     593             : 
     594             :     // Enter an internal frame.
     595         112 :     FrameScope scope(masm, StackFrame::INTERNAL);
     596             : 
      597             :     // Set up the context (we need to use the caller context from the isolate).
     598             :     ExternalReference context_address = ExternalReference::Create(
     599         224 :         IsolateAddressId::kContextAddress, masm->isolate());
     600         112 :     __ movp(rsi, masm->ExternalReferenceAsOperand(context_address));
     601             : 
     602             :     // Push the function and the receiver onto the stack.
     603         112 :     __ Push(rdi);
     604         112 :     __ Push(arg_reg_4);
     605             : 
     606             : #ifdef _WIN64
     607             :     // Load the previous frame pointer to access C arguments on stack
     608             :     __ movp(kScratchRegister, Operand(rbp, 0));
      609             :     // Load the number of arguments and set up a pointer to the arguments.
     610             :     __ movp(rax, Operand(kScratchRegister, EntryFrameConstants::kArgcOffset));
     611             :     __ movp(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
     612             : #else   // _WIN64
      613             :     // Load the number of arguments and set up a pointer to the arguments.
     614             :     __ movp(rax, r8);
     615             :     __ movp(rbx, r9);
     616             : #endif  // _WIN64
     617             : 
     618             :     // Current stack contents:
     619             :     // [rsp + 2 * kSystemPointerSize ... ] : Internal frame
     620             :     // [rsp + kSystemPointerSize]          : function
      621             :     // [rsp]                               : receiver
     622             :     // Current register contents:
     623             :     // rax : argc
     624             :     // rbx : argv
     625             :     // rsi : context
     626             :     // rdi : function
     627             :     // rdx : new.target
     628             : 
     629             :     // Check if we have enough stack space to push all arguments.
     630             :     // Argument count in rax. Clobbers rcx.
     631         112 :     Label enough_stack_space, stack_overflow;
     632         112 :     Generate_StackOverflowCheck(masm, rax, rcx, &stack_overflow, Label::kNear);
     633         112 :     __ jmp(&enough_stack_space, Label::kNear);
     634             : 
     635         112 :     __ bind(&stack_overflow);
     636         112 :     __ CallRuntime(Runtime::kThrowStackOverflow);
     637             :     // This should be unreachable.
     638         112 :     __ int3();
     639             : 
     640         112 :     __ bind(&enough_stack_space);
     641             : 
     642             :     // Copy arguments to the stack in a loop.
     643             :     // Register rbx points to array of pointers to handle locations.
     644             :     // Push the values of these handles.
     645         112 :     Label loop, entry;
     646         112 :     __ Set(rcx, 0);  // Set loop variable to 0.
     647         112 :     __ jmp(&entry, Label::kNear);
     648         112 :     __ bind(&loop);
     649         224 :     __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
     650         112 :     __ Push(Operand(kScratchRegister, 0));  // dereference handle
     651         112 :     __ addp(rcx, Immediate(1));
     652         112 :     __ bind(&entry);
     653         112 :     __ cmpp(rcx, rax);
     654         112 :     __ j(not_equal, &loop, Label::kNear);
     655             : 
     656             :     // Invoke the builtin code.
     657             :     Handle<Code> builtin = is_construct
     658          56 :                                ? BUILTIN_CODE(masm->isolate(), Construct)
     659         224 :                                : masm->isolate()->builtins()->Call();
     660         112 :     __ Call(builtin, RelocInfo::CODE_TARGET);
     661             : 
     662             :     // Exit the internal frame. Notice that this also removes the empty
     663             :     // context and the function left on the stack by the code
     664             :     // invocation.
     665             :   }
     666             : 
     667         112 :   __ ret(0);
     668         112 : }
     669             : 
     670          56 : void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
     671          56 :   Generate_JSEntryTrampolineHelper(masm, false);
     672          56 : }
     673             : 
     674          56 : void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
     675          56 :   Generate_JSEntryTrampolineHelper(masm, true);
     676          56 : }
     677             : 
     678          56 : void Builtins::Generate_RunMicrotasksTrampoline(MacroAssembler* masm) {
     679             :   // arg_reg_2: microtask_queue
     680          56 :   __ movp(RunMicrotasksDescriptor::MicrotaskQueueRegister(), arg_reg_2);
     681          56 :   __ Jump(BUILTIN_CODE(masm->isolate(), RunMicrotasks), RelocInfo::CODE_TARGET);
     682          56 : }
     683             : 
     684          56 : static void GetSharedFunctionInfoBytecode(MacroAssembler* masm,
     685             :                                           Register sfi_data,
     686             :                                           Register scratch1) {
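                     :   // If |sfi_data| is an InterpreterData, unwrap it to the BytecodeArray it
                     :   // wraps; otherwise it already holds the bytecode and is left unchanged.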
     687          56 :   Label done;
     688             : 
     689          56 :   __ CmpObjectType(sfi_data, INTERPRETER_DATA_TYPE, scratch1);
     690          56 :   __ j(not_equal, &done, Label::kNear);
     691             :   Register decompr_scratch_for_debug =
     692          56 :       COMPRESS_POINTERS_BOOL ? scratch1 : no_reg;
     693             : 
     694             :   __ LoadTaggedPointerField(
     695             :       sfi_data, FieldOperand(sfi_data, InterpreterData::kBytecodeArrayOffset),
     696          56 :       decompr_scratch_for_debug);
     697             : 
     698          56 :   __ bind(&done);
     699          56 : }
     700             : 
     701             : // static
     702          56 : void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
     703             :   // ----------- S t a t e -------------
     704             :   //  -- rax    : the value to pass to the generator
     705             :   //  -- rdx    : the JSGeneratorObject to resume
     706             :   //  -- rsp[0] : return address
     707             :   // -----------------------------------
     708          56 :   __ AssertGeneratorObject(rdx);
     709             : 
     710             :   // Store input value into generator object.
     711             :   __ StoreTaggedField(
     712          56 :       FieldOperand(rdx, JSGeneratorObject::kInputOrDebugPosOffset), rax);
     713             :   __ RecordWriteField(rdx, JSGeneratorObject::kInputOrDebugPosOffset, rax, rcx,
     714          56 :                       kDontSaveFPRegs);
     715             : 
     716          56 :   Register decompr_scratch1 = COMPRESS_POINTERS_BOOL ? r11 : no_reg;
     717          56 :   Register decompr_scratch2 = COMPRESS_POINTERS_BOOL ? r12 : no_reg;
     718             :   Register decompr_scratch_for_debug =
     719          56 :       COMPRESS_POINTERS_BOOL ? kScratchRegister : no_reg;
     720             : 
     721             :   // Load suspended function and context.
     722             :   __ LoadTaggedPointerField(
     723             :       rdi, FieldOperand(rdx, JSGeneratorObject::kFunctionOffset),
     724          56 :       decompr_scratch_for_debug);
     725             :   __ LoadTaggedPointerField(rsi, FieldOperand(rdi, JSFunction::kContextOffset),
     726          56 :                             decompr_scratch_for_debug);
     727             : 
     728             :   // Flood function if we are stepping.
     729          56 :   Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
     730          56 :   Label stepping_prepared;
     731             :   ExternalReference debug_hook =
     732         112 :       ExternalReference::debug_hook_on_function_call_address(masm->isolate());
     733          56 :   Operand debug_hook_operand = masm->ExternalReferenceAsOperand(debug_hook);
     734          56 :   __ cmpb(debug_hook_operand, Immediate(0));
     735          56 :   __ j(not_equal, &prepare_step_in_if_stepping);
     736             : 
     737             :   // Flood function if we need to continue stepping in the suspended generator.
     738             :   ExternalReference debug_suspended_generator =
     739          56 :       ExternalReference::debug_suspended_generator_address(masm->isolate());
     740             :   Operand debug_suspended_generator_operand =
     741          56 :       masm->ExternalReferenceAsOperand(debug_suspended_generator);
     742          56 :   __ cmpp(rdx, debug_suspended_generator_operand);
     743          56 :   __ j(equal, &prepare_step_in_suspended_generator);
     744          56 :   __ bind(&stepping_prepared);
     745             : 
     746             :   // Check the stack for overflow. We are not trying to catch interruptions
     747             :   // (i.e. debug break and preemption) here, so check the "real stack limit".
     748          56 :   Label stack_overflow;
     749          56 :   __ CompareRoot(rsp, RootIndex::kRealStackLimit);
     750          56 :   __ j(below, &stack_overflow);
     751             : 
     752             :   // Pop return address.
     753             :   __ PopReturnAddressTo(rax);
     754             : 
     755             :   // Push receiver.
     756             :   __ PushTaggedPointerField(
     757             :       FieldOperand(rdx, JSGeneratorObject::kReceiverOffset), decompr_scratch1,
     758          56 :       decompr_scratch_for_debug);
     759             : 
     760             :   // ----------- S t a t e -------------
     761             :   //  -- rax    : return address
     762             :   //  -- rdx    : the JSGeneratorObject to resume
     763             :   //  -- rdi    : generator function
     764             :   //  -- rsi    : generator context
     765             :   //  -- rsp[0] : generator receiver
     766             :   // -----------------------------------
     767             : 
     768             :   // Copy the function arguments from the generator object's register file.
     769             :   __ LoadTaggedPointerField(
     770             :       rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset),
     771          56 :       decompr_scratch_for_debug);
     772             :   __ movzxwq(
     773             :       rcx, FieldOperand(rcx, SharedFunctionInfo::kFormalParameterCountOffset));
     774             : 
     775             :   __ LoadTaggedPointerField(
     776             :       rbx, FieldOperand(rdx, JSGeneratorObject::kParametersAndRegistersOffset),
     777          56 :       decompr_scratch_for_debug);
     778             : 
     779             :   {
     780          56 :     Label done_loop, loop;
     781          56 :     __ Set(r9, 0);
     782             : 
     783          56 :     __ bind(&loop);
     784          56 :     __ cmpl(r9, rcx);
     785          56 :     __ j(greater_equal, &done_loop, Label::kNear);
     786             :     __ PushTaggedAnyField(
     787             :         FieldOperand(rbx, r9, times_tagged_size, FixedArray::kHeaderSize),
     788          56 :         decompr_scratch1, decompr_scratch2, decompr_scratch_for_debug);
     789          56 :     __ addl(r9, Immediate(1));
     790          56 :     __ jmp(&loop);
     791             : 
     792          56 :     __ bind(&done_loop);
     793             :   }
     794             : 
     795             :   // Underlying function needs to have bytecode available.
     796          56 :   if (FLAG_debug_code) {
     797             :     __ LoadTaggedPointerField(
     798             :         rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset),
     799           0 :         decompr_scratch_for_debug);
     800             :     __ LoadTaggedPointerField(
     801             :         rcx, FieldOperand(rcx, SharedFunctionInfo::kFunctionDataOffset),
     802           0 :         decompr_scratch_for_debug);
     803           0 :     GetSharedFunctionInfoBytecode(masm, rcx, kScratchRegister);
     804           0 :     __ CmpObjectType(rcx, BYTECODE_ARRAY_TYPE, rcx);
     805           0 :     __ Assert(equal, AbortReason::kMissingBytecodeArray);
     806             :   }
     807             : 
     808             :   // Resume (Ignition/TurboFan) generator object.
     809             :   {
     810             :     __ PushReturnAddressFrom(rax);
     811             :     __ LoadTaggedPointerField(
     812             :         rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset),
     813          56 :         decompr_scratch_for_debug);
     814             :     __ movzxwq(rax, FieldOperand(
     815             :                         rax, SharedFunctionInfo::kFormalParameterCountOffset));
     816             :     // We abuse new.target both to indicate that this is a resume call and to
     817             :     // pass in the generator object.  In ordinary calls, new.target is always
     818             :     // undefined because generator functions are non-constructable.
     819             :     static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
     820             :     __ LoadTaggedPointerField(rcx, FieldOperand(rdi, JSFunction::kCodeOffset),
     821          56 :                               decompr_scratch_for_debug);
     822          56 :     __ JumpCodeObject(rcx);
     823             :   }
     824             : 
     825          56 :   __ bind(&prepare_step_in_if_stepping);
     826             :   {
     827          56 :     FrameScope scope(masm, StackFrame::INTERNAL);
     828          56 :     __ Push(rdx);
     829          56 :     __ Push(rdi);
     830             :     // Push hole as receiver since we do not use it for stepping.
     831          56 :     __ PushRoot(RootIndex::kTheHoleValue);
     832          56 :     __ CallRuntime(Runtime::kDebugOnFunctionCall);
     833          56 :     __ Pop(rdx);
     834             :     __ LoadTaggedPointerField(
     835             :         rdi, FieldOperand(rdx, JSGeneratorObject::kFunctionOffset),
     836          56 :         decompr_scratch_for_debug);
     837             :   }
     838          56 :   __ jmp(&stepping_prepared);
     839             : 
     840          56 :   __ bind(&prepare_step_in_suspended_generator);
     841             :   {
     842          56 :     FrameScope scope(masm, StackFrame::INTERNAL);
     843          56 :     __ Push(rdx);
     844          56 :     __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
     845          56 :     __ Pop(rdx);
     846             :     __ LoadTaggedPointerField(
     847             :         rdi, FieldOperand(rdx, JSGeneratorObject::kFunctionOffset),
     848          56 :         decompr_scratch_for_debug);
     849             :   }
     850          56 :   __ jmp(&stepping_prepared);
     851             : 
     852          56 :   __ bind(&stack_overflow);
     853             :   {
     854          56 :     FrameScope scope(masm, StackFrame::INTERNAL);
     855          56 :     __ CallRuntime(Runtime::kThrowStackOverflow);
     856          56 :     __ int3();  // This should be unreachable.
     857             :   }
     858          56 : }
     859             : 
     860             : // TODO(juliana): if we remove the code below then we don't need all
     861             : // the parameters.
     862          56 : static void ReplaceClosureCodeWithOptimizedCode(
     863             :     MacroAssembler* masm, Register optimized_code, Register closure,
     864             :     Register scratch1, Register scratch2, Register scratch3) {
     865             : 
     866             :   // Store the optimized code in the closure.
     867             :   __ StoreTaggedField(FieldOperand(closure, JSFunction::kCodeOffset),
     868          56 :                       optimized_code);
     869          56 :   __ movp(scratch1, optimized_code);  // Write barrier clobbers scratch1 below.
     870             :   __ RecordWriteField(closure, JSFunction::kCodeOffset, scratch1, scratch2,
     871          56 :                       kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
     872          56 : }
     873             : 
     874          56 : static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
     875             :                                   Register scratch2) {
     876          56 :   Register args_count = scratch1;
     877          56 :   Register return_pc = scratch2;
     878             : 
     879             :   // Get the arguments + receiver count.
     880             :   __ movp(args_count,
     881         112 :           Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
     882             :   __ movl(args_count,
     883             :           FieldOperand(args_count, BytecodeArray::kParameterSizeOffset));
     884             : 
     885             :   // Leave the frame (also dropping the register file).
     886          56 :   __ leave();
     887             : 
     888             :   // Drop receiver + arguments.
     889             :   __ PopReturnAddressTo(return_pc);
     890          56 :   __ addp(rsp, args_count);
     891             :   __ PushReturnAddressFrom(return_pc);
     892          56 : }
     893             : 
     894             : // Tail-call |function_id| if |smi_entry| == |marker|
     895         168 : static void TailCallRuntimeIfMarkerEquals(MacroAssembler* masm,
     896             :                                           Register smi_entry,
     897             :                                           OptimizationMarker marker,
     898             :                                           Runtime::FunctionId function_id) {
     899         168 :   Label no_match;
     900         168 :   __ SmiCompare(smi_entry, Smi::FromEnum(marker));
     901         168 :   __ j(not_equal, &no_match);
     902         168 :   GenerateTailCallToReturnedCode(masm, function_id);
     903         168 :   __ bind(&no_match);
     904         168 : }
     905             : 
     906          56 : static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
     907             :                                            Register feedback_vector,
     908             :                                            Register scratch1, Register scratch2,
     909             :                                            Register scratch3) {
     910             :   // ----------- S t a t e -------------
     911             :   //  -- rax : argument count (preserved for callee if needed, and caller)
     912             :   //  -- rdx : new target (preserved for callee if needed, and caller)
     913             :   //  -- rdi : target function (preserved for callee if needed, and caller)
     914             :   //  -- feedback vector (preserved for caller if needed)
     915             :   // -----------------------------------
     916             :   DCHECK(!AreAliased(feedback_vector, rax, rdx, rdi, scratch1, scratch2,
     917             :                      scratch3));
     918             : 
     919          56 :   Label optimized_code_slot_is_weak_ref, fallthrough;
     920             : 
     921          56 :   Register closure = rdi;
     922          56 :   Register optimized_code_entry = scratch1;
     923          56 :   Register decompr_scratch = COMPRESS_POINTERS_BOOL ? scratch2 : no_reg;
     924             :   Register decompr_scratch_for_debug =
     925          56 :       COMPRESS_POINTERS_BOOL ? scratch3 : no_reg;
     926             : 
     927             :   __ LoadAnyTaggedField(
     928             :       optimized_code_entry,
     929             :       FieldOperand(feedback_vector, FeedbackVector::kOptimizedCodeOffset),
     930          56 :       decompr_scratch, decompr_scratch_for_debug);
     931             : 
     932             :   // Check if the code entry is a Smi. If yes, we interpret it as an
     933             :   // optimization marker. Otherwise, interpret it as a weak reference to a code
     934             :   // object.
     935          56 :   __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_weak_ref);
     936             : 
     937             :   {
     938             :     // Optimized code slot is a Smi optimization marker.
     939             : 
     940             :     // Fall through if no optimization trigger.
     941             :     __ SmiCompare(optimized_code_entry,
     942          56 :                   Smi::FromEnum(OptimizationMarker::kNone));
     943          56 :     __ j(equal, &fallthrough);
     944             : 
     945             :     // TODO(v8:8394): The logging of first execution will break if
     946             :     // feedback vectors are not allocated. We need to find a different way of
     947             :     // logging these events if required.
     948             :     TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
     949             :                                   OptimizationMarker::kLogFirstExecution,
     950          56 :                                   Runtime::kFunctionFirstExecution);
     951             :     TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
     952             :                                   OptimizationMarker::kCompileOptimized,
     953          56 :                                   Runtime::kCompileOptimized_NotConcurrent);
     954             :     TailCallRuntimeIfMarkerEquals(
     955             :         masm, optimized_code_entry,
     956             :         OptimizationMarker::kCompileOptimizedConcurrent,
     957          56 :         Runtime::kCompileOptimized_Concurrent);
     958             : 
     959             :     {
     960             :       // Otherwise, the marker is InOptimizationQueue, so fall through hoping
     961             :       // that an interrupt will eventually update the slot with optimized code.
     962          56 :       if (FLAG_debug_code) {
     963             :         __ SmiCompare(optimized_code_entry,
     964           0 :                       Smi::FromEnum(OptimizationMarker::kInOptimizationQueue));
     965           0 :         __ Assert(equal, AbortReason::kExpectedOptimizationSentinel);
     966             :       }
     967          56 :       __ jmp(&fallthrough);
     968             :     }
     969             :   }
     970             : 
     971             :   {
     972             :     // Optimized code slot is a weak reference.
     973          56 :     __ bind(&optimized_code_slot_is_weak_ref);
     974             : 
     975          56 :     __ LoadWeakValue(optimized_code_entry, &fallthrough);
     976             : 
     977             :     // Check if the optimized code is marked for deopt. If it is, call the
     978             :     // runtime to clear it.
     979          56 :     Label found_deoptimized_code;
     980             :     __ LoadTaggedPointerField(
     981             :         scratch2,
     982             :         FieldOperand(optimized_code_entry, Code::kCodeDataContainerOffset),
     983          56 :         decompr_scratch_for_debug);
     984             :     __ testl(
     985             :         FieldOperand(scratch2, CodeDataContainer::kKindSpecificFlagsOffset),
     986             :         Immediate(1 << Code::kMarkedForDeoptimizationBit));
     987          56 :     __ j(not_zero, &found_deoptimized_code);
     988             : 
     989             :     // Optimized code is good; get it into the closure and link the closure into
     990             :     // the optimized functions list, then tail call the optimized code.
     991             :     // The feedback vector is no longer used, so re-use it as a scratch
     992             :     // register.
     993             :     ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure,
     994          56 :                                         scratch2, scratch3, feedback_vector);
     995             :     static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
     996          56 :     __ Move(rcx, optimized_code_entry);
     997          56 :     __ JumpCodeObject(rcx);
     998             : 
     999             :     // Optimized code slot contains deoptimized code, evict it and re-enter the
    1000             :     // closure's code.
    1001          56 :     __ bind(&found_deoptimized_code);
    1002          56 :     GenerateTailCallToReturnedCode(masm, Runtime::kEvictOptimizedCodeSlot);
    1003             :   }
    1004             : 
    1005             :   // Fall-through if the optimized code cell is clear and there is no
    1006             :   // optimization marker.
    1007          56 :   __ bind(&fallthrough);
    1008          56 : }
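
The control flow emitted above boils down to a small decision over the contents of the optimized-code slot. A minimal standalone C++ sketch of that decision, using hypothetical stand-in types (Slot, Marker, Action) rather than V8's actual FeedbackVector and Code objects:

// Hypothetical stand-ins for illustration only; these are not V8 types.
enum class Marker { kNone, kLogFirstExecution, kCompileOptimized,
                    kCompileOptimizedConcurrent, kInOptimizationQueue };
enum class Action { kFallThrough, kTailCallRuntime, kTailCallOptimizedCode,
                    kEvictAndReenter };

struct Slot {
  bool is_marker;                  // Smi marker vs. weak reference to code.
  Marker marker;                   // Meaningful only when is_marker is true.
  bool weak_ref_cleared;           // The referenced code object was collected.
  bool marked_for_deoptimization;  // The referenced code object is stale.
};

Action DispatchOptimizedCodeSlot(const Slot& slot) {
  if (slot.is_marker) {
    switch (slot.marker) {
      case Marker::kNone:
      case Marker::kInOptimizationQueue:
        return Action::kFallThrough;      // Keep executing bytecode.
      default:
        return Action::kTailCallRuntime;  // Log first execution or compile.
    }
  }
  if (slot.weak_ref_cleared) return Action::kFallThrough;
  if (slot.marked_for_deoptimization) return Action::kEvictAndReenter;
  return Action::kTailCallOptimizedCode;  // Enter the optimized code.
}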
    1009             : 
    1010             : // Advance the current bytecode offset. This simulates what all bytecode
    1011             : // handlers do upon completion of the underlying operation. Will bail out to a
    1012             : // label if the bytecode (without prefix) is a return bytecode.
    1013         112 : static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
    1014             :                                           Register bytecode_array,
    1015             :                                           Register bytecode_offset,
    1016             :                                           Register bytecode, Register scratch1,
    1017             :                                           Label* if_return) {
    1018         112 :   Register bytecode_size_table = scratch1;
    1019             :   DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
    1020             :                      bytecode));
    1021             : 
    1022             :   __ Move(bytecode_size_table,
    1023         112 :           ExternalReference::bytecode_size_table_address());
    1024             : 
    1025             :   // Check if the bytecode is a Wide or ExtraWide prefix bytecode.
    1026         112 :   Label process_bytecode, extra_wide;
    1027             :   STATIC_ASSERT(0 == static_cast<int>(interpreter::Bytecode::kWide));
    1028             :   STATIC_ASSERT(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
    1029             :   STATIC_ASSERT(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
    1030             :   STATIC_ASSERT(3 ==
    1031             :                 static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
    1032         112 :   __ cmpb(bytecode, Immediate(0x3));
    1033         112 :   __ j(above, &process_bytecode, Label::kNear);
    1034         112 :   __ testb(bytecode, Immediate(0x1));
    1035         112 :   __ j(not_equal, &extra_wide, Label::kNear);
    1036             : 
    1037             :   // Load the next bytecode and update table to the wide scaled table.
    1038             :   __ incl(bytecode_offset);
    1039         224 :   __ movzxbp(bytecode, Operand(bytecode_array, bytecode_offset, times_1, 0));
    1040             :   __ addp(bytecode_size_table,
    1041         112 :           Immediate(kIntSize * interpreter::Bytecodes::kBytecodeCount));
    1042         112 :   __ jmp(&process_bytecode, Label::kNear);
    1043             : 
    1044         112 :   __ bind(&extra_wide);
    1045             :   // Load the next bytecode and update table to the extra wide scaled table.
    1046             :   __ incl(bytecode_offset);
    1047         224 :   __ movzxbp(bytecode, Operand(bytecode_array, bytecode_offset, times_1, 0));
    1048             :   __ addp(bytecode_size_table,
    1049         112 :           Immediate(2 * kIntSize * interpreter::Bytecodes::kBytecodeCount));
    1050             : 
    1051         112 :   __ bind(&process_bytecode);
    1052             : 
    1053             : // Bail out to the return label if this is a return bytecode.
    1054             : #define JUMP_IF_EQUAL(NAME)                                             \
    1055             :   __ cmpb(bytecode,                                                     \
    1056             :           Immediate(static_cast<int>(interpreter::Bytecode::k##NAME))); \
    1057             :   __ j(equal, if_return, Label::kFar);
    1058         224 :   RETURN_BYTECODE_LIST(JUMP_IF_EQUAL)
    1059             : #undef JUMP_IF_EQUAL
    1060             : 
    1061             :   // Otherwise, load the size of the current bytecode and advance the offset.
    1062         112 :   __ addl(bytecode_offset, Operand(bytecode_size_table, bytecode, times_4, 0));
    1063         112 : }
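
The same advance-or-return scheme, sketched as standalone C++ under simplified assumptions (a made-up eight-opcode space instead of V8's real bytecode set): prefix bytecodes 0-3 select a 2x- or 4x-scaled size table for the bytecode that follows them, and a return bytecode tells the caller to bail out.

#include <cstdint>

// Illustrative opcode space and sizes only; the real tables are generated
// from the interpreter's bytecode definitions.
constexpr int kBytecodeCount = 8;
constexpr uint8_t kReturn = 7;  // Hypothetical return opcode.
constexpr int kSizes[3 * kBytecodeCount] = {
    1, 1, 1, 1, 2, 3, 2, 1,    // single-width operands
    1, 1, 1, 1, 3, 5, 3, 1,    // Wide (2x operands)
    1, 1, 1, 1, 5, 9, 5, 1};   // ExtraWide (4x operands)

// Returns the next offset, or -1 if the (unprefixed) bytecode is a return.
int AdvanceOrReturn(const uint8_t* bytecode_array, int offset) {
  int table = 0;
  uint8_t bytecode = bytecode_array[offset];
  if (bytecode <= 3) {               // Wide/ExtraWide (incl. DebugBreak forms).
    table = (bytecode & 1) ? 2 : 1;  // Odd prefixes pick the ExtraWide table.
    bytecode = bytecode_array[++offset];
  }
  if (bytecode == kReturn) return -1;
  return offset + kSizes[table * kBytecodeCount + bytecode];
}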
    1064             : 
    1065             : // Generate code for entering a JS function with the interpreter.
    1066             : // On entry to the function the receiver and arguments have been pushed on the
    1067             : // stack left to right.  The actual argument count matches the formal parameter
    1068             : // count expected by the function.
    1069             : //
    1070             : // The live registers are:
    1071             : //   o rdi: the JS function object being called
    1072             : //   o rdx: the incoming new target or generator object
    1073             : //   o rsi: our context
    1074             : //   o rbp: the caller's frame pointer
    1075             : //   o rsp: stack pointer (pointing to return address)
    1076             : //
    1077             : // The function builds an interpreter frame.  See InterpreterFrameConstants in
    1078             : // frames.h for its layout.
    1079          56 : void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
    1080             :   Register closure = rdi;
    1081             :   Register feedback_vector = rbx;
    1082             :   Register decompr_scratch_for_debug =
    1083          56 :       COMPRESS_POINTERS_BOOL ? kScratchRegister : no_reg;
    1084             : 
    1085             :   // Get the bytecode array from the function object and load it into
    1086             :   // kInterpreterBytecodeArrayRegister.
    1087             :   __ LoadTaggedPointerField(
    1088             :       rax, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset),
    1089          56 :       decompr_scratch_for_debug);
    1090             :   __ LoadTaggedPointerField(
    1091             :       kInterpreterBytecodeArrayRegister,
    1092             :       FieldOperand(rax, SharedFunctionInfo::kFunctionDataOffset),
    1093          56 :       decompr_scratch_for_debug);
    1094             :   GetSharedFunctionInfoBytecode(masm, kInterpreterBytecodeArrayRegister,
    1095          56 :                                 kScratchRegister);
    1096             : 
    1097             :   // The bytecode array could have been flushed from the shared function info;
    1098             :   // if so, call into CompileLazy.
    1099          56 :   Label compile_lazy;
    1100          56 :   __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE, rax);
    1101          56 :   __ j(not_equal, &compile_lazy);
    1102             : 
    1103             :   // Load the feedback vector from the closure.
    1104             :   __ LoadTaggedPointerField(
    1105             :       feedback_vector, FieldOperand(closure, JSFunction::kFeedbackCellOffset),
    1106          56 :       decompr_scratch_for_debug);
    1107             :   __ LoadTaggedPointerField(feedback_vector,
    1108             :                             FieldOperand(feedback_vector, Cell::kValueOffset),
    1109          56 :                             decompr_scratch_for_debug);
    1110             : 
    1111          56 :   Label push_stack_frame;
    1112             :   // Check if feedback vector is valid. If valid, check for optimized code
    1113             :   // and update invocation count. Otherwise, setup the stack frame.
    1114             :   __ JumpIfRoot(feedback_vector, RootIndex::kUndefinedValue, &push_stack_frame);
    1115             : 
    1116             :   // Read off the optimized code slot in the feedback vector, and if there
    1117             :   // is optimized code or an optimization marker, call that instead.
    1118          56 :   MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, rcx, r11, r15);
    1119             : 
    1120             :   // Increment invocation count for the function.
    1121             :   __ incl(
    1122             :       FieldOperand(feedback_vector, FeedbackVector::kInvocationCountOffset));
    1123             : 
    1124             :   // Open a frame scope to indicate that there is a frame on the stack.  The
    1125             :   // MANUAL indicates that the scope shouldn't actually generate code to set up
    1126             :   // the frame (that is done below).
    1127          56 :   __ bind(&push_stack_frame);
    1128         112 :   FrameScope frame_scope(masm, StackFrame::MANUAL);
    1129          56 :   __ pushq(rbp);  // Caller's frame pointer.
    1130             :   __ movp(rbp, rsp);
    1131          56 :   __ Push(rsi);  // Callee's context.
    1132          56 :   __ Push(rdi);  // Callee's JS function.
    1133             : 
    1134             :   // Reset code age.
    1135             :   __ movb(FieldOperand(kInterpreterBytecodeArrayRegister,
    1136             :                        BytecodeArray::kBytecodeAgeOffset),
    1137          56 :           Immediate(BytecodeArray::kNoAgeBytecodeAge));
    1138             : 
    1139             :   // Load initial bytecode offset.
    1140             :   __ movp(kInterpreterBytecodeOffsetRegister,
    1141             :           Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
    1142             : 
    1143             :   // Push bytecode array and Smi tagged bytecode offset.
    1144          56 :   __ Push(kInterpreterBytecodeArrayRegister);
    1145          56 :   __ SmiTag(rcx, kInterpreterBytecodeOffsetRegister);
    1146          56 :   __ Push(rcx);
    1147             : 
    1148             :   // Allocate the local and temporary register file on the stack.
    1149             :   {
    1150             :     // Load frame size from the BytecodeArray object.
    1151             :     __ movl(rcx, FieldOperand(kInterpreterBytecodeArrayRegister,
    1152             :                               BytecodeArray::kFrameSizeOffset));
    1153             : 
    1154             :     // Do a stack check to ensure we don't go over the limit.
    1155          56 :     Label ok;
    1156             :     __ movp(rax, rsp);
    1157          56 :     __ subp(rax, rcx);
    1158          56 :     __ CompareRoot(rax, RootIndex::kRealStackLimit);
    1159          56 :     __ j(above_equal, &ok, Label::kNear);
    1160          56 :     __ CallRuntime(Runtime::kThrowStackOverflow);
    1161          56 :     __ bind(&ok);
    1162             : 
    1163             :     // If ok, push undefined as the initial value for all register file entries.
    1164          56 :     Label loop_header;
    1165          56 :     Label loop_check;
    1166          56 :     __ LoadRoot(rax, RootIndex::kUndefinedValue);
    1167          56 :     __ j(always, &loop_check, Label::kNear);
    1168          56 :     __ bind(&loop_header);
    1169             :     // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    1170          56 :     __ Push(rax);
    1171             :     // Continue loop if not done.
    1172          56 :     __ bind(&loop_check);
    1173          56 :     __ subp(rcx, Immediate(kSystemPointerSize));
    1174          56 :     __ j(greater_equal, &loop_header, Label::kNear);
    1175             :   }
    1176             : 
    1177             :   // If the bytecode array has a valid incoming new target or generator object
    1178             :   // register, initialize it with the incoming value, which was passed in rdx.
    1179          56 :   Label no_incoming_new_target_or_generator_register;
    1180             :   __ movsxlq(
    1181             :       rax,
    1182             :       FieldOperand(kInterpreterBytecodeArrayRegister,
    1183          56 :                    BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
    1184             :   __ testl(rax, rax);
    1185          56 :   __ j(zero, &no_incoming_new_target_or_generator_register, Label::kNear);
    1186         112 :   __ movp(Operand(rbp, rax, times_pointer_size, 0), rdx);
    1187          56 :   __ bind(&no_incoming_new_target_or_generator_register);
    1188             : 
    1189             :   // Load accumulator with undefined.
    1190          56 :   __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
    1191             : 
    1192             :   // Load the dispatch table into a register and dispatch to the bytecode
    1193             :   // handler at the current bytecode offset.
    1194          56 :   Label do_dispatch;
    1195          56 :   __ bind(&do_dispatch);
    1196             :   __ Move(
    1197             :       kInterpreterDispatchTableRegister,
    1198         112 :       ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
    1199             :   __ movzxbp(r11, Operand(kInterpreterBytecodeArrayRegister,
    1200         112 :                           kInterpreterBytecodeOffsetRegister, times_1, 0));
    1201             :   __ movp(
    1202             :       kJavaScriptCallCodeStartRegister,
    1203         112 :       Operand(kInterpreterDispatchTableRegister, r11, times_pointer_size, 0));
    1204          56 :   __ call(kJavaScriptCallCodeStartRegister);
    1205         112 :   masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());
    1206             : 
    1207             :   // Any returns to the entry trampoline are either due to the return bytecode
    1208             :   // or the interpreter tail calling a builtin and then a dispatch.
    1209             : 
    1210             :   // Get bytecode array and bytecode offset from the stack frame.
    1211             :   __ movp(kInterpreterBytecodeArrayRegister,
    1212         112 :           Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
    1213             :   __ movp(kInterpreterBytecodeOffsetRegister,
    1214         112 :           Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
    1215             :   __ SmiUntag(kInterpreterBytecodeOffsetRegister,
    1216          56 :               kInterpreterBytecodeOffsetRegister);
    1217             : 
    1218             :   // Either return, or advance to the next bytecode and dispatch.
    1219          56 :   Label do_return;
    1220             :   __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
    1221         112 :                           kInterpreterBytecodeOffsetRegister, times_1, 0));
    1222             :   AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
    1223             :                                 kInterpreterBytecodeOffsetRegister, rbx, rcx,
    1224          56 :                                 &do_return);
    1225          56 :   __ jmp(&do_dispatch);
    1226             : 
    1227          56 :   __ bind(&do_return);
    1228             :   // The return value is in rax.
    1229          56 :   LeaveInterpreterFrame(masm, rbx, rcx);
    1230          56 :   __ ret(0);
    1231             : 
    1232          56 :   __ bind(&compile_lazy);
    1233          56 :   GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
    1234          56 :   __ int3();  // Should not return.
    1235          56 : }
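
The dispatch loop that this trampoline sets up (load the current bytecode, index the dispatch table, call the handler, repeat) is easier to see in ordinary C++. A minimal sketch with three invented bytecodes; the Frame struct, opcodes and handlers are illustrative, not V8's:

#include <cstdint>
#include <cstdio>

struct Frame { const uint8_t* bytecode; int offset; int accumulator; };
using Handler = void (*)(Frame&);

void HandleLdaZero(Frame& f) { f.accumulator = 0; ++f.offset; }
void HandleInc(Frame& f)     { ++f.accumulator; ++f.offset; }
void HandleReturn(Frame& f)  { f.offset = -1; }  // Tells the loop to leave.

// The moral equivalent of the dispatch table addressed through
// kInterpreterDispatchTableRegister: one handler per bytecode value.
constexpr Handler kDispatchTable[] = {HandleLdaZero, HandleInc, HandleReturn};

int Run(const uint8_t* bytecode) {
  Frame f{bytecode, 0, 0};
  while (f.offset >= 0) kDispatchTable[f.bytecode[f.offset]](f);
  return f.accumulator;
}

int main() {
  const uint8_t program[] = {0, 1, 1, 2};  // LdaZero; Inc; Inc; Return.
  std::printf("%d\n", Run(program));       // Prints 2.
  return 0;
}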
    1236             : 
    1237         336 : static void Generate_InterpreterPushArgs(MacroAssembler* masm,
    1238             :                                          Register num_args,
    1239             :                                          Register start_address,
    1240             :                                          Register scratch) {
    1241             :   // Find the address of the last argument.
    1242         336 :   __ Move(scratch, num_args);
    1243         336 :   __ shlp(scratch, Immediate(kSystemPointerSizeLog2));
    1244             :   __ negp(scratch);
    1245         336 :   __ addp(scratch, start_address);
    1246             : 
    1247             :   // Push the arguments.
    1248         336 :   Label loop_header, loop_check;
    1249         336 :   __ j(always, &loop_check, Label::kNear);
    1250         336 :   __ bind(&loop_header);
    1251         336 :   __ Push(Operand(start_address, 0));
    1252         336 :   __ subp(start_address, Immediate(kSystemPointerSize));
    1253         336 :   __ bind(&loop_check);
    1254         336 :   __ cmpp(start_address, scratch);
    1255         336 :   __ j(greater, &loop_header, Label::kNear);
    1256         336 : }
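
What the loop above does, sketched with an explicit vector standing in for the machine stack (the names mirror the registers only loosely, and the 8-byte slot size is abstracted away): num_args values are read from descending addresses starting at start_address and pushed in that order.

#include <cstdint>
#include <vector>

void PushArgs(std::vector<uint64_t>& stack, const uint64_t* start_address,
              int num_args) {
  // The assembly computes scratch = start_address - num_args slots and keeps
  // pushing *start_address while start_address > scratch; this is that walk.
  for (int i = 0; i < num_args; ++i) stack.push_back(start_address[-i]);
}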
    1257             : 
    1258             : // static
    1259         168 : void Builtins::Generate_InterpreterPushArgsThenCallImpl(
    1260             :     MacroAssembler* masm, ConvertReceiverMode receiver_mode,
    1261             :     InterpreterPushArgsMode mode) {
    1262             :   DCHECK(mode != InterpreterPushArgsMode::kArrayFunction);
    1263             :   // ----------- S t a t e -------------
    1264             :   //  -- rax : the number of arguments (not including the receiver)
    1265             :   //  -- rbx : the address of the first argument to be pushed. Subsequent
    1266             :   //           arguments should be consecutive above this, in the same order as
    1267             :   //           they are to be pushed onto the stack.
    1268             :   //  -- rdi : the target to call (can be any Object).
    1269             :   // -----------------------------------
    1270         168 :   Label stack_overflow;
    1271             : 
    1272             :   // Number of values to be pushed.
    1273         336 :   __ leal(rcx, Operand(rax, 1));  // Add one for receiver.
    1274             : 
    1275             :   // Add a stack check before pushing arguments.
    1276         168 :   Generate_StackOverflowCheck(masm, rcx, rdx, &stack_overflow);
    1277             : 
    1278             :   // Pop return address to allow tail-call after pushing arguments.
    1279             :   __ PopReturnAddressTo(kScratchRegister);
    1280             : 
    1281             :   // Push "undefined" as the receiver arg if we need to.
    1282         168 :   if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
    1283          56 :     __ PushRoot(RootIndex::kUndefinedValue);
    1284             :     __ decl(rcx);  // Subtract one for receiver.
    1285             :   }
    1286             : 
    1287             :   // rbx and rdx will be modified.
    1288         168 :   Generate_InterpreterPushArgs(masm, rcx, rbx, rdx);
    1289             : 
    1290         168 :   if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    1291          56 :     __ Pop(rbx);                 // Pass the spread in a register
    1292             :     __ decl(rax);                // Subtract one for spread
    1293             :   }
    1294             : 
    1295             :   // Call the target.
    1296             :   __ PushReturnAddressFrom(kScratchRegister);  // Re-push return address.
    1297             : 
    1298         168 :   if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    1299             :     __ Jump(BUILTIN_CODE(masm->isolate(), CallWithSpread),
    1300          56 :             RelocInfo::CODE_TARGET);
    1301             :   } else {
    1302             :     __ Jump(masm->isolate()->builtins()->Call(receiver_mode),
    1303         112 :             RelocInfo::CODE_TARGET);
    1304             :   }
    1305             : 
    1306             :   // Throw stack overflow exception.
    1307         168 :   __ bind(&stack_overflow);
    1308             :   {
    1309         168 :     __ TailCallRuntime(Runtime::kThrowStackOverflow);
    1310             :     // This should be unreachable.
    1311         168 :     __ int3();
    1312             :   }
    1313         168 : }
    1314             : 
    1315             : // static
    1316         168 : void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
    1317             :     MacroAssembler* masm, InterpreterPushArgsMode mode) {
    1318             :   // ----------- S t a t e -------------
    1319             :   //  -- rax : the number of arguments (not including the receiver)
    1320             :   //  -- rdx : the new target (either the same as the constructor or
    1321             :   //           the JSFunction on which new was invoked initially)
    1322             :   //  -- rdi : the constructor to call (can be any Object)
    1323             :   //  -- rbx : the allocation site feedback if available, undefined otherwise
    1324             :   //  -- rcx : the address of the first argument to be pushed. Subsequent
    1325             :   //           arguments should be consecutive above this, in the same order as
    1326             :   //           they are to be pushed onto the stack.
    1327             :   // -----------------------------------
    1328         168 :   Label stack_overflow;
    1329             : 
    1330             :   // Add a stack check before pushing arguments.
    1331         168 :   Generate_StackOverflowCheck(masm, rax, r8, &stack_overflow);
    1332             : 
    1333             :   // Pop return address to allow tail-call after pushing arguments.
    1334             :   __ PopReturnAddressTo(kScratchRegister);
    1335             : 
    1336             :   // Push slot for the receiver to be constructed.
    1337         168 :   __ Push(Immediate(0));
    1338             : 
    1339             :   // rcx and r8 will be modified.
    1340         168 :   Generate_InterpreterPushArgs(masm, rax, rcx, r8);
    1341             : 
    1342         168 :   if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    1343          56 :     __ Pop(rbx);                 // Pass the spread in a register
    1344          56 :     __ decl(rax);                // Subtract one for spread
    1345             : 
    1346             :     // Push return address in preparation for the tail-call.
    1347             :     __ PushReturnAddressFrom(kScratchRegister);
    1348             :   } else {
    1349             :     __ PushReturnAddressFrom(kScratchRegister);
    1350         112 :     __ AssertUndefinedOrAllocationSite(rbx);
    1351             :   }
    1352             : 
    1353         168 :   if (mode == InterpreterPushArgsMode::kArrayFunction) {
    1354             :     // Tail call to the array construct stub (still in the caller
    1355             :     // context at this point).
    1356          56 :     __ AssertFunction(rdi);
    1357             :     // Jump to the constructor function (rax, rbx, rdx passed on).
    1358         168 :     Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl);
    1359          56 :     __ Jump(code, RelocInfo::CODE_TARGET);
    1360         112 :   } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    1361             :     // Call the constructor (rax, rdx, rdi passed on).
    1362             :     __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
    1363          56 :             RelocInfo::CODE_TARGET);
    1364             :   } else {
    1365             :     DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
    1366             :     // Call the constructor (rax, rdx, rdi passed on).
    1367          56 :     __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
    1368             :   }
    1369             : 
    1370             :   // Throw stack overflow exception.
    1371         168 :   __ bind(&stack_overflow);
    1372             :   {
    1373         168 :     __ TailCallRuntime(Runtime::kThrowStackOverflow);
    1374             :     // This should be unreachable.
    1375         168 :     __ int3();
    1376             :   }
    1377         168 : }
    1378             : 
    1379         112 : static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
    1380             :   // Set the return address to the correct point in the interpreter entry
    1381             :   // trampoline.
    1382         112 :   Label builtin_trampoline, trampoline_loaded;
    1383             :   Smi interpreter_entry_return_pc_offset(
    1384         336 :       masm->isolate()->heap()->interpreter_entry_return_pc_offset());
    1385             :   DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
    1386             : 
    1387             :   Register decompr_scratch_for_debug =
    1388         112 :       COMPRESS_POINTERS_BOOL ? kScratchRegister : no_reg;
    1389             : 
    1390             :   // If the SFI function_data is an InterpreterData, the function will have a
    1391             :   // custom copy of the interpreter entry trampoline for profiling. If so,
    1392             :   // get the custom trampoline; otherwise, grab the entry address of the global
    1393             :   // trampoline.
    1394         224 :   __ movp(rbx, Operand(rbp, StandardFrameConstants::kFunctionOffset));
    1395             :   __ LoadTaggedPointerField(
    1396             :       rbx, FieldOperand(rbx, JSFunction::kSharedFunctionInfoOffset),
    1397         112 :       decompr_scratch_for_debug);
    1398             :   __ LoadTaggedPointerField(
    1399             :       rbx, FieldOperand(rbx, SharedFunctionInfo::kFunctionDataOffset),
    1400         112 :       decompr_scratch_for_debug);
    1401         112 :   __ CmpObjectType(rbx, INTERPRETER_DATA_TYPE, kScratchRegister);
    1402         112 :   __ j(not_equal, &builtin_trampoline, Label::kNear);
    1403             : 
    1404             :   __ movp(rbx,
    1405             :           FieldOperand(rbx, InterpreterData::kInterpreterTrampolineOffset));
    1406         112 :   __ addp(rbx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    1407         112 :   __ jmp(&trampoline_loaded, Label::kNear);
    1408             : 
    1409         112 :   __ bind(&builtin_trampoline);
    1410             :   // TODO(jgruber): Replace this by a lookup in the builtin entry table.
    1411             :   __ movp(rbx,
    1412             :           __ ExternalReferenceAsOperand(
    1413             :               ExternalReference::
    1414             :                   address_of_interpreter_entry_trampoline_instruction_start(
    1415             :                       masm->isolate()),
    1416         112 :               kScratchRegister));
    1417             : 
    1418         112 :   __ bind(&trampoline_loaded);
    1419         112 :   __ addp(rbx, Immediate(interpreter_entry_return_pc_offset->value()));
    1420         112 :   __ Push(rbx);
    1421             : 
    1422             :   // Initialize dispatch table register.
    1423             :   __ Move(
    1424             :       kInterpreterDispatchTableRegister,
    1425         112 :       ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
    1426             : 
    1427             :   // Get the bytecode array pointer from the frame.
    1428             :   __ movp(kInterpreterBytecodeArrayRegister,
    1429         224 :           Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
    1430             : 
    1431         112 :   if (FLAG_debug_code) {
    1432             :     // Check function data field is actually a BytecodeArray object.
    1433           0 :     __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    1434             :     __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
    1435           0 :                      rbx);
    1436             :     __ Assert(
    1437             :         equal,
    1438           0 :         AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    1439             :   }
    1440             : 
    1441             :   // Get the target bytecode offset from the frame.
    1442             :   __ movp(kInterpreterBytecodeOffsetRegister,
    1443         224 :           Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
    1444             :   __ SmiUntag(kInterpreterBytecodeOffsetRegister,
    1445         112 :               kInterpreterBytecodeOffsetRegister);
    1446             : 
    1447             :   // Dispatch to the target bytecode.
    1448             :   __ movzxbp(r11, Operand(kInterpreterBytecodeArrayRegister,
    1449         224 :                           kInterpreterBytecodeOffsetRegister, times_1, 0));
    1450             :   __ movp(
    1451             :       kJavaScriptCallCodeStartRegister,
    1452         224 :       Operand(kInterpreterDispatchTableRegister, r11, times_pointer_size, 0));
    1453         112 :   __ jmp(kJavaScriptCallCodeStartRegister);
    1454         112 : }
    1455             : 
    1456          56 : void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
    1457             :   // Get bytecode array and bytecode offset from the stack frame.
    1458             :   __ movp(kInterpreterBytecodeArrayRegister,
    1459         112 :           Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
    1460             :   __ movp(kInterpreterBytecodeOffsetRegister,
    1461         112 :           Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
    1462             :   __ SmiUntag(kInterpreterBytecodeOffsetRegister,
    1463          56 :               kInterpreterBytecodeOffsetRegister);
    1464             : 
    1465             :   // Load the current bytecode.
    1466             :   __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
    1467         112 :                           kInterpreterBytecodeOffsetRegister, times_1, 0));
    1468             : 
    1469             :   // Advance to the next bytecode.
    1470          56 :   Label if_return;
    1471             :   AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
    1472             :                                 kInterpreterBytecodeOffsetRegister, rbx, rcx,
    1473          56 :                                 &if_return);
    1474             : 
    1475             :   // Convert the new bytecode offset to a Smi and save it in the stack frame.
    1476          56 :   __ SmiTag(rbx, kInterpreterBytecodeOffsetRegister);
    1477         112 :   __ movp(Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp), rbx);
    1478             : 
    1479          56 :   Generate_InterpreterEnterBytecode(masm);
    1480             : 
    1481             :   // We should never take the if_return path.
    1482          56 :   __ bind(&if_return);
    1483          56 :   __ Abort(AbortReason::kInvalidBytecodeAdvance);
    1484          56 : }
    1485             : 
    1486          56 : void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
    1487          56 :   Generate_InterpreterEnterBytecode(masm);
    1488          56 : }
    1489             : 
    1490          56 : void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
    1491             :   // ----------- S t a t e -------------
    1492             :   //  -- rax : argument count (preserved for callee)
    1493             :   //  -- rdx : new target (preserved for callee)
    1494             :   //  -- rdi : target function (preserved for callee)
    1495             :   // -----------------------------------
    1496          56 :   Label failed;
    1497             :   {
    1498          56 :     FrameScope scope(masm, StackFrame::INTERNAL);
    1499             :     // Preserve argument count for later compare.
    1500          56 :     __ movp(rcx, rax);
    1501             :     // Push the number of arguments to the callee.
    1502          56 :     __ SmiTag(rax, rax);
    1503          56 :     __ Push(rax);
    1504             :     // Push a copy of the target function and the new target.
    1505          56 :     __ Push(rdi);
    1506          56 :     __ Push(rdx);
    1507             : 
    1508             :     // The function.
    1509          56 :     __ Push(rdi);
    1510             :     // Copy arguments from caller (stdlib, foreign, heap).
    1511          56 :     Label args_done;
    1512         280 :     for (int j = 0; j < 4; ++j) {
    1513         224 :       Label over;
    1514         224 :       if (j < 3) {
    1515         168 :         __ cmpp(rcx, Immediate(j));
    1516         168 :         __ j(not_equal, &over, Label::kNear);
    1517             :       }
    1518         560 :       for (int i = j - 1; i >= 0; --i) {
    1519             :         __ Push(Operand(rbp, StandardFrameConstants::kCallerSPOffset +
    1520         336 :                                  i * kSystemPointerSize));
    1521             :       }
    1522         336 :       for (int i = 0; i < 3 - j; ++i) {
    1523         336 :         __ PushRoot(RootIndex::kUndefinedValue);
    1524             :       }
    1525         224 :       if (j < 3) {
    1526         168 :         __ jmp(&args_done, Label::kNear);
    1527         168 :         __ bind(&over);
    1528             :       }
    1529             :     }
    1530          56 :     __ bind(&args_done);
    1531             : 
    1532             :     // Call runtime, on success unwind frame, and parent frame.
    1533             :     __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
    1534             :     // A smi 0 is returned on failure, an object on success.
    1535          56 :     __ JumpIfSmi(rax, &failed, Label::kNear);
    1536             : 
    1537          56 :     __ Drop(2);
    1538          56 :     __ Pop(rcx);
    1539          56 :     __ SmiUntag(rcx, rcx);
    1540          56 :     scope.GenerateLeaveFrame();
    1541             : 
    1542             :     __ PopReturnAddressTo(rbx);
    1543             :     __ incp(rcx);
    1544         112 :     __ leap(rsp, Operand(rsp, rcx, times_pointer_size, 0));
    1545             :     __ PushReturnAddressFrom(rbx);
    1546          56 :     __ ret(0);
    1547             : 
    1548          56 :     __ bind(&failed);
    1549             :     // Restore target function and new target.
    1550          56 :     __ Pop(rdx);
    1551          56 :     __ Pop(rdi);
    1552          56 :     __ Pop(rax);
    1553          56 :     __ SmiUntag(rax, rax);
    1554             :   }
    1555             :   // On failure, tail call back to regular js by re-calling the function
    1556             :   // which has be reset to the compile lazy builtin.
    1557             :   Register decompr_scratch_for_debug =
    1558          56 :       COMPRESS_POINTERS_BOOL ? kScratchRegister : no_reg;
    1559             :   __ LoadTaggedPointerField(rcx, FieldOperand(rdi, JSFunction::kCodeOffset),
    1560          56 :                             decompr_scratch_for_debug);
    1561          56 :   __ JumpCodeObject(rcx);
    1562          56 : }
    1563             : 
    1564             : namespace {
    1565         224 : void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
    1566             :                                       bool java_script_builtin,
    1567             :                                       bool with_result) {
    1568        2912 :   const RegisterConfiguration* config(RegisterConfiguration::Default());
    1569             :   int allocatable_register_count = config->num_allocatable_general_registers();
    1570         224 :   if (with_result) {
    1571             :     // Overwrite the hole inserted by the deoptimizer with the return value from
    1572             :     // the LAZY deopt point.
    1573             :     __ movq(
    1574             :         Operand(rsp, config->num_allocatable_general_registers() *
    1575             :                              kSystemPointerSize +
    1576             :                          BuiltinContinuationFrameConstants::kFixedFrameSize),
    1577         224 :         rax);
    1578             :   }
    1579        2912 :   for (int i = allocatable_register_count - 1; i >= 0; --i) {
    1580             :     int code = config->GetAllocatableGeneralCode(i);
    1581        2688 :     __ popq(Register::from_code(code));
    1582        2688 :     if (java_script_builtin && code == kJavaScriptCallArgCountRegister.code()) {
    1583         112 :       __ SmiUntag(Register::from_code(code), Register::from_code(code));
    1584             :     }
    1585             :   }
    1586             :   __ movq(
    1587             :       rbp,
    1588         448 :       Operand(rsp, BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
    1589             :   const int offsetToPC =
    1590             :       BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp -
    1591             :       kSystemPointerSize;
    1592         224 :   __ popq(Operand(rsp, offsetToPC));
    1593         224 :   __ Drop(offsetToPC / kSystemPointerSize);
    1594         224 :   __ addq(Operand(rsp, 0), Immediate(Code::kHeaderSize - kHeapObjectTag));
    1595         224 :   __ Ret();
    1596         224 : }
    1597             : }  // namespace
    1598             : 
    1599          56 : void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
    1600          56 :   Generate_ContinueToBuiltinHelper(masm, false, false);
    1601          56 : }
    1602             : 
    1603          56 : void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
    1604             :     MacroAssembler* masm) {
    1605          56 :   Generate_ContinueToBuiltinHelper(masm, false, true);
    1606          56 : }
    1607             : 
    1608          56 : void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
    1609          56 :   Generate_ContinueToBuiltinHelper(masm, true, false);
    1610          56 : }
    1611             : 
    1612          56 : void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
    1613             :     MacroAssembler* masm) {
    1614          56 :   Generate_ContinueToBuiltinHelper(masm, true, true);
    1615          56 : }
    1616             : 
    1617          56 : void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
    1618             :   // Enter an internal frame.
    1619             :   {
    1620          56 :     FrameScope scope(masm, StackFrame::INTERNAL);
    1621          56 :     __ CallRuntime(Runtime::kNotifyDeoptimized);
    1622             :     // Tear down internal frame.
    1623             :   }
    1624             : 
    1625             :   DCHECK_EQ(kInterpreterAccumulatorRegister.code(), rax.code());
    1626         112 :   __ movp(rax, Operand(rsp, kPCOnStackSize));
    1627          56 :   __ ret(1 * kSystemPointerSize);  // Remove rax.
    1628          56 : }
    1629             : 
    1630             : // static
    1631          56 : void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
    1632             :   // ----------- S t a t e -------------
    1633             :   //  -- rax     : argc
    1634             :   //  -- rsp[0]  : return address
    1635             :   //  -- rsp[8]  : argArray
    1636             :   //  -- rsp[16] : thisArg
    1637             :   //  -- rsp[24] : receiver
    1638             :   // -----------------------------------
    1639             : 
    1640             :   // 1. Load receiver into rdi, argArray into rbx (if present), remove all
    1641             :   // arguments from the stack (including the receiver), and push thisArg (if
    1642             :   // present) instead.
    1643             :   {
    1644          56 :     Label no_arg_array, no_this_arg;
    1645             :     StackArgumentsAccessor args(rsp, rax);
    1646          56 :     __ LoadRoot(rdx, RootIndex::kUndefinedValue);
    1647          56 :     __ movp(rbx, rdx);
    1648             :     __ movp(rdi, args.GetReceiverOperand());
    1649             :     __ testp(rax, rax);
    1650          56 :     __ j(zero, &no_this_arg, Label::kNear);
    1651             :     {
    1652          56 :       __ movp(rdx, args.GetArgumentOperand(1));
    1653          56 :       __ cmpp(rax, Immediate(1));
    1654          56 :       __ j(equal, &no_arg_array, Label::kNear);
    1655          56 :       __ movp(rbx, args.GetArgumentOperand(2));
    1656          56 :       __ bind(&no_arg_array);
    1657             :     }
    1658          56 :     __ bind(&no_this_arg);
    1659             :     __ PopReturnAddressTo(rcx);
    1660         112 :     __ leap(rsp, Operand(rsp, rax, times_pointer_size, kSystemPointerSize));
    1661          56 :     __ Push(rdx);
    1662             :     __ PushReturnAddressFrom(rcx);
    1663             :   }
    1664             : 
    1665             :   // ----------- S t a t e -------------
    1666             :   //  -- rbx     : argArray
    1667             :   //  -- rdi     : receiver
    1668             :   //  -- rsp[0]  : return address
    1669             :   //  -- rsp[8]  : thisArg
    1670             :   // -----------------------------------
    1671             : 
    1672             :   // 2. We don't need to check explicitly for callable receiver here,
    1673             :   // since that's the first thing the Call/CallWithArrayLike builtins
    1674             :   // will do.
    1675             : 
    1676             :   // 3. Tail call with no arguments if argArray is null or undefined.
    1677          56 :   Label no_arguments;
    1678             :   __ JumpIfRoot(rbx, RootIndex::kNullValue, &no_arguments, Label::kNear);
    1679             :   __ JumpIfRoot(rbx, RootIndex::kUndefinedValue, &no_arguments, Label::kNear);
    1680             : 
    1681             :   // 4a. Apply the receiver to the given argArray.
    1682             :   __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
    1683         112 :           RelocInfo::CODE_TARGET);
    1684             : 
    1685             :   // 4b. The argArray is either null or undefined, so we tail call without any
    1686             :   // arguments to the receiver. Since we did not create a frame for
    1687             :   // Function.prototype.apply() yet, we use a normal Call builtin here.
    1688          56 :   __ bind(&no_arguments);
    1689             :   {
    1690          56 :     __ Set(rax, 0);
    1691          56 :     __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
    1692             :   }
    1693          56 : }
    1694             : 
    1695             : // static
    1696          56 : void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
    1697             :   // Stack Layout:
    1698             :   // rsp[0]           : Return address
    1699             :   // rsp[8]           : Argument n
    1700             :   // rsp[16]          : Argument n-1
    1701             :   //  ...
    1702             :   // rsp[8 * n]       : Argument 1
    1703             :   // rsp[8 * (n + 1)] : Receiver (callable to call)
    1704             :   //
    1705             :   // rax contains the number of arguments, n, not counting the receiver.
    1706             :   //
    1707             :   // 1. Make sure we have at least one argument.
    1708             :   {
    1709          56 :     Label done;
    1710          56 :     __ testp(rax, rax);
    1711          56 :     __ j(not_zero, &done, Label::kNear);
    1712             :     __ PopReturnAddressTo(rbx);
    1713          56 :     __ PushRoot(RootIndex::kUndefinedValue);
    1714             :     __ PushReturnAddressFrom(rbx);
    1715             :     __ incp(rax);
    1716          56 :     __ bind(&done);
    1717             :   }
    1718             : 
    1719             :   // 2. Get the callable to call (passed as receiver) from the stack.
    1720             :   {
    1721             :     StackArgumentsAccessor args(rsp, rax);
    1722             :     __ movp(rdi, args.GetReceiverOperand());
    1723             :   }
    1724             : 
    1725             :   // 3. Shift arguments and return address one slot down on the stack
    1726             :   //    (overwriting the original receiver).  Adjust argument count to make
    1727             :   //    the original first argument the new receiver.
    1728             :   {
    1729          56 :     Label loop;
    1730             :     __ movp(rcx, rax);
    1731             :     StackArgumentsAccessor args(rsp, rcx);
    1732          56 :     __ bind(&loop);
    1733          56 :     __ movp(rbx, args.GetArgumentOperand(1));
    1734          56 :     __ movp(args.GetArgumentOperand(0), rbx);
    1735             :     __ decp(rcx);
    1736          56 :     __ j(not_zero, &loop);              // While non-zero.
    1737          56 :     __ DropUnderReturnAddress(1, rbx);  // Drop one slot under return address.
    1738             :     __ decp(rax);  // One fewer argument (first argument is new receiver).
    1739             :   }
    1740             : 
    1741             :   // 4. Call the callable.
    1742             :   // Since we did not create a frame for Function.prototype.call() yet,
    1743             :   // we use a normal Call builtin here.
    1744          56 :   __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
    1745          56 : }
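
Abstracting away the downward-growing machine stack, step 3 amounts to sliding every argument one slot over the receiver and shrinking the count by one. A sketch under that simplification, with a plain vector in place of the stack and the receiver at index 0:

#include <cstdint>
#include <vector>

// slots[0] is the receiver (the callable); slots[1..argc] are the arguments.
// Returns the new argument count.
int ShiftArgumentsOverReceiver(std::vector<uint64_t>& slots, int argc) {
  for (int i = 0; i < argc; ++i) slots[i] = slots[i + 1];  // Overwrite receiver.
  slots.pop_back();  // Drop the now-duplicated last slot.
  return argc - 1;   // The old first argument becomes the new receiver.
}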
    1746             : 
    1747          56 : void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
    1748             :   // ----------- S t a t e -------------
    1749             :   //  -- rax     : argc
    1750             :   //  -- rsp[0]  : return address
    1751             :   //  -- rsp[8]  : argumentsList
    1752             :   //  -- rsp[16] : thisArgument
    1753             :   //  -- rsp[24] : target
    1754             :   //  -- rsp[32] : receiver
    1755             :   // -----------------------------------
    1756             : 
    1757             :   // 1. Load target into rdi (if present), argumentsList into rbx (if present),
    1758             :   // remove all arguments from the stack (including the receiver), and push
    1759             :   // thisArgument (if present) instead.
    1760             :   {
    1761          56 :     Label done;
    1762             :     StackArgumentsAccessor args(rsp, rax);
    1763          56 :     __ LoadRoot(rdi, RootIndex::kUndefinedValue);
    1764          56 :     __ movp(rdx, rdi);
    1765             :     __ movp(rbx, rdi);
    1766          56 :     __ cmpp(rax, Immediate(1));
    1767          56 :     __ j(below, &done, Label::kNear);
    1768          56 :     __ movp(rdi, args.GetArgumentOperand(1));  // target
    1769          56 :     __ j(equal, &done, Label::kNear);
    1770          56 :     __ movp(rdx, args.GetArgumentOperand(2));  // thisArgument
    1771          56 :     __ cmpp(rax, Immediate(3));
    1772          56 :     __ j(below, &done, Label::kNear);
    1773          56 :     __ movp(rbx, args.GetArgumentOperand(3));  // argumentsList
    1774          56 :     __ bind(&done);
    1775             :     __ PopReturnAddressTo(rcx);
    1776         112 :     __ leap(rsp, Operand(rsp, rax, times_pointer_size, kSystemPointerSize));
    1777          56 :     __ Push(rdx);
    1778             :     __ PushReturnAddressFrom(rcx);
    1779             :   }
    1780             : 
    1781             :   // ----------- S t a t e -------------
    1782             :   //  -- rbx     : argumentsList
    1783             :   //  -- rdi     : target
    1784             :   //  -- rsp[0]  : return address
    1785             :   //  -- rsp[8]  : thisArgument
    1786             :   // -----------------------------------
    1787             : 
    1788             :   // 2. We don't need to check explicitly for callable target here,
    1789             :   // since that's the first thing the Call/CallWithArrayLike builtins
    1790             :   // will do.
    1791             : 
    1792             :   // 3. Apply the target to the given argumentsList.
    1793             :   __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
    1794          56 :           RelocInfo::CODE_TARGET);
    1795          56 : }
    1796             : 
    1797          56 : void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
    1798             :   // ----------- S t a t e -------------
    1799             :   //  -- rax     : argc
    1800             :   //  -- rsp[0]  : return address
    1801             :   //  -- rsp[8]  : new.target (optional)
    1802             :   //  -- rsp[16] : argumentsList
    1803             :   //  -- rsp[24] : target
    1804             :   //  -- rsp[32] : receiver
    1805             :   // -----------------------------------
    1806             : 
    1807             :   // 1. Load target into rdi (if present), argumentsList into rbx (if present),
    1808             :   // new.target into rdx (if present, otherwise use target), remove all
    1809             :   // arguments from the stack (including the receiver), and push thisArgument
    1810             :   // (if present) instead.
    1811             :   {
    1812          56 :     Label done;
    1813             :     StackArgumentsAccessor args(rsp, rax);
    1814          56 :     __ LoadRoot(rdi, RootIndex::kUndefinedValue);
    1815          56 :     __ movp(rdx, rdi);
    1816             :     __ movp(rbx, rdi);
    1817          56 :     __ cmpp(rax, Immediate(1));
    1818          56 :     __ j(below, &done, Label::kNear);
    1819          56 :     __ movp(rdi, args.GetArgumentOperand(1));  // target
    1820             :     __ movp(rdx, rdi);                         // new.target defaults to target
    1821          56 :     __ j(equal, &done, Label::kNear);
    1822          56 :     __ movp(rbx, args.GetArgumentOperand(2));  // argumentsList
    1823          56 :     __ cmpp(rax, Immediate(3));
    1824          56 :     __ j(below, &done, Label::kNear);
    1825          56 :     __ movp(rdx, args.GetArgumentOperand(3));  // new.target
    1826          56 :     __ bind(&done);
    1827             :     __ PopReturnAddressTo(rcx);
    1828         112 :     __ leap(rsp, Operand(rsp, rax, times_pointer_size, kSystemPointerSize));
    1829          56 :     __ PushRoot(RootIndex::kUndefinedValue);
    1830             :     __ PushReturnAddressFrom(rcx);
    1831             :   }
    1832             : 
    1833             :   // ----------- S t a t e -------------
    1834             :   //  -- rbx     : argumentsList
    1835             :   //  -- rdx     : new.target
    1836             :   //  -- rdi     : target
    1837             :   //  -- rsp[0]  : return address
    1838             :   //  -- rsp[8]  : receiver (undefined)
    1839             :   // -----------------------------------
    1840             : 
    1841             :   // 2. We don't need to check explicitly for constructor target here,
    1842             :   // since that's the first thing the Construct/ConstructWithArrayLike
    1843             :   // builtins will do.
    1844             : 
    1845             :   // 3. We don't need to check explicitly for constructor new.target here,
    1846             :   // since that's the second thing the Construct/ConstructWithArrayLike
    1847             :   // builtins will do.
    1848             : 
    1849             :   // 4. Construct the target with the given new.target and argumentsList.
    1850             :   __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithArrayLike),
    1851          56 :           RelocInfo::CODE_TARGET);
    1852          56 : }
    1853             : 
    1854          56 : void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {
    1855             :   // ----------- S t a t e -------------
    1856             :   //  -- rax    : argc
    1857             :   //  -- rsp[0] : return address
    1858             :   //  -- rsp[8] : last argument
    1859             :   // -----------------------------------
    1860             :   Label generic_array_code;
    1861             : 
    1862          56 :   if (FLAG_debug_code) {
    1863             :     Register decompr_scratch_for_debug =
    1864           0 :         COMPRESS_POINTERS_BOOL ? kScratchRegister : no_reg;
    1865             :     // Initial map for the builtin InternalArray functions should be maps.
    1866             :     __ LoadTaggedPointerField(
    1867             :         rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset),
    1868           0 :         decompr_scratch_for_debug);
    1869             :     // Since kSmiTag == 0, a Smi check catches both a nullptr and a Smi.
    1870             :     STATIC_ASSERT(kSmiTag == 0);
    1871           0 :     Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    1872             :     __ Check(not_smi,
    1873           0 :              AbortReason::kUnexpectedInitialMapForInternalArrayFunction);
    1874           0 :     __ CmpObjectType(rbx, MAP_TYPE, rcx);
    1875           0 :     __ Check(equal, AbortReason::kUnexpectedInitialMapForInternalArrayFunction);
    1876             :   }
    1877             : 
    1878             :   // Run the native code for the InternalArray function called as a normal
    1879             :   // function.
    1880             :   __ Jump(BUILTIN_CODE(masm->isolate(), InternalArrayConstructorImpl),
    1881          56 :           RelocInfo::CODE_TARGET);
    1882          56 : }
    1883             : 
    1884         112 : static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
    1885         112 :   __ pushq(rbp);
    1886             :   __ movp(rbp, rsp);
    1887             : 
    1888             :   // Store the arguments adaptor context sentinel.
    1889         112 :   __ Push(Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
    1890             : 
    1891             :   // Push the function on the stack.
    1892         112 :   __ Push(rdi);
    1893             : 
    1894             :   // Preserve the number of arguments on the stack. Must preserve rax,
    1895             :   // rbx and rcx because these registers are used when copying the
    1896             :   // arguments and the receiver.
    1897         112 :   __ SmiTag(r8, rax);
    1898         112 :   __ Push(r8);
    1899             : 
    1900         112 :   __ Push(Immediate(0));  // Padding.
    1901         112 : }
    1902             : 
    1903          56 : static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
    1904             :   // Retrieve the number of arguments from the stack. Number is a Smi.
    1905         112 :   __ movp(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));
    1906             : 
    1907             :   // Leave the frame.
    1908             :   __ movp(rsp, rbp);
    1909          56 :   __ popq(rbp);
    1910             : 
    1911             :   // Remove caller arguments from the stack.
    1912             :   __ PopReturnAddressTo(rcx);
    1913          56 :   SmiIndex index = masm->SmiToIndex(rbx, rbx, kSystemPointerSizeLog2);
    1914         112 :   __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kSystemPointerSize));
    1915             :   __ PushReturnAddressFrom(rcx);
    1916          56 : }
    1917             : 
    1918          56 : void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
    1919             :   // ----------- S t a t e -------------
    1920             :   //  -- rax : actual number of arguments
    1921             :   //  -- rbx : expected number of arguments
    1922             :   //  -- rdx : new target (passed through to callee)
    1923             :   //  -- rdi : function (passed through to callee)
    1924             :   // -----------------------------------
    1925             : 
    1926             :   Register decompr_scratch_for_debug =
    1927          56 :       COMPRESS_POINTERS_BOOL ? kScratchRegister : no_reg;
    1928             : 
    1929          56 :   Label invoke, dont_adapt_arguments, stack_overflow, enough, too_few;
    1930          56 :   __ cmpp(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
    1931          56 :   __ j(equal, &dont_adapt_arguments);
    1932          56 :   __ cmpp(rax, rbx);
    1933          56 :   __ j(less, &too_few);
    1934             : 
    1935             :   {  // Enough parameters: Actual >= expected.
    1936          56 :     __ bind(&enough);
    1937          56 :     EnterArgumentsAdaptorFrame(masm);
    1938             :     // The registers rcx and r8 will be modified. The register rbx is only read.
    1939          56 :     Generate_StackOverflowCheck(masm, rbx, rcx, &stack_overflow);
    1940             : 
    1941             :     // Copy receiver and all expected arguments.
    1942             :     const int offset = StandardFrameConstants::kCallerSPOffset;
    1943         112 :     __ leap(rax, Operand(rbp, rax, times_pointer_size, offset));
    1944          56 :     __ Set(r8, -1);  // account for receiver
    1945             : 
    1946          56 :     Label copy;
    1947          56 :     __ bind(&copy);
    1948             :     __ incp(r8);
    1949          56 :     __ Push(Operand(rax, 0));
    1950          56 :     __ subp(rax, Immediate(kSystemPointerSize));
    1951          56 :     __ cmpp(r8, rbx);
    1952          56 :     __ j(less, &copy);
    1953          56 :     __ jmp(&invoke);
    1954             :   }
    1955             : 
    1956             :   {  // Too few parameters: Actual < expected.
    1957          56 :     __ bind(&too_few);
    1958             : 
    1959          56 :     EnterArgumentsAdaptorFrame(masm);
    1960             :     // The registers rcx and r8 will be modified. The register rbx is only read.
    1961          56 :     Generate_StackOverflowCheck(masm, rbx, rcx, &stack_overflow);
    1962             : 
    1963             :     // Copy receiver and all actual arguments.
    1964             :     const int offset = StandardFrameConstants::kCallerSPOffset;
    1965         112 :     __ leap(rdi, Operand(rbp, rax, times_pointer_size, offset));
    1966          56 :     __ Set(r8, -1);  // account for receiver
    1967             : 
    1968          56 :     Label copy;
    1969          56 :     __ bind(&copy);
    1970             :     __ incp(r8);
    1971          56 :     __ Push(Operand(rdi, 0));
    1972          56 :     __ subp(rdi, Immediate(kSystemPointerSize));
    1973          56 :     __ cmpp(r8, rax);
    1974          56 :     __ j(less, &copy);
    1975             : 
    1976             :     // Fill remaining expected arguments with undefined values.
    1977          56 :     Label fill;
    1978          56 :     __ LoadRoot(kScratchRegister, RootIndex::kUndefinedValue);
    1979          56 :     __ bind(&fill);
    1980             :     __ incp(r8);
    1981          56 :     __ Push(kScratchRegister);
    1982          56 :     __ cmpp(r8, rbx);
    1983          56 :     __ j(less, &fill);
    1984             : 
    1985             :     // Restore function pointer.
    1986         112 :     __ movp(rdi, Operand(rbp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
    1987             :   }
    1988             : 
    1989             :   // Call the entry point.
    1990          56 :   __ bind(&invoke);
    1991             :   __ movp(rax, rbx);
    1992             :   // rax : expected number of arguments
    1993             :   // rdx : new target (passed through to callee)
    1994             :   // rdi : function (passed through to callee)
    1995             :   static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
    1996             :   __ LoadTaggedPointerField(rcx, FieldOperand(rdi, JSFunction::kCodeOffset),
    1997          56 :                             decompr_scratch_for_debug);
    1998          56 :   __ CallCodeObject(rcx);
    1999             : 
    2000             :   // Store offset of return address for deoptimizer.
    2001         112 :   masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
    2002             : 
    2003             :   // Leave frame and return.
    2004          56 :   LeaveArgumentsAdaptorFrame(masm);
    2005          56 :   __ ret(0);
    2006             : 
    2007             :   // -------------------------------------------
    2008             :   // Don't adapt arguments.
    2009             :   // -------------------------------------------
    2010          56 :   __ bind(&dont_adapt_arguments);
    2011             :   static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
    2012             :   __ LoadTaggedPointerField(rcx, FieldOperand(rdi, JSFunction::kCodeOffset),
    2013          56 :                             decompr_scratch_for_debug);
    2014          56 :   __ JumpCodeObject(rcx);
    2015             : 
    2016          56 :   __ bind(&stack_overflow);
    2017             :   {
    2018          56 :     FrameScope frame(masm, StackFrame::MANUAL);
    2019          56 :     __ CallRuntime(Runtime::kThrowStackOverflow);
    2020          56 :     __ int3();
    2021             :   }
    2022          56 : }
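
// Illustrative sketch (not V8 code): the adaptation policy implemented by the
// trampoline above. Only the first `expected` actual arguments are visible to
// the callee; missing ones are filled with "undefined" (strings stand in for
// tagged values, and the dont-adapt sentinel case is omitted).
#include <cassert>
#include <cstddef>
#include <string>
#include <vector>

static std::vector<std::string> AdaptArguments(
    const std::vector<std::string>& actual, size_t expected) {
  std::vector<std::string> adapted;
  for (size_t i = 0; i < expected; i++) {
    adapted.push_back(i < actual.size() ? actual[i] : "undefined");
  }
  return adapted;
}

int main() {
  // Too few actual arguments: pad with undefined.
  assert(AdaptArguments({"a"}, 3) ==
         (std::vector<std::string>{"a", "undefined", "undefined"}));
  // Enough actual arguments: the callee sees exactly `expected` of them.
  assert(AdaptArguments({"a", "b", "c"}, 2) ==
         (std::vector<std::string>{"a", "b"}));
  return 0;
}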
    2023             : 
    2024             : // static
    2025         112 : void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
    2026             :                                                Handle<Code> code) {
    2027             :   // ----------- S t a t e -------------
    2028             :   //  -- rdi    : target
    2029             :   //  -- rax    : number of parameters on the stack (not including the receiver)
    2030             :   //  -- rbx    : arguments list (a FixedArray)
    2031             :   //  -- rcx    : len (number of elements to push from args)
    2032             :   //  -- rdx    : new.target (for [[Construct]])
    2033             :   //  -- rsp[0] : return address
    2034             :   // -----------------------------------
    2035             :   Register scratch = r11;
    2036         112 :   Register decompr_scratch = COMPRESS_POINTERS_BOOL ? r12 : no_reg;
    2037             :   Register decompr_scratch_for_debug =
    2038         112 :       COMPRESS_POINTERS_BOOL ? kScratchRegister : no_reg;
    2039             : 
    2040         112 :   if (masm->emit_debug_code()) {
    2041             :     // Allow rbx to be a FixedArray, or a FixedDoubleArray if rcx == 0.
    2042           0 :     Label ok, fail;
    2043           0 :     __ AssertNotSmi(rbx);
    2044           0 :     Register map = r9;
    2045             :     __ LoadTaggedPointerField(map, FieldOperand(rbx, HeapObject::kMapOffset),
    2046           0 :                               decompr_scratch_for_debug);
    2047           0 :     __ CmpInstanceType(map, FIXED_ARRAY_TYPE);
    2048           0 :     __ j(equal, &ok);
    2049           0 :     __ CmpInstanceType(map, FIXED_DOUBLE_ARRAY_TYPE);
    2050           0 :     __ j(not_equal, &fail);
    2051           0 :     __ cmpl(rcx, Immediate(0));
    2052           0 :     __ j(equal, &ok);
    2053             :     // Fall through.
    2054           0 :     __ bind(&fail);
    2055           0 :     __ Abort(AbortReason::kOperandIsNotAFixedArray);
    2056             : 
    2057           0 :     __ bind(&ok);
    2058             :   }
    2059             : 
    2060         112 :   Label stack_overflow;
    2061         112 :   Generate_StackOverflowCheck(masm, rcx, r8, &stack_overflow, Label::kNear);
    2062             : 
    2063             :   // Push additional arguments onto the stack.
    2064             :   {
    2065         112 :     Register value = scratch;
    2066             :     __ PopReturnAddressTo(r8);
    2067         112 :     __ Set(r9, 0);
    2068         112 :     Label done, push, loop;
    2069         112 :     __ bind(&loop);
    2070         112 :     __ cmpl(r9, rcx);
    2071         112 :     __ j(equal, &done, Label::kNear);
    2072             :     // Turn the hole into undefined as we go.
    2073             :     __ LoadAnyTaggedField(
    2074             :         value,
    2075             :         FieldOperand(rbx, r9, times_tagged_size, FixedArray::kHeaderSize),
    2076         112 :         decompr_scratch, decompr_scratch_for_debug);
    2077         112 :     __ CompareRoot(value, RootIndex::kTheHoleValue);
    2078         112 :     __ j(not_equal, &push, Label::kNear);
    2079         112 :     __ LoadRoot(value, RootIndex::kUndefinedValue);
    2080         112 :     __ bind(&push);
    2081         112 :     __ Push(value);
    2082             :     __ incl(r9);
    2083         112 :     __ jmp(&loop);
    2084         112 :     __ bind(&done);
    2085             :     __ PushReturnAddressFrom(r8);
    2086         112 :     __ addq(rax, r9);
    2087             :   }
    2088             : 
    2089             :   // Tail-call to the actual Call or Construct builtin.
    2090         112 :   __ Jump(code, RelocInfo::CODE_TARGET);
    2091             : 
    2092         112 :   __ bind(&stack_overflow);
    2093         112 :   __ TailCallRuntime(Runtime::kThrowStackOverflow);
    2094         112 : }
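
// Illustrative sketch (not V8 code): the push loop above appends every
// element of the arguments list to the arguments already on the stack,
// replacing holes with "undefined" on the way. An empty optional stands in
// for the-hole; the names are invented for the example.
#include <cassert>
#include <optional>
#include <string>
#include <vector>

static void PushVarargs(std::vector<std::string>& stack_args,
                        const std::vector<std::optional<std::string>>& list) {
  for (const auto& element : list) {
    stack_args.push_back(element.has_value() ? *element : "undefined");
  }
}

int main() {
  std::vector<std::string> args = {"receiver"};
  PushVarargs(args, {std::optional<std::string>("x"), std::nullopt});
  assert(args.size() == 3 && args[1] == "x" && args[2] == "undefined");
  return 0;
}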
    2095             : 
    2096             : // static
    2097         224 : void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
    2098             :                                                       CallOrConstructMode mode,
    2099             :                                                       Handle<Code> code) {
    2100             :   // ----------- S t a t e -------------
    2101             :   //  -- rax : the number of arguments (not including the receiver)
    2102             :   //  -- rdx : the new target (for [[Construct]] calls)
    2103             :   //  -- rdi : the target to call (can be any Object)
    2104             :   //  -- rcx : start index (to support rest parameters)
    2105             :   // -----------------------------------
    2106             : 
    2107             :   Register decompr_scratch_for_debug =
    2108         224 :       COMPRESS_POINTERS_BOOL ? kScratchRegister : no_reg;
    2109             : 
    2110             :   // Check if new.target has a [[Construct]] internal method.
    2111         224 :   if (mode == CallOrConstructMode::kConstruct) {
    2112         112 :     Label new_target_constructor, new_target_not_constructor;
    2113         112 :     __ JumpIfSmi(rdx, &new_target_not_constructor, Label::kNear);
    2114             :     __ LoadTaggedPointerField(rbx, FieldOperand(rdx, HeapObject::kMapOffset),
    2115         112 :                               decompr_scratch_for_debug);
    2116             :     __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
    2117         112 :              Immediate(Map::IsConstructorBit::kMask));
    2118         112 :     __ j(not_zero, &new_target_constructor, Label::kNear);
    2119         112 :     __ bind(&new_target_not_constructor);
    2120             :     {
    2121         112 :       FrameScope scope(masm, StackFrame::MANUAL);
    2122         112 :       __ EnterFrame(StackFrame::INTERNAL);
    2123         112 :       __ Push(rdx);
    2124         112 :       __ CallRuntime(Runtime::kThrowNotConstructor);
    2125             :     }
    2126         112 :     __ bind(&new_target_constructor);
    2127             :   }
    2128             : 
    2129             :   // Check if we have an arguments adaptor frame below the function frame.
    2130         224 :   Label arguments_adaptor, arguments_done;
    2131         448 :   __ movp(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
    2132             :   __ cmpp(Operand(rbx, CommonFrameConstants::kContextOrFrameTypeOffset),
    2133         224 :           Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
    2134         224 :   __ j(equal, &arguments_adaptor, Label::kNear);
    2135             :   {
    2136         448 :     __ movp(r8, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    2137             :     __ LoadTaggedPointerField(
    2138             :         r8, FieldOperand(r8, JSFunction::kSharedFunctionInfoOffset),
    2139         224 :         decompr_scratch_for_debug);
    2140             :     __ movzxwq(
    2141             :         r8, FieldOperand(r8, SharedFunctionInfo::kFormalParameterCountOffset));
    2142             :     __ movp(rbx, rbp);
    2143             :   }
    2144         224 :   __ jmp(&arguments_done, Label::kNear);
    2145         224 :   __ bind(&arguments_adaptor);
    2146             :   {
    2147             :     __ SmiUntag(r8,
    2148         224 :                 Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
    2149             :   }
    2150         224 :   __ bind(&arguments_done);
    2151             : 
    2152         224 :   Label stack_done, stack_overflow;
    2153         224 :   __ subl(r8, rcx);
    2154         224 :   __ j(less_equal, &stack_done);
    2155             :   {
    2156             :     // Check for stack overflow.
    2157         224 :     Generate_StackOverflowCheck(masm, r8, rcx, &stack_overflow, Label::kNear);
    2158             : 
    2159             :     // Forward the arguments from the caller frame.
    2160             :     {
    2161         224 :       Label loop;
    2162         224 :       __ addl(rax, r8);
    2163             :       __ PopReturnAddressTo(rcx);
    2164         224 :       __ bind(&loop);
    2165             :       {
    2166             :         StackArgumentsAccessor args(rbx, r8, ARGUMENTS_DONT_CONTAIN_RECEIVER);
    2167         224 :         __ Push(args.GetArgumentOperand(0));
    2168             :         __ decl(r8);
    2169         224 :         __ j(not_zero, &loop);
    2170             :       }
    2171             :       __ PushReturnAddressFrom(rcx);
    2172             :     }
    2173             :   }
    2174         224 :   __ jmp(&stack_done, Label::kNear);
    2175         224 :   __ bind(&stack_overflow);
    2176         224 :   __ TailCallRuntime(Runtime::kThrowStackOverflow);
    2177         224 :   __ bind(&stack_done);
    2178             : 
    2179             :   // Tail-call to the {code} handler.
    2180         224 :   __ Jump(code, RelocInfo::CODE_TARGET);
    2181         224 : }
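
// Illustrative sketch (not V8 code): the forwarding logic above. The caller's
// arguments from `start_index` onwards (the rest parameters) are appended to
// the arguments already pushed for the callee; if the caller supplied no
// arguments past that index, nothing is forwarded.
#include <cassert>
#include <cstddef>
#include <string>
#include <vector>

static std::vector<std::string> ForwardVarargs(
    std::vector<std::string> already_pushed,
    const std::vector<std::string>& caller_args, size_t start_index) {
  for (size_t i = start_index; i < caller_args.size(); i++) {
    already_pushed.push_back(caller_args[i]);
  }
  return already_pushed;
}

int main() {
  std::vector<std::string> out =
      ForwardVarargs({"a"}, {"x", "y", "z"}, /*start_index=*/1);
  assert((out == std::vector<std::string>{"a", "y", "z"}));
  return 0;
}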
    2182             : 
    2183             : // static
    2184         168 : void Builtins::Generate_CallFunction(MacroAssembler* masm,
    2185             :                                      ConvertReceiverMode mode) {
    2186             :   // ----------- S t a t e -------------
    2187             :   //  -- rax : the number of arguments (not including the receiver)
    2188             :   //  -- rdi : the function to call (checked to be a JSFunction)
    2189             :   // -----------------------------------
    2190             :   Register decompr_scratch_for_debug =
    2191         168 :       COMPRESS_POINTERS_BOOL ? kScratchRegister : no_reg;
    2192             : 
    2193             :   StackArgumentsAccessor args(rsp, rax);
    2194         168 :   __ AssertFunction(rdi);
    2195             : 
    2196             :   // ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
    2197             :   // Check that the function is not a "classConstructor".
    2198         168 :   Label class_constructor;
    2199             :   __ LoadTaggedPointerField(
    2200             :       rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset),
    2201         168 :       decompr_scratch_for_debug);
    2202             :   __ testl(FieldOperand(rdx, SharedFunctionInfo::kFlagsOffset),
    2203         168 :            Immediate(SharedFunctionInfo::IsClassConstructorBit::kMask));
    2204         168 :   __ j(not_zero, &class_constructor);
    2205             : 
    2206             :   // ----------- S t a t e -------------
    2207             :   //  -- rax : the number of arguments (not including the receiver)
    2208             :   //  -- rdx : the shared function info.
    2209             :   //  -- rdi : the function to call (checked to be a JSFunction)
    2210             :   // -----------------------------------
    2211             : 
    2212             :   // Enter the context of the function; ToObject has to run in the function
    2213             :   // context, and we also need to take the global proxy from the function
    2214             :   // context in case of conversion.
    2215             :   __ LoadTaggedPointerField(rsi, FieldOperand(rdi, JSFunction::kContextOffset),
    2216         168 :                             decompr_scratch_for_debug);
    2217             :   // We need to convert the receiver for non-native sloppy mode functions.
    2218         168 :   Label done_convert;
    2219             :   __ testl(FieldOperand(rdx, SharedFunctionInfo::kFlagsOffset),
    2220             :            Immediate(SharedFunctionInfo::IsNativeBit::kMask |
    2221             :                      SharedFunctionInfo::IsStrictBit::kMask));
    2222         168 :   __ j(not_zero, &done_convert);
    2223             :   {
    2224             :     // ----------- S t a t e -------------
    2225             :     //  -- rax : the number of arguments (not including the receiver)
    2226             :     //  -- rdx : the shared function info.
    2227             :     //  -- rdi : the function to call (checked to be a JSFunction)
    2228             :     //  -- rsi : the function context.
    2229             :     // -----------------------------------
    2230             : 
    2231         168 :     if (mode == ConvertReceiverMode::kNullOrUndefined) {
    2232             :       // Patch receiver to global proxy.
    2233             :       __ LoadGlobalProxy(rcx);
    2234             :     } else {
    2235         112 :       Label convert_to_object, convert_receiver;
    2236             :       __ movp(rcx, args.GetReceiverOperand());
    2237         112 :       __ JumpIfSmi(rcx, &convert_to_object, Label::kNear);
    2238             :       STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    2239         112 :       __ CmpObjectType(rcx, FIRST_JS_RECEIVER_TYPE, rbx);
    2240         112 :       __ j(above_equal, &done_convert);
    2241         112 :       if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
    2242          56 :         Label convert_global_proxy;
    2243             :         __ JumpIfRoot(rcx, RootIndex::kUndefinedValue, &convert_global_proxy,
    2244             :                       Label::kNear);
    2245             :         __ JumpIfNotRoot(rcx, RootIndex::kNullValue, &convert_to_object,
    2246             :                          Label::kNear);
    2247          56 :         __ bind(&convert_global_proxy);
    2248             :         {
    2249             :           // Patch receiver to global proxy.
    2250             :           __ LoadGlobalProxy(rcx);
    2251             :         }
    2252          56 :         __ jmp(&convert_receiver);
    2253             :       }
    2254         112 :       __ bind(&convert_to_object);
    2255             :       {
    2256             :         // Convert receiver using ToObject.
    2257             :         // TODO(bmeurer): Inline the allocation here to avoid building the frame
    2258             :         // in the fast case? (fall back to AllocateInNewSpace?)
    2259         112 :         FrameScope scope(masm, StackFrame::INTERNAL);
    2260         112 :         __ SmiTag(rax, rax);
    2261         112 :         __ Push(rax);
    2262         112 :         __ Push(rdi);
    2263             :         __ movp(rax, rcx);
    2264         112 :         __ Push(rsi);
    2265             :         __ Call(BUILTIN_CODE(masm->isolate(), ToObject),
    2266         112 :                 RelocInfo::CODE_TARGET);
    2267         112 :         __ Pop(rsi);
    2268             :         __ movp(rcx, rax);
    2269         112 :         __ Pop(rdi);
    2270         112 :         __ Pop(rax);
    2271         112 :         __ SmiUntag(rax, rax);
    2272             :       }
    2273             :       __ LoadTaggedPointerField(
    2274             :           rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset),
    2275         112 :           decompr_scratch_for_debug);
    2276         112 :       __ bind(&convert_receiver);
    2277             :     }
    2278             :     __ movp(args.GetReceiverOperand(), rcx);
    2279             :   }
    2280         168 :   __ bind(&done_convert);
    2281             : 
    2282             :   // ----------- S t a t e -------------
    2283             :   //  -- rax : the number of arguments (not including the receiver)
    2284             :   //  -- rdx : the shared function info.
    2285             :   //  -- rdi : the function to call (checked to be a JSFunction)
    2286             :   //  -- rsi : the function context.
    2287             :   // -----------------------------------
    2288             : 
    2289             :   __ movzxwq(
    2290             :       rbx, FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
    2291             :   ParameterCount actual(rax);
    2292             :   ParameterCount expected(rbx);
    2293             : 
    2294         168 :   __ InvokeFunctionCode(rdi, no_reg, expected, actual, JUMP_FUNCTION);
    2295             : 
    2296             :   // The function is a "classConstructor", need to raise an exception.
    2297         168 :   __ bind(&class_constructor);
    2298             :   {
    2299         168 :     FrameScope frame(masm, StackFrame::INTERNAL);
    2300         168 :     __ Push(rdi);
    2301         168 :     __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
    2302             :   }
    2303         168 : }
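
// Illustrative sketch (not V8 code): the receiver conversion rules the builtin
// above implements for ES6 [[Call]]. Strict-mode and native functions get the
// receiver unchanged; for sloppy-mode functions, null/undefined is replaced by
// the global proxy and other primitives go through ToObject. "GLOBAL_PROXY"
// and the "ToObject(...)" string are stand-ins for the example.
#include <cassert>
#include <string>

static std::string ConvertReceiver(const std::string& receiver,
                                   bool is_strict_or_native,
                                   bool is_js_receiver) {
  if (is_strict_or_native) return receiver;
  if (receiver == "null" || receiver == "undefined") return "GLOBAL_PROXY";
  if (!is_js_receiver) return "ToObject(" + receiver + ")";
  return receiver;
}

int main() {
  assert(ConvertReceiver("undefined", false, false) == "GLOBAL_PROXY");
  assert(ConvertReceiver("42", false, false) == "ToObject(42)");
  assert(ConvertReceiver("undefined", true, false) == "undefined");
  return 0;
}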
    2304             : 
    2305             : namespace {
    2306             : 
    2307         112 : void Generate_PushBoundArguments(MacroAssembler* masm) {
    2308             :   // ----------- S t a t e -------------
    2309             :   //  -- rax : the number of arguments (not including the receiver)
    2310             :   //  -- rdx : new.target (only in case of [[Construct]])
    2311             :   //  -- rdi : target (checked to be a JSBoundFunction)
    2312             :   // -----------------------------------
    2313             : 
    2314         112 :   Register decompr_scratch = COMPRESS_POINTERS_BOOL ? r11 : no_reg;
    2315             :   Register decompr_scratch_for_debug =
    2316         112 :       COMPRESS_POINTERS_BOOL ? kScratchRegister : no_reg;
    2317             : 
    2318             :   // Load [[BoundArguments]] into rcx and length of that into rbx.
    2319         112 :   Label no_bound_arguments;
    2320             :   __ LoadTaggedPointerField(
    2321             :       rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset),
    2322         112 :       decompr_scratch_for_debug);
    2323         112 :   __ SmiUntagField(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
    2324         112 :   __ testl(rbx, rbx);
    2325         112 :   __ j(zero, &no_bound_arguments);
    2326             :   {
    2327             :     // ----------- S t a t e -------------
    2328             :     //  -- rax : the number of arguments (not including the receiver)
    2329             :     //  -- rdx : new.target (only in case of [[Construct]])
    2330             :     //  -- rdi : target (checked to be a JSBoundFunction)
    2331             :     //  -- rcx : the [[BoundArguments]] (implemented as FixedArray)
    2332             :     //  -- rbx : the number of [[BoundArguments]] (checked to be non-zero)
    2333             :     // -----------------------------------
    2334             : 
    2335             :     // Reserve stack space for the [[BoundArguments]].
    2336             :     {
    2337         112 :       Label done;
    2338         224 :       __ leap(kScratchRegister, Operand(rbx, times_pointer_size, 0));
    2339         112 :       __ subp(rsp, kScratchRegister);
    2340             :       // Check the stack for overflow. We are not trying to catch interruptions
    2341             :       // (i.e. debug break and preemption) here, so check the "real stack
    2342             :       // limit".
    2343         112 :       __ CompareRoot(rsp, RootIndex::kRealStackLimit);
    2344         112 :       __ j(above_equal, &done, Label::kNear);
    2345             :       // Restore the stack pointer.
    2346         224 :       __ leap(rsp, Operand(rsp, rbx, times_pointer_size, 0));
    2347             :       {
    2348         112 :         FrameScope scope(masm, StackFrame::MANUAL);
    2349         112 :         __ EnterFrame(StackFrame::INTERNAL);
    2350         112 :         __ CallRuntime(Runtime::kThrowStackOverflow);
    2351             :       }
    2352         112 :       __ bind(&done);
    2353             :     }
    2354             : 
    2355             :     // Adjust effective number of arguments to include return address.
    2356             :     __ incl(rax);
    2357             : 
    2358             :     // Relocate arguments and return address down the stack.
    2359             :     {
    2360         112 :       Label loop;
    2361         112 :       __ Set(rcx, 0);
    2362         224 :       __ leap(rbx, Operand(rsp, rbx, times_pointer_size, 0));
    2363         112 :       __ bind(&loop);
    2364         224 :       __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
    2365         224 :       __ movp(Operand(rsp, rcx, times_pointer_size, 0), kScratchRegister);
    2366             :       __ incl(rcx);
    2367         112 :       __ cmpl(rcx, rax);
    2368         112 :       __ j(less, &loop);
    2369             :     }
    2370             : 
    2371             :     // Copy [[BoundArguments]] to the stack (below the arguments).
    2372             :     {
    2373         112 :       Label loop;
    2374             :       __ LoadTaggedPointerField(
    2375             :           rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset),
    2376         112 :           decompr_scratch_for_debug);
    2377         112 :       __ SmiUntagField(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
    2378         112 :       __ bind(&loop);
    2379             :       // Instead of doing decl(rbx) here subtract kTaggedSize from the header
    2380             :       // offset in order to be able to move decl(rbx) right before the loop
    2381             :       // condition. This is necessary in order to avoid flags corruption by
    2382             :       // pointer decompression code.
    2383             :       __ LoadAnyTaggedField(r12,
    2384             :                             FieldOperand(rcx, rbx, times_tagged_size,
    2385             :                                          FixedArray::kHeaderSize - kTaggedSize),
    2386         112 :                             decompr_scratch, decompr_scratch_for_debug);
    2387         224 :       __ movp(Operand(rsp, rax, times_pointer_size, 0), r12);
    2388         224 :       __ leal(rax, Operand(rax, 1));
    2389             :       __ decl(rbx);
    2390         112 :       __ j(greater, &loop);
    2391             :     }
    2392             : 
    2393             :     // Adjust effective number of arguments (rax contains the number of
    2394             :     // arguments from the call plus return address plus the number of
    2395             :     // [[BoundArguments]]), so we need to subtract one for the return address.
    2396             :     __ decl(rax);
    2397             :   }
    2398         112 :   __ bind(&no_bound_arguments);
    2399         112 : }
    2400             : 
    2401             : }  // namespace
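
// Illustrative sketch (not V8 code): the effect of Generate_PushBoundArguments
// above. The function's [[BoundArguments]] are inserted between the receiver
// and the arguments supplied at the call site, so the bound target sees the
// bound arguments first, followed by the call-site arguments.
#include <cassert>
#include <string>
#include <vector>

static std::vector<std::string> SpliceBoundArguments(
    const std::vector<std::string>& bound_args,
    const std::vector<std::string>& call_args) {
  std::vector<std::string> combined = bound_args;
  combined.insert(combined.end(), call_args.begin(), call_args.end());
  return combined;
}

int main() {
  std::vector<std::string> result = SpliceBoundArguments({"b0", "b1"}, {"c0"});
  assert((result == std::vector<std::string>{"b0", "b1", "c0"}));
  return 0;
}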
    2402             : 
    2403             : // static
    2404          56 : void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
    2405             :   // ----------- S t a t e -------------
    2406             :   //  -- rax : the number of arguments (not including the receiver)
    2407             :   //  -- rdi : the function to call (checked to be a JSBoundFunction)
    2408             :   // -----------------------------------
    2409          56 :   __ AssertBoundFunction(rdi);
    2410             : 
    2411          56 :   Register decompr_scratch = COMPRESS_POINTERS_BOOL ? r11 : no_reg;
    2412             :   Register decompr_scratch_for_debug =
    2413          56 :       COMPRESS_POINTERS_BOOL ? kScratchRegister : no_reg;
    2414             : 
    2415             :   // Patch the receiver to [[BoundThis]].
    2416             :   StackArgumentsAccessor args(rsp, rax);
    2417             :   __ LoadAnyTaggedField(rbx,
    2418             :                         FieldOperand(rdi, JSBoundFunction::kBoundThisOffset),
    2419          56 :                         decompr_scratch, decompr_scratch_for_debug);
    2420          56 :   __ movp(args.GetReceiverOperand(), rbx);
    2421             : 
    2422             :   // Push the [[BoundArguments]] onto the stack.
    2423          56 :   Generate_PushBoundArguments(masm);
    2424             : 
    2425             :   // Call the [[BoundTargetFunction]] via the Call builtin.
    2426             :   __ LoadTaggedPointerField(
    2427             :       rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset),
    2428          56 :       decompr_scratch_for_debug);
    2429             :   __ Jump(BUILTIN_CODE(masm->isolate(), Call_ReceiverIsAny),
    2430          56 :           RelocInfo::CODE_TARGET);
    2431          56 : }
    2432             : 
    2433             : // static
    2434         168 : void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
    2435             :   // ----------- S t a t e -------------
    2436             :   //  -- rax : the number of arguments (not including the receiver)
    2437             :   //  -- rdi : the target to call (can be any Object)
    2438             :   // -----------------------------------
    2439             :   StackArgumentsAccessor args(rsp, rax);
    2440             : 
    2441         168 :   Label non_callable;
    2442         168 :   __ JumpIfSmi(rdi, &non_callable);
    2443         168 :   __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
    2444             :   __ Jump(masm->isolate()->builtins()->CallFunction(mode),
    2445         672 :           RelocInfo::CODE_TARGET, equal);
    2446             : 
    2447         168 :   __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
    2448             :   __ Jump(BUILTIN_CODE(masm->isolate(), CallBoundFunction),
    2449         168 :           RelocInfo::CODE_TARGET, equal);
    2450             : 
    2451             :   // Check if target has a [[Call]] internal method.
    2452             :   __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
    2453         168 :            Immediate(Map::IsCallableBit::kMask));
    2454         168 :   __ j(zero, &non_callable, Label::kNear);
    2455             : 
    2456             :   // Check if target is a proxy and, if so, call the CallProxy builtin.
    2457         168 :   __ CmpInstanceType(rcx, JS_PROXY_TYPE);
    2458             :   __ Jump(BUILTIN_CODE(masm->isolate(), CallProxy), RelocInfo::CODE_TARGET,
    2459         168 :           equal);
    2460             : 
    2461             :   // 2. Call to something else, which might have a [[Call]] internal method (if
    2462             :   // not we raise an exception).
    2463             : 
    2464             :   // Overwrite the original receiver with the (original) target.
    2465             :   __ movp(args.GetReceiverOperand(), rdi);
    2466             :   // Let the "call_as_function_delegate" take care of the rest.
    2467         168 :   __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, rdi);
    2468             :   __ Jump(masm->isolate()->builtins()->CallFunction(
    2469             :               ConvertReceiverMode::kNotNullOrUndefined),
    2470         168 :           RelocInfo::CODE_TARGET);
    2471             : 
    2472             :   // 3. Call to something that is not callable.
    2473         168 :   __ bind(&non_callable);
    2474             :   {
    2475         168 :     FrameScope scope(masm, StackFrame::INTERNAL);
    2476         168 :     __ Push(rdi);
    2477         168 :     __ CallRuntime(Runtime::kThrowCalledNonCallable);
    2478             :   }
    2479         168 : }
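
// Illustrative sketch (not V8 code): the dispatch order used by Generate_Call
// above, written as ordinary control flow. A Smi or a non-callable object
// throws; JS functions, bound functions and proxies go to their dedicated
// builtins; everything else callable is routed through the call-as-function
// delegate. The enum and result strings are invented for the example.
#include <cassert>
#include <string>

enum class TargetKind { kSmi, kJSFunction, kBoundFunction, kProxy, kOther };

static std::string DispatchCall(TargetKind kind, bool is_callable) {
  if (kind == TargetKind::kSmi) return "ThrowCalledNonCallable";
  if (kind == TargetKind::kJSFunction) return "CallFunction";
  if (kind == TargetKind::kBoundFunction) return "CallBoundFunction";
  if (!is_callable) return "ThrowCalledNonCallable";
  if (kind == TargetKind::kProxy) return "CallProxy";
  return "CallFunction(kNotNullOrUndefined)";  // via the delegate
}

int main() {
  assert(DispatchCall(TargetKind::kJSFunction, true) == "CallFunction");
  assert(DispatchCall(TargetKind::kOther, false) == "ThrowCalledNonCallable");
  assert(DispatchCall(TargetKind::kProxy, true) == "CallProxy");
  return 0;
}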
    2480             : 
    2481             : // static
    2482          56 : void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
    2483             :   // ----------- S t a t e -------------
    2484             :   //  -- rax : the number of arguments (not including the receiver)
    2485             :   //  -- rdx : the new target (checked to be a constructor)
    2486             :   //  -- rdi : the constructor to call (checked to be a JSFunction)
    2487             :   // -----------------------------------
    2488          56 :   __ AssertConstructor(rdi);
    2489          56 :   __ AssertFunction(rdi);
    2490             : 
    2491             :   Register decompr_scratch_for_debug =
    2492          56 :       COMPRESS_POINTERS_BOOL ? kScratchRegister : no_reg;
    2493             : 
    2494             :   // Calling convention for function specific ConstructStubs require
    2495             :   // rbx to contain either an AllocationSite or undefined.
    2496          56 :   __ LoadRoot(rbx, RootIndex::kUndefinedValue);
    2497             : 
    2498             :   // Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
    2499             :   __ LoadTaggedPointerField(
    2500             :       rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset),
    2501          56 :       decompr_scratch_for_debug);
    2502             :   __ testl(FieldOperand(rcx, SharedFunctionInfo::kFlagsOffset),
    2503          56 :            Immediate(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
    2504             :   __ Jump(BUILTIN_CODE(masm->isolate(), JSBuiltinsConstructStub),
    2505          56 :           RelocInfo::CODE_TARGET, not_zero);
    2506             : 
    2507             :   __ Jump(BUILTIN_CODE(masm->isolate(), JSConstructStubGeneric),
    2508          56 :           RelocInfo::CODE_TARGET);
    2509          56 : }
    2510             : 
    2511             : // static
    2512          56 : void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
    2513             :   // ----------- S t a t e -------------
    2514             :   //  -- rax : the number of arguments (not including the receiver)
    2515             :   //  -- rdx : the new target (checked to be a constructor)
    2516             :   //  -- rdi : the constructor to call (checked to be a JSBoundFunction)
    2517             :   // -----------------------------------
    2518          56 :   __ AssertConstructor(rdi);
    2519          56 :   __ AssertBoundFunction(rdi);
    2520             : 
    2521             :   Register decompr_scratch_for_debug =
    2522          56 :       COMPRESS_POINTERS_BOOL ? kScratchRegister : no_reg;
    2523             : 
    2524             :   // Push the [[BoundArguments]] onto the stack.
    2525          56 :   Generate_PushBoundArguments(masm);
    2526             : 
    2527             :   // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
    2528             :   {
    2529          56 :     Label done;
    2530          56 :     __ cmpp(rdi, rdx);
    2531          56 :     __ j(not_equal, &done, Label::kNear);
    2532             :     __ LoadTaggedPointerField(
    2533             :         rdx, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset),
    2534          56 :         decompr_scratch_for_debug);
    2535          56 :     __ bind(&done);
    2536             :   }
    2537             : 
    2538             :   // Construct the [[BoundTargetFunction]] via the Construct builtin.
    2539             :   __ LoadTaggedPointerField(
    2540             :       rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset),
    2541          56 :       decompr_scratch_for_debug);
    2542          56 :   __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
    2543          56 : }
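
// Illustrative sketch (not V8 code): the new.target patching rule above. When
// constructing through a bound function, new.target is replaced by the
// [[BoundTargetFunction]] only if it currently equals the bound function
// itself; any other new.target is passed through unchanged.
#include <cassert>
#include <string>

static std::string PatchNewTarget(const std::string& bound_function,
                                  const std::string& bound_target,
                                  const std::string& new_target) {
  return new_target == bound_function ? bound_target : new_target;
}

int main() {
  assert(PatchNewTarget("bound", "target", "bound") == "target");
  assert(PatchNewTarget("bound", "target", "other") == "other");
  return 0;
}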
    2544             : 
    2545             : // static
    2546          56 : void Builtins::Generate_Construct(MacroAssembler* masm) {
    2547             :   // ----------- S t a t e -------------
    2548             :   //  -- rax : the number of arguments (not including the receiver)
    2549             :   //  -- rdx : the new target (either the same as the constructor or
    2550             :   //           the JSFunction on which new was invoked initially)
    2551             :   //  -- rdi : the constructor to call (can be any Object)
    2552             :   // -----------------------------------
    2553             :   StackArgumentsAccessor args(rsp, rax);
    2554             : 
    2555             :   Register decompr_scratch_for_debug =
    2556          56 :       COMPRESS_POINTERS_BOOL ? kScratchRegister : no_reg;
    2557             : 
    2558             :   // Check if target is a Smi.
    2559          56 :   Label non_constructor;
    2560          56 :   __ JumpIfSmi(rdi, &non_constructor);
    2561             : 
    2562             :   // Check if target has a [[Construct]] internal method.
    2563             :   __ LoadTaggedPointerField(rcx, FieldOperand(rdi, HeapObject::kMapOffset),
    2564          56 :                             decompr_scratch_for_debug);
    2565             :   __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
    2566          56 :            Immediate(Map::IsConstructorBit::kMask));
    2567          56 :   __ j(zero, &non_constructor);
    2568             : 
    2569             :   // Dispatch based on instance type.
    2570          56 :   __ CmpInstanceType(rcx, JS_FUNCTION_TYPE);
    2571             :   __ Jump(BUILTIN_CODE(masm->isolate(), ConstructFunction),
    2572         280 :           RelocInfo::CODE_TARGET, equal);
    2573             : 
    2574             :   // Only dispatch to bound functions after checking whether they are
    2575             :   // constructors.
    2576          56 :   __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
    2577             :   __ Jump(BUILTIN_CODE(masm->isolate(), ConstructBoundFunction),
    2578          56 :           RelocInfo::CODE_TARGET, equal);
    2579             : 
    2580             :   // Only dispatch to proxies after checking whether they are constructors.
    2581          56 :   __ CmpInstanceType(rcx, JS_PROXY_TYPE);
    2582             :   __ Jump(BUILTIN_CODE(masm->isolate(), ConstructProxy), RelocInfo::CODE_TARGET,
    2583          56 :           equal);
    2584             : 
    2585             :   // Called Construct on an exotic Object with a [[Construct]] internal method.
    2586             :   {
    2587             :     // Overwrite the original receiver with the (original) target.
    2588             :     __ movp(args.GetReceiverOperand(), rdi);
    2589             :     // Let the "call_as_constructor_delegate" take care of the rest.
    2590          56 :     __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, rdi);
    2591             :     __ Jump(masm->isolate()->builtins()->CallFunction(),
    2592          56 :             RelocInfo::CODE_TARGET);
    2593             :   }
    2594             : 
    2595             :   // Called Construct on an Object that doesn't have a [[Construct]] internal
    2596             :   // method.
    2597          56 :   __ bind(&non_constructor);
    2598             :   __ Jump(BUILTIN_CODE(masm->isolate(), ConstructedNonConstructable),
    2599          56 :           RelocInfo::CODE_TARGET);
    2600          56 : }
    2601             : 
    2602          56 : void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
    2603             :   // Lookup the function in the JavaScript frame.
    2604         112 :   __ movp(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
    2605         112 :   __ movp(rax, Operand(rax, JavaScriptFrameConstants::kFunctionOffset));
    2606             : 
    2607             :   {
    2608          56 :     FrameScope scope(masm, StackFrame::INTERNAL);
    2609             :     // Pass function as argument.
    2610          56 :     __ Push(rax);
    2611          56 :     __ CallRuntime(Runtime::kCompileForOnStackReplacement);
    2612             :   }
    2613             : 
    2614          56 :   Label skip;
    2615             :   // If the code object is null, just return to the caller.
    2616             :   __ testp(rax, rax);
    2617          56 :   __ j(not_equal, &skip, Label::kNear);
    2618          56 :   __ ret(0);
    2619             : 
    2620          56 :   __ bind(&skip);
    2621             : 
    2622             :   // Drop the handler frame that is sitting on top of the actual
    2623             :   // JavaScript frame. This is the case when OSR is triggered from bytecode.
    2624          56 :   __ leave();
    2625             : 
    2626             :   // Load deoptimization data from the code object.
    2627             :   __ LoadTaggedPointerField(rbx,
    2628          56 :                             FieldOperand(rax, Code::kDeoptimizationDataOffset));
    2629             : 
    2630             :   // Load the OSR entrypoint offset from the deoptimization data.
    2631             :   __ SmiUntagField(
    2632             :       rbx, FieldOperand(rbx, FixedArray::OffsetOfElementAt(
    2633          56 :                                  DeoptimizationData::kOsrPcOffsetIndex)));
    2634             : 
    2635             :   // Compute the target address = code_obj + header_size + osr_offset
    2636             :   __ leap(rax, FieldOperand(rax, rbx, times_1, Code::kHeaderSize));
    2637             : 
    2638             :   // Overwrite the return address on the stack.
    2639             :   __ movq(StackOperandForReturnAddress(0), rax);
    2640             : 
    2641             :   // And "return" to the OSR entry point of the function.
    2642          56 :   __ ret(0);
    2643          56 : }
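
// Illustrative sketch (not V8 code): the address arithmetic at the end of the
// OSR builtin above. The return address is overwritten with
// code_object + header_size + osr_pc_offset, where the offset comes from the
// deoptimization data; the constants below are made up for the example.
#include <cassert>
#include <cstdint>

static std::uintptr_t OsrEntryAddress(std::uintptr_t code_object_start,
                                      std::uintptr_t header_size,
                                      std::uintptr_t osr_pc_offset) {
  return code_object_start + header_size + osr_pc_offset;
}

int main() {
  // A code object at 0x1000 with a 0x40-byte header and an OSR offset of 0x20
  // resumes execution at 0x1060.
  assert(OsrEntryAddress(0x1000, 0x40, 0x20) == 0x1060);
  return 0;
}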
    2644             : 
    2645          56 : void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
    2646             :   // The function index was pushed to the stack by the caller as int32.
    2647          56 :   __ Pop(r11);
    2648             :   // Convert to Smi for the runtime call.
    2649          56 :   __ SmiTag(r11, r11);
    2650             :   {
    2651             :     HardAbortScope hard_abort(masm);  // Avoid calls to Abort.
    2652         112 :     FrameScope scope(masm, StackFrame::WASM_COMPILE_LAZY);
    2653             : 
    2654             :     // Save all parameter registers (see wasm-linkage.cc). They might be
    2655             :     // overwritten in the runtime call below. We don't have any callee-saved
    2656             :     // registers in wasm, so no need to store anything else.
    2657             :     static_assert(WasmCompileLazyFrameConstants::kNumberOfSavedGpParamRegs ==
    2658             :                       arraysize(wasm::kGpParamRegisters),
    2659             :                   "frame size mismatch");
    2660         392 :     for (Register reg : wasm::kGpParamRegisters) {
    2661         336 :       __ Push(reg);
    2662             :     }
    2663             :     static_assert(WasmCompileLazyFrameConstants::kNumberOfSavedFpParamRegs ==
    2664             :                       arraysize(wasm::kFpParamRegisters),
    2665             :                   "frame size mismatch");
    2666          56 :     __ subp(rsp, Immediate(kSimd128Size * arraysize(wasm::kFpParamRegisters)));
    2667             :     int offset = 0;
    2668         392 :     for (DoubleRegister reg : wasm::kFpParamRegisters) {
    2669         336 :       __ movdqu(Operand(rsp, offset), reg);
    2670         336 :       offset += kSimd128Size;
    2671             :     }
    2672             : 
    2673             :     // Push the WASM instance as an explicit argument to WasmCompileLazy.
    2674          56 :     __ Push(kWasmInstanceRegister);
    2675             :     // Push the function index as second argument.
    2676          56 :     __ Push(r11);
    2677             :     // Load the correct CEntry builtin from the instance object.
    2678             :     Register decompr_scratch_for_debug =
    2679          56 :         COMPRESS_POINTERS_BOOL ? kScratchRegister : no_reg;
    2680             :     __ LoadTaggedPointerField(
    2681             :         rcx,
    2682             :         FieldOperand(kWasmInstanceRegister,
    2683             :                      WasmInstanceObject::kCEntryStubOffset),
    2684          56 :         decompr_scratch_for_debug);
    2685             :     // Initialize the JavaScript context with 0. CEntry will use it to
    2686             :     // set the current context on the isolate.
    2687          56 :     __ Move(kContextRegister, Smi::zero());
    2688          56 :     __ CallRuntimeWithCEntry(Runtime::kWasmCompileLazy, rcx);
    2689             :     // The entrypoint address is the return value.
    2690             :     __ movq(r11, kReturnRegister0);
    2691             : 
    2692             :     // Restore registers.
    2693         728 :     for (DoubleRegister reg : base::Reversed(wasm::kFpParamRegisters)) {
    2694         336 :       offset -= kSimd128Size;
    2695         336 :       __ movdqu(reg, Operand(rsp, offset));
    2696             :     }
    2697             :     DCHECK_EQ(0, offset);
    2698          56 :     __ addp(rsp, Immediate(kSimd128Size * arraysize(wasm::kFpParamRegisters)));
    2699         728 :     for (Register reg : base::Reversed(wasm::kGpParamRegisters)) {
    2700         336 :       __ Pop(reg);
    2701             :     }
    2702             :   }
    2703             :   // Finally, jump to the entrypoint.
    2704          56 :   __ jmp(r11);
    2705          56 : }
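
// Illustrative sketch (not V8 code): the save-area bookkeeping used above.
// Each FP parameter register gets one 16-byte SIMD slot; the save loop writes
// at increasing offsets and the restore loop walks them back down, so the
// running offset must return to zero (as the DCHECK_EQ above asserts). The
// register count is an assumption chosen for the example.
#include <cassert>
#include <cstddef>

int main() {
  const size_t kSimd128Size = 16;         // bytes per saved XMM register
  const size_t kNumFpParamRegisters = 6;  // assumed count for the example
  const size_t reserved = kSimd128Size * kNumFpParamRegisters;

  size_t offset = 0;
  for (size_t i = 0; i < kNumFpParamRegisters; i++) offset += kSimd128Size;
  assert(offset == reserved);  // save loop ends exactly at the reserved size
  for (size_t i = 0; i < kNumFpParamRegisters; i++) offset -= kSimd128Size;
  assert(offset == 0);  // restore loop returns the offset to zero
  return 0;
}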
    2706             : 
    2707         560 : void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
    2708             :                                SaveFPRegsMode save_doubles, ArgvMode argv_mode,
    2709             :                                bool builtin_exit_frame) {
    2710             :   // rax: number of arguments including receiver
    2711             :   // rbx: pointer to C function  (C callee-saved)
    2712             :   // rbp: frame pointer of calling JS frame (restored after C call)
    2713             :   // rsp: stack pointer  (restored after C call)
    2714             :   // rsi: current context (restored)
    2715             :   //
    2716             :   // If argv_mode == kArgvInRegister:
    2717             :   // r15: pointer to the first argument
    2718             : 
    2719             : #ifdef _WIN64
    2720             :   // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9. It requires the
    2721             :   // stack to be aligned to 16 bytes. It only allows a single word to be
    2722             :   // returned in register rax. Larger return sizes must be written to an address
    2723             :   // passed as a hidden first argument.
    2724             :   const Register kCCallArg0 = rcx;
    2725             :   const Register kCCallArg1 = rdx;
    2726             :   const Register kCCallArg2 = r8;
    2727             :   const Register kCCallArg3 = r9;
    2728             :   const int kArgExtraStackSpace = 2;
    2729             :   const int kMaxRegisterResultSize = 1;
    2730             : #else
    2731             :   // GCC / Clang passes arguments in rdi, rsi, rdx, rcx, r8, r9. Simple results
    2732             :   // are returned in rax, and a struct of two pointers is returned in rax+rdx.
    2733             :   // Larger return sizes must be written to an address passed as a hidden first
    2734             :   // argument.
    2735             :   const Register kCCallArg0 = rdi;
    2736             :   const Register kCCallArg1 = rsi;
    2737             :   const Register kCCallArg2 = rdx;
    2738         560 :   const Register kCCallArg3 = rcx;
    2739             :   const int kArgExtraStackSpace = 0;
    2740             :   const int kMaxRegisterResultSize = 2;
    2741             : #endif  // _WIN64
    2742             : 
    2743             :   // Enter the exit frame that transitions from JavaScript to C++.
    2744             :   int arg_stack_space =
    2745         560 :       kArgExtraStackSpace +
    2746             :       (result_size <= kMaxRegisterResultSize ? 0 : result_size);
    2747         560 :   if (argv_mode == kArgvInRegister) {
    2748             :     DCHECK(save_doubles == kDontSaveFPRegs);
    2749             :     DCHECK(!builtin_exit_frame);
    2750         112 :     __ EnterApiExitFrame(arg_stack_space);
    2751             :     // Move argc into r14 (argv is already in r15).
    2752         112 :     __ movp(r14, rax);
    2753             :   } else {
    2754             :     __ EnterExitFrame(
    2755             :         arg_stack_space, save_doubles == kSaveFPRegs,
    2756         448 :         builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
    2757             :   }
    2758             : 
    2759             :   // rbx: pointer to builtin function  (C callee-saved).
    2760             :   // rbp: frame pointer of exit frame  (restored after C call).
    2761             :   // rsp: stack pointer (restored after C call).
    2762             :   // r14: number of arguments including receiver (C callee-saved).
    2763             :   // r15: argv pointer (C callee-saved).
    2764             : 
    2765             :   // Check stack alignment.
    2766         560 :   if (FLAG_debug_code) {
    2767           0 :     __ CheckStackAlignment();
    2768             :   }
    2769             : 
    2770             :   // Call C function. The arguments object will be created by stubs declared by
    2771             :   // DECLARE_RUNTIME_FUNCTION().
    2772         560 :   if (result_size <= kMaxRegisterResultSize) {
    2773             :     // Pass a pointer to the Arguments object as the first argument.
    2774             :     // Return result in single register (rax), or a register pair (rax, rdx).
    2775         560 :     __ movp(kCCallArg0, r14);  // argc.
    2776             :     __ movp(kCCallArg1, r15);  // argv.
    2777        3360 :     __ Move(kCCallArg2, ExternalReference::isolate_address(masm->isolate()));
    2778             :   } else {
    2779             :     DCHECK_LE(result_size, 2);
    2780             :     // Pass a pointer to the result location as the first argument.
    2781           0 :     __ leap(kCCallArg0, StackSpaceOperand(kArgExtraStackSpace));
    2782             :     // Pass a pointer to the Arguments object as the second argument.
    2783             :     __ movp(kCCallArg1, r14);  // argc.
    2784             :     __ movp(kCCallArg2, r15);  // argv.
    2785           0 :     __ Move(kCCallArg3, ExternalReference::isolate_address(masm->isolate()));
    2786             :   }
    2787         560 :   __ call(rbx);
    2788             : 
    2789         560 :   if (result_size > kMaxRegisterResultSize) {
    2790             :     // Read the result values stored on the stack. The result is stored
    2791             :     // above the two Arguments object slots on Win64.
    2792             :     DCHECK_LE(result_size, 2);
    2793             :     __ movq(kReturnRegister0, StackSpaceOperand(kArgExtraStackSpace + 0));
    2794             :     __ movq(kReturnRegister1, StackSpaceOperand(kArgExtraStackSpace + 1));
    2795             :   }
    2796             :   // Result is in rax or rdx:rax - do not destroy these registers!
    2797             : 
    2798             :   // Check result for exception sentinel.
    2799         560 :   Label exception_returned;
    2800         560 :   __ CompareRoot(rax, RootIndex::kException);
    2801         560 :   __ j(equal, &exception_returned);
    2802             : 
    2803             :   // Check that there is no pending exception, otherwise we
    2804             :   // should have returned the exception sentinel.
    2805         560 :   if (FLAG_debug_code) {
    2806           0 :     Label okay;
    2807           0 :     __ LoadRoot(r14, RootIndex::kTheHoleValue);
    2808             :     ExternalReference pending_exception_address = ExternalReference::Create(
    2809           0 :         IsolateAddressId::kPendingExceptionAddress, masm->isolate());
    2810             :     Operand pending_exception_operand =
    2811           0 :         masm->ExternalReferenceAsOperand(pending_exception_address);
    2812           0 :     __ cmpp(r14, pending_exception_operand);
    2813           0 :     __ j(equal, &okay, Label::kNear);
    2814           0 :     __ int3();
    2815           0 :     __ bind(&okay);
    2816             :   }
    2817             : 
    2818             :   // Exit the JavaScript to C++ exit frame.
    2819         560 :   __ LeaveExitFrame(save_doubles == kSaveFPRegs, argv_mode == kArgvOnStack);
    2820         560 :   __ ret(0);
    2821             : 
    2822             :   // Handling of exception.
    2823         560 :   __ bind(&exception_returned);
    2824             : 
    2825             :   ExternalReference pending_handler_context_address = ExternalReference::Create(
    2826         560 :       IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
    2827             :   ExternalReference pending_handler_entrypoint_address =
    2828             :       ExternalReference::Create(
    2829         560 :           IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
    2830             :   ExternalReference pending_handler_fp_address = ExternalReference::Create(
    2831         560 :       IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
    2832             :   ExternalReference pending_handler_sp_address = ExternalReference::Create(
    2833         560 :       IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());
    2834             : 
    2835             :   // Ask the runtime for help to determine the handler. This will set rax to
    2836             :   // contain the current pending exception; do not clobber it.
    2837             :   ExternalReference find_handler =
    2838         560 :       ExternalReference::Create(Runtime::kUnwindAndFindExceptionHandler);
    2839             :   {
    2840         560 :     FrameScope scope(masm, StackFrame::MANUAL);
    2841             :     __ movp(arg_reg_1, Immediate(0));  // argc.
    2842             :     __ movp(arg_reg_2, Immediate(0));  // argv.
    2843         560 :     __ Move(arg_reg_3, ExternalReference::isolate_address(masm->isolate()));
    2844         560 :     __ PrepareCallCFunction(3);
    2845         560 :     __ CallCFunction(find_handler, 3);
    2846             :   }
    2847             :   // Retrieve the handler context, SP and FP.
    2848             :   __ movp(rsi,
    2849         560 :           masm->ExternalReferenceAsOperand(pending_handler_context_address));
    2850         560 :   __ movp(rsp, masm->ExternalReferenceAsOperand(pending_handler_sp_address));
    2851         560 :   __ movp(rbp, masm->ExternalReferenceAsOperand(pending_handler_fp_address));
    2852             : 
    2853             :   // If the handler is a JS frame, restore the context to the frame. Note that
    2854             :   // the loaded context (rsi) will be zero for non-JS frames.
    2855         560 :   Label skip;
    2856             :   __ testp(rsi, rsi);
    2857         560 :   __ j(zero, &skip, Label::kNear);
    2858        1120 :   __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
    2859         560 :   __ bind(&skip);
    2860             : 
    2861             :   // Reset the masking register. This is done independently of the underlying
    2862             :   // feature flag {FLAG_untrusted_code_mitigations} to make the snapshot work
    2863             :   // with both configurations. It is safe to always do this, because the
    2864             :   // underlying register is caller-saved and can be arbitrarily clobbered.
    2865         560 :   __ ResetSpeculationPoisonRegister();
    2866             : 
    2867             :   // Compute the handler entry address and jump to it.
    2868             :   __ movp(rdi,
    2869         560 :           masm->ExternalReferenceAsOperand(pending_handler_entrypoint_address));
    2870         560 :   __ jmp(rdi);
    2871         560 : }
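                     : 
                     : // [Editorial sketch] The result_size handling in the C-entry code above
                     : // follows the C calling convention: results that fit in
                     : // kMaxRegisterResultSize registers come back in rax (or rdx:rax), while
                     : // wider results are written through a hidden pointer passed as the first
                     : // argument. A minimal stand-alone illustration of that ABI rule
                     : // (hypothetical types, not part of this file; the exact limits differ
                     : // between Win64 and the other platforms, see the #ifdef above):
                     : struct CEntrySketchTwoWords { long long lo, hi; };  // may fit in rdx:rax.
                     : struct CEntrySketchWide { long long a, b, c, d; };  // always via pointer.
                     : CEntrySketchTwoWords SketchReturnsInRegisters() { return {1, 2}; }
                     : CEntrySketchWide SketchReturnsViaHiddenPointer() { return {1, 2, 3, 4}; }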
    2872             : 
    2873          56 : void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
    2874          56 :   Label check_negative, process_64_bits, done;
    2875             : 
    2876             :   // Account for return address and saved regs.
    2877             :   const int kArgumentOffset = 4 * kRegisterSize;
    2878             : 
    2879          56 :   MemOperand mantissa_operand(MemOperand(rsp, kArgumentOffset));
    2880             :   MemOperand exponent_operand(
    2881          56 :       MemOperand(rsp, kArgumentOffset + kDoubleSize / 2));
    2882             : 
    2883             :   // The result is returned on the stack.
    2884          56 :   MemOperand return_operand = mantissa_operand;
    2885             : 
    2886             :   Register scratch1 = rbx;
    2887             : 
    2888             :   // Since we must use rcx for shifts below, use some other register (rax)
    2889             :   // to calculate the result if rcx is the requested return register.
    2890             :   Register result_reg = rax;
    2891             :   // Save rcx if it isn't the return register (and is therefore volatile), or,
    2892             :   // if it is the return register, save the temporary register we use in its
    2893             :   // stead for the result.
    2894          56 :   Register save_reg = rax;
    2895          56 :   __ pushq(rcx);
    2896          56 :   __ pushq(scratch1);
    2897          56 :   __ pushq(save_reg);
    2898             : 
    2899             :   __ movl(scratch1, mantissa_operand);
    2900             :   __ Movsd(kScratchDoubleReg, mantissa_operand);
    2901             :   __ movl(rcx, exponent_operand);
    2902             : 
    2903          56 :   __ andl(rcx, Immediate(HeapNumber::kExponentMask));
    2904             :   __ shrl(rcx, Immediate(HeapNumber::kExponentShift));
    2905         112 :   __ leal(result_reg, MemOperand(rcx, -HeapNumber::kExponentBias));
    2906          56 :   __ cmpl(result_reg, Immediate(HeapNumber::kMantissaBits));
    2907          56 :   __ j(below, &process_64_bits, Label::kNear);
    2908             : 
    2909             :   // The result is entirely in the lower 32 bits of the mantissa.
    2910             :   int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
    2911          56 :   __ subl(rcx, Immediate(delta));
    2912             :   __ xorl(result_reg, result_reg);
    2913          56 :   __ cmpl(rcx, Immediate(31));
    2914          56 :   __ j(above, &done, Label::kNear);
    2915             :   __ shll_cl(scratch1);
    2916          56 :   __ jmp(&check_negative, Label::kNear);
    2917             : 
    2918          56 :   __ bind(&process_64_bits);
    2919          56 :   __ Cvttsd2siq(result_reg, kScratchDoubleReg);
    2920          56 :   __ jmp(&done, Label::kNear);
    2921             : 
    2922             :   // If the double was negative, negate the integer result.
    2923          56 :   __ bind(&check_negative);
    2924             :   __ movl(result_reg, scratch1);
    2925             :   __ negl(result_reg);
    2926          56 :   __ cmpl(exponent_operand, Immediate(0));
    2927          56 :   __ cmovl(greater, result_reg, scratch1);
    2928             : 
    2929             :   // Restore registers
    2930          56 :   __ bind(&done);
    2931             :   __ movl(return_operand, result_reg);
    2932          56 :   __ popq(save_reg);
    2933          56 :   __ popq(scratch1);
    2934          56 :   __ popq(rcx);
    2935          56 :   __ ret(0);
    2936          56 : }
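                     : 
                     : // [Editorial sketch] A portable rendering of the truncation performed by
                     : // Generate_DoubleToI above: the input is reduced modulo 2^32 and the low
                     : // 32 bits are reinterpreted as a signed integer (ECMAScript ToInt32
                     : // semantics). Hypothetical helper, assuming <cmath>/<cstdint> are
                     : // available; it is not part of this file.
                     : int32_t SketchDoubleToInt32(double value) {
                     :   if (!std::isfinite(value)) return 0;  // NaN and +/-Infinity map to 0.
                     :   constexpr double kTwoPow32 = 4294967296.0;
                     :   double truncated = std::trunc(value);             // drop the fraction.
                     :   double modded = std::fmod(truncated, kTwoPow32);  // keep the low 32 bits.
                     :   if (modded < 0) modded += kTwoPow32;              // unsigned residue.
                     :   return static_cast<int32_t>(static_cast<uint32_t>(modded));
                     : }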
    2937             : 
    2938          56 : void Builtins::Generate_MathPowInternal(MacroAssembler* masm) {
    2939             :   const Register exponent = rdx;
    2940             :   const Register scratch = rcx;
    2941             :   const XMMRegister double_result = xmm3;
    2942             :   const XMMRegister double_base = xmm2;
    2943             :   const XMMRegister double_exponent = xmm1;
    2944             :   const XMMRegister double_scratch = xmm4;
    2945             : 
    2946          56 :   Label call_runtime, done, exponent_not_smi, int_exponent;
    2947             : 
    2948             :   // Save 1 in double_result - we need this several times later on.
    2949          56 :   __ movp(scratch, Immediate(1));
    2950          56 :   __ Cvtlsi2sd(double_result, scratch);
    2951             : 
    2952          56 :   Label fast_power, try_arithmetic_simplification;
    2953             :   // Detect integer exponents stored as double.
    2954             :   __ DoubleToI(exponent, double_exponent, double_scratch,
    2955          56 :                &try_arithmetic_simplification, &try_arithmetic_simplification);
    2956          56 :   __ jmp(&int_exponent);
    2957             : 
    2958          56 :   __ bind(&try_arithmetic_simplification);
    2959          56 :   __ Cvttsd2si(exponent, double_exponent);
    2960             :   // Skip to runtime if possibly NaN (indicated by the indefinite integer).
    2961          56 :   __ cmpl(exponent, Immediate(0x1));
    2962          56 :   __ j(overflow, &call_runtime);
    2963             : 
    2964             :   // Using FPU instructions to calculate power.
    2965          56 :   Label fast_power_failed;
    2966          56 :   __ bind(&fast_power);
    2967          56 :   __ fnclex();  // Clear flags to catch exceptions later.
    2968             :   // Transfer (B)ase and (E)xponent onto the FPU register stack.
    2969          56 :   __ subp(rsp, Immediate(kDoubleSize));
    2970         112 :   __ Movsd(Operand(rsp, 0), double_exponent);
    2971          56 :   __ fld_d(Operand(rsp, 0));  // E
    2972         112 :   __ Movsd(Operand(rsp, 0), double_base);
    2973          56 :   __ fld_d(Operand(rsp, 0));  // B, E
    2974             : 
    2975             :   // Exponent is in st(1) and base is in st(0)
    2976             :   // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
    2977             :   // FYL2X calculates st(1) * log2(st(0))
    2978          56 :   __ fyl2x();    // X
    2979          56 :   __ fld(0);     // X, X
    2980          56 :   __ frndint();  // rnd(X), X
    2981          56 :   __ fsub(1);    // rnd(X), X-rnd(X)
    2982          56 :   __ fxch(1);    // X - rnd(X), rnd(X)
    2983             :   // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
    2984          56 :   __ f2xm1();   // 2^(X-rnd(X)) - 1, rnd(X)
    2985          56 :   __ fld1();    // 1, 2^(X-rnd(X)) - 1, rnd(X)
    2986          56 :   __ faddp(1);  // 2^(X-rnd(X)), rnd(X)
    2987             :   // FSCALE calculates st(0) * 2^st(1)
    2988          56 :   __ fscale();  // 2^X, rnd(X)
    2989          56 :   __ fstp(1);
    2990             :   // Bail out to runtime in case of exceptions in the status word.
    2991          56 :   __ fnstsw_ax();
    2992          56 :   __ testb(rax, Immediate(0x5F));  // Check for all but precision exception.
    2993          56 :   __ j(not_zero, &fast_power_failed, Label::kNear);
    2994          56 :   __ fstp_d(Operand(rsp, 0));
    2995         112 :   __ Movsd(double_result, Operand(rsp, 0));
    2996          56 :   __ addp(rsp, Immediate(kDoubleSize));
    2997          56 :   __ jmp(&done);
    2998             : 
    2999          56 :   __ bind(&fast_power_failed);
    3000          56 :   __ fninit();
    3001          56 :   __ addp(rsp, Immediate(kDoubleSize));
    3002          56 :   __ jmp(&call_runtime);
    3003             : 
    3004             :   // Calculate power with integer exponent.
    3005          56 :   __ bind(&int_exponent);
    3006             :   const XMMRegister double_scratch2 = double_exponent;
    3007             :   // Back up exponent as we need to check if exponent is negative later.
    3008             :   __ movp(scratch, exponent);                // Back up exponent.
    3009             :   __ Movsd(double_scratch, double_base);     // Back up base.
    3010             :   __ Movsd(double_scratch2, double_result);  // Load double_exponent with 1.
    3011             : 
    3012             :   // Get absolute value of exponent.
    3013          56 :   Label no_neg, while_true, while_false;
    3014             :   __ testl(scratch, scratch);
    3015          56 :   __ j(positive, &no_neg, Label::kNear);
    3016             :   __ negl(scratch);
    3017          56 :   __ bind(&no_neg);
    3018             : 
    3019          56 :   __ j(zero, &while_false, Label::kNear);
    3020             :   __ shrl(scratch, Immediate(1));
    3021             :   // The 'above' condition below means CF==0 && ZF==0, i.e. the bit that has
    3022             :   // been shifted out is 0 and the remaining value is not 0.
    3023          56 :   __ j(above, &while_true, Label::kNear);
    3024             :   __ Movsd(double_result, double_scratch);
    3025          56 :   __ j(zero, &while_false, Label::kNear);
    3026             : 
    3027          56 :   __ bind(&while_true);
    3028             :   __ shrl(scratch, Immediate(1));
    3029             :   __ Mulsd(double_scratch, double_scratch);
    3030          56 :   __ j(above, &while_true, Label::kNear);
    3031             :   __ Mulsd(double_result, double_scratch);
    3032          56 :   __ j(not_zero, &while_true);
    3033             : 
    3034          56 :   __ bind(&while_false);
    3035             :   // If the exponent is negative, return 1/result.
    3036             :   __ testl(exponent, exponent);
    3037          56 :   __ j(greater, &done);
    3038             :   __ Divsd(double_scratch2, double_result);
    3039             :   __ Movsd(double_result, double_scratch2);
    3040             :   // Test whether result is zero.  Bail out to check for subnormal result.
    3041             :   // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
    3042             :   __ Xorpd(double_scratch2, double_scratch2);
    3043             :   __ Ucomisd(double_scratch2, double_result);
    3044             :   // double_exponent aliased as double_scratch2 has already been overwritten
    3045             :   // and may not have contained the exponent value in the first place when the
    3046             :   // input was a smi.  Reset it with the exponent value before bailing out.
    3047          56 :   __ j(not_equal, &done);
    3048          56 :   __ Cvtlsi2sd(double_exponent, exponent);
    3049             : 
    3050             :   // Returning or bailing out.
    3051          56 :   __ bind(&call_runtime);
    3052             :   // Move base to the correct argument register.  Exponent is already in xmm1.
    3053             :   __ Movsd(xmm0, double_base);
    3054             :   DCHECK(double_exponent == xmm1);
    3055             :   {
    3056             :     AllowExternalCallThatCantCauseGC scope(masm);
    3057          56 :     __ PrepareCallCFunction(2);
    3058          56 :     __ CallCFunction(ExternalReference::power_double_double_function(), 2);
    3059             :   }
    3060             :   // Return value is in xmm0.
    3061             :   __ Movsd(double_result, xmm0);
    3062             : 
    3063          56 :   __ bind(&done);
    3064          56 :   __ ret(0);
    3065          56 : }
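                     : 
                     : // [Editorial sketch] The int_exponent path above is square-and-multiply.
                     : // A plain C++ rendering of that loop (hypothetical helper, not part of
                     : // this file; the runtime fallback and the subnormal re-check are omitted):
                     : double SketchPowIntExponent(double base, int exponent) {
                     :   long long bits = exponent;
                     :   if (bits < 0) bits = -bits;         // work with |exponent|.
                     :   double result = 1.0;
                     :   double scratch = base;
                     :   while (bits != 0) {
                     :     if (bits & 1) result *= scratch;  // low bit set: multiply into result.
                     :     scratch *= scratch;               // square the running base.
                     :     bits >>= 1;
                     :   }
                     :   return exponent < 0 ? 1.0 / result : result;
                     : }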
    3066             : 
    3067          56 : void Builtins::Generate_InternalArrayConstructorImpl(MacroAssembler* masm) {
    3068             :   // ----------- S t a t e -------------
    3069             :   //  -- rax    : argc
    3070             :   //  -- rdi    : constructor
    3071             :   //  -- rsp[0] : return address
    3072             :   //  -- rsp[8] : last argument
    3073             :   // -----------------------------------
    3074             : 
    3075             :   Register decompr_scratch_for_debug =
    3076          56 :       COMPRESS_POINTERS_BOOL ? kScratchRegister : no_reg;
    3077             : 
    3078          56 :   if (FLAG_debug_code) {
    3079             :     // The array construct code is only set for the global and natives
    3080             :     // builtin Array functions, which always have maps.
    3081             : 
    3082             :     // Initial map for the builtin Array function should be a map.
    3083             :     __ LoadTaggedPointerField(
    3084             :         rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset),
    3085           0 :         decompr_scratch_for_debug);
    3086             :     // Will both indicate a nullptr and a Smi.
    3087             :     STATIC_ASSERT(kSmiTag == 0);
    3088           0 :     Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
    3089           0 :     __ Check(not_smi, AbortReason::kUnexpectedInitialMapForArrayFunction);
    3090           0 :     __ CmpObjectType(rcx, MAP_TYPE, rcx);
    3091           0 :     __ Check(equal, AbortReason::kUnexpectedInitialMapForArrayFunction);
    3092             : 
    3093             :     // Figure out the right elements kind
    3094             :     __ LoadTaggedPointerField(
    3095             :         rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset),
    3096           0 :         decompr_scratch_for_debug);
    3097             : 
    3098             :     // Load the map's "bit field 2" into rcx. We only need the first byte,
    3099             :     // but the following masking takes care of that anyway.
    3100           0 :     __ movzxbp(rcx, FieldOperand(rcx, Map::kBitField2Offset));
    3101             :     // Retrieve elements_kind from bit field 2.
    3102           0 :     __ DecodeField<Map::ElementsKindBits>(rcx);
    3103             : 
    3104             :     // Initial elements kind should be packed elements.
    3105           0 :     __ cmpl(rcx, Immediate(PACKED_ELEMENTS));
    3106           0 :     __ Assert(equal, AbortReason::kInvalidElementsKindForInternalPackedArray);
    3107             : 
    3108             :     // No arguments should be passed.
    3109             :     __ testp(rax, rax);
    3110           0 :     __ Assert(zero, AbortReason::kWrongNumberOfArgumentsForInternalPackedArray);
    3111             :   }
    3112             : 
    3113             :   __ Jump(
    3114             :       BUILTIN_CODE(masm->isolate(), InternalArrayNoArgumentConstructor_Packed),
    3115          56 :       RelocInfo::CODE_TARGET);
    3116          56 : }
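                     : 
                     : // [Editorial sketch] DecodeField<Map::ElementsKindBits> above extracts a
                     : // bit field from the map's "bit field 2" byte. A generic illustration of
                     : // such a decode (the shift and width here are made up, not the real Map
                     : // layout):
                     : constexpr unsigned kSketchFieldShift = 3;
                     : constexpr unsigned kSketchFieldSize = 5;
                     : constexpr unsigned SketchDecodeBitField(unsigned bit_field2) {
                     :   return (bit_field2 >> kSketchFieldShift) & ((1u << kSketchFieldSize) - 1u);
                     : }
                     : static_assert(SketchDecodeBitField(0xFF) == 0x1F,
                     :               "an all-ones byte yields a full 5-bit field");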
    3117             : 
    3118             : namespace {
    3119             : 
    3120             : int Offset(ExternalReference ref0, ExternalReference ref1) {
    3121         224 :   int64_t offset = (ref0.address() - ref1.address());
    3122             :   // Check that the offset fits into an int.
    3123             :   DCHECK(static_cast<int>(offset) == offset);
    3124         224 :   return static_cast<int>(offset);
    3125             : }
    3126             : 
    3127             : // Calls an API function.  Allocates HandleScope, extracts returned value
    3128             : // from handle and propagates exceptions.  Clobbers r14, r15, rbx and
    3129             : // caller-save registers.  Restores context.  On return, removes
    3130             : // stack_space * kSystemPointerSize bytes from the stack (GCed).
    3131         112 : void CallApiFunctionAndReturn(MacroAssembler* masm, Register function_address,
    3132             :                               ExternalReference thunk_ref,
    3133             :                               Register thunk_last_arg, int stack_space,
    3134             :                               Operand* stack_space_operand,
    3135             :                               Operand return_value_operand) {
    3136         112 :   Label prologue;
    3137         112 :   Label promote_scheduled_exception;
    3138         112 :   Label delete_allocated_handles;
    3139         112 :   Label leave_exit_frame;
    3140             :   Label write_back;
    3141             : 
    3142         112 :   Isolate* isolate = masm->isolate();
    3143             :   Factory* factory = isolate->factory();
    3144             :   ExternalReference next_address =
    3145         112 :       ExternalReference::handle_scope_next_address(isolate);
    3146             :   const int kNextOffset = 0;
    3147             :   const int kLimitOffset = Offset(
    3148         112 :       ExternalReference::handle_scope_limit_address(isolate), next_address);
    3149             :   const int kLevelOffset = Offset(
    3150         112 :       ExternalReference::handle_scope_level_address(isolate), next_address);
    3151             :   ExternalReference scheduled_exception_address =
    3152         112 :       ExternalReference::scheduled_exception_address(isolate);
    3153             : 
    3154             :   DCHECK(rdx == function_address || r8 == function_address);
    3155             :   // Allocate HandleScope in callee-save registers.
    3156             :   Register prev_next_address_reg = r14;
    3157             :   Register prev_limit_reg = rbx;
    3158         112 :   Register base_reg = r15;
    3159         112 :   __ Move(base_reg, next_address);
    3160         224 :   __ movp(prev_next_address_reg, Operand(base_reg, kNextOffset));
    3161         224 :   __ movp(prev_limit_reg, Operand(base_reg, kLimitOffset));
    3162         112 :   __ addl(Operand(base_reg, kLevelOffset), Immediate(1));
    3163             : 
    3164         112 :   if (FLAG_log_timer_events) {
    3165           0 :     FrameScope frame(masm, StackFrame::MANUAL);
    3166             :     __ PushSafepointRegisters();
    3167           0 :     __ PrepareCallCFunction(1);
    3168           0 :     __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
    3169           0 :     __ CallCFunction(ExternalReference::log_enter_external_function(), 1);
    3170           0 :     __ PopSafepointRegisters();
    3171             :   }
    3172             : 
    3173         112 :   Label profiler_disabled;
    3174         112 :   Label end_profiler_check;
    3175         112 :   __ Move(rax, ExternalReference::is_profiling_address(isolate));
    3176         224 :   __ cmpb(Operand(rax, 0), Immediate(0));
    3177         112 :   __ j(zero, &profiler_disabled);
    3178             : 
    3179             :   // Third parameter is the address of the actual getter function.
    3180         112 :   __ Move(thunk_last_arg, function_address);
    3181         112 :   __ Move(rax, thunk_ref);
    3182         112 :   __ jmp(&end_profiler_check);
    3183             : 
    3184         112 :   __ bind(&profiler_disabled);
    3185             :   // Call the api function directly (the profiler is disabled).
    3186         112 :   __ Move(rax, function_address);
    3187             : 
    3188         112 :   __ bind(&end_profiler_check);
    3189             : 
    3190             :   // Call the api function!
    3191         112 :   __ call(rax);
    3192             : 
    3193         112 :   if (FLAG_log_timer_events) {
    3194           0 :     FrameScope frame(masm, StackFrame::MANUAL);
    3195             :     __ PushSafepointRegisters();
    3196           0 :     __ PrepareCallCFunction(1);
    3197           0 :     __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
    3198           0 :     __ CallCFunction(ExternalReference::log_leave_external_function(), 1);
    3199           0 :     __ PopSafepointRegisters();
    3200             :   }
    3201             : 
    3202             :   // Load the value from ReturnValue
    3203             :   __ movp(rax, return_value_operand);
    3204         112 :   __ bind(&prologue);
    3205             : 
    3206             :   // No more valid handles (the result handle was the last one). Restore
    3207             :   // previous handle scope.
    3208         112 :   __ subl(Operand(base_reg, kLevelOffset), Immediate(1));
    3209         224 :   __ movp(Operand(base_reg, kNextOffset), prev_next_address_reg);
    3210         112 :   __ cmpp(prev_limit_reg, Operand(base_reg, kLimitOffset));
    3211         112 :   __ j(not_equal, &delete_allocated_handles);
    3212             : 
    3213             :   // Leave the API exit frame.
    3214         112 :   __ bind(&leave_exit_frame);
    3215         112 :   if (stack_space_operand != nullptr) {
    3216             :     DCHECK_EQ(stack_space, 0);
    3217             :     __ movp(rbx, *stack_space_operand);
    3218             :   }
    3219         112 :   __ LeaveApiExitFrame();
    3220             : 
    3221             :   // Check if the function scheduled an exception.
    3222         112 :   __ Move(rdi, scheduled_exception_address);
    3223         112 :   __ Cmp(Operand(rdi, 0), factory->the_hole_value());
    3224         112 :   __ j(not_equal, &promote_scheduled_exception);
    3225             : 
    3226             : #if DEBUG
    3227             :   // Check if the function returned a valid JavaScript value.
    3228             :   Label ok;
    3229             :   Register return_value = rax;
    3230             :   Register map = rcx;
    3231             : 
    3232             :   __ JumpIfSmi(return_value, &ok, Label::kNear);
    3233             :   Register decompr_scratch_for_debug =
    3234             :       COMPRESS_POINTERS_BOOL ? kScratchRegister : no_reg;
    3235             :   __ LoadTaggedPointerField(map,
    3236             :                             FieldOperand(return_value, HeapObject::kMapOffset),
    3237             :                             decompr_scratch_for_debug);
    3238             : 
    3239             :   __ CmpInstanceType(map, LAST_NAME_TYPE);
    3240             :   __ j(below_equal, &ok, Label::kNear);
    3241             : 
    3242             :   __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE);
    3243             :   __ j(above_equal, &ok, Label::kNear);
    3244             : 
    3245             :   __ CompareRoot(map, RootIndex::kHeapNumberMap);
    3246             :   __ j(equal, &ok, Label::kNear);
    3247             : 
    3248             :   __ CompareRoot(return_value, RootIndex::kUndefinedValue);
    3249             :   __ j(equal, &ok, Label::kNear);
    3250             : 
    3251             :   __ CompareRoot(return_value, RootIndex::kTrueValue);
    3252             :   __ j(equal, &ok, Label::kNear);
    3253             : 
    3254             :   __ CompareRoot(return_value, RootIndex::kFalseValue);
    3255             :   __ j(equal, &ok, Label::kNear);
    3256             : 
    3257             :   __ CompareRoot(return_value, RootIndex::kNullValue);
    3258             :   __ j(equal, &ok, Label::kNear);
    3259             : 
    3260             :   __ Abort(AbortReason::kAPICallReturnedInvalidObject);
    3261             : 
    3262             :   __ bind(&ok);
    3263             : #endif
    3264             : 
    3265         112 :   if (stack_space_operand == nullptr) {
    3266             :     DCHECK_NE(stack_space, 0);
    3267          56 :     __ ret(stack_space * kSystemPointerSize);
    3268             :   } else {
    3269             :     DCHECK_EQ(stack_space, 0);
    3270             :     __ PopReturnAddressTo(rcx);
    3271          56 :     __ addq(rsp, rbx);
    3272          56 :     __ jmp(rcx);
    3273             :   }
    3274             : 
    3275             :   // Re-throw by promoting a scheduled exception.
    3276         112 :   __ bind(&promote_scheduled_exception);
    3277         112 :   __ TailCallRuntime(Runtime::kPromoteScheduledException);
    3278             : 
    3279             :   // HandleScope limit has changed. Delete allocated extensions.
    3280         112 :   __ bind(&delete_allocated_handles);
    3281         224 :   __ movp(Operand(base_reg, kLimitOffset), prev_limit_reg);
    3282             :   __ movp(prev_limit_reg, rax);
    3283         112 :   __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
    3284         112 :   __ LoadAddress(rax, ExternalReference::delete_handle_scope_extensions());
    3285         112 :   __ call(rax);
    3286             :   __ movp(rax, prev_limit_reg);
    3287         112 :   __ jmp(&leave_exit_frame);
    3288         112 : }
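                     : 
                     : // [Editorial sketch] The HandleScope bookkeeping above, rendered as plain
                     : // C++ (hypothetical struct and helper, not the real HandleScopeData; the
                     : // profiler thunk, exception checks and exit-frame handling are omitted):
                     : struct SketchHandleScopeFields {
                     :   void** next;   // next free handle slot (kNextOffset).
                     :   void** limit;  // end of the current handle block (kLimitOffset).
                     :   int level;     // scope nesting depth (kLevelOffset).
                     : };
                     : 
                     : template <typename ApiCall>
                     : void* SketchCallWithHandleScope(SketchHandleScopeFields* scope,
                     :                                 ApiCall call) {
                     :   void** prev_next = scope->next;   // save the current scope state ...
                     :   void** prev_limit = scope->limit;
                     :   scope->level++;                   // ... and open a new level.
                     :   void* result = call();
                     :   scope->level--;                   // restore the previous scope.
                     :   scope->next = prev_next;
                     :   if (scope->limit != prev_limit) {
                     :     // New handle blocks were allocated during the call; the builtin jumps
                     :     // to delete_allocated_handles and calls the
                     :     // delete_handle_scope_extensions helper here.
                     :     scope->limit = prev_limit;
                     :   }
                     :   return result;
                     : }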
    3289             : 
    3290             : }  // namespace
    3291             : 
    3292             : // TODO(jgruber): Instead of explicitly setting up implicit_args_ on the stack
    3293             : // in CallApiCallback, we could use the calling convention to set up the stack
    3294             : // correctly in the first place.
    3295             : //
    3296             : // TODO(jgruber): I suspect that most of CallApiCallback could be implemented
    3297             : // as a C++ trampoline, vastly simplifying the assembly implementation.
    3298             : 
    3299          56 : void Builtins::Generate_CallApiCallback(MacroAssembler* masm) {
    3300             :   // ----------- S t a t e -------------
    3301             :   //  -- rsi                 : kTargetContext
    3302             :   //  -- rdx                 : kApiFunctionAddress
    3303             :   //  -- rcx                 : kArgc
    3304             :   //  --
    3305             :   //  -- rsp[0]              : return address
    3306             :   //  -- rsp[8]              : last argument
    3307             :   //  -- ...
    3308             :   //  -- rsp[argc * 8]       : first argument
    3309             :   //  -- rsp[(argc + 1) * 8] : receiver
    3310             :   //  -- rsp[(argc + 2) * 8] : kHolder
    3311             :   //  -- rsp[(argc + 3) * 8] : kCallData
    3312             :   // -----------------------------------
    3313             : 
    3314          56 :   Register api_function_address = rdx;
    3315             :   Register argc = rcx;
    3316             : 
    3317             :   DCHECK(!AreAliased(api_function_address, argc, kScratchRegister));
    3318             : 
    3319             :   // Stack offsets (without argc).
    3320             :   static constexpr int kReceiverOffset = kSystemPointerSize;
    3321             :   static constexpr int kHolderOffset = kReceiverOffset + kSystemPointerSize;
    3322             :   static constexpr int kCallDataOffset = kHolderOffset + kSystemPointerSize;
    3323             : 
    3324             :   // Extra stack arguments are: the receiver, kHolder, kCallData.
    3325             :   static constexpr int kExtraStackArgumentCount = 3;
    3326             : 
    3327             :   typedef FunctionCallbackArguments FCA;
    3328             : 
    3329             :   STATIC_ASSERT(FCA::kArgsLength == 6);
    3330             :   STATIC_ASSERT(FCA::kNewTargetIndex == 5);
    3331             :   STATIC_ASSERT(FCA::kDataIndex == 4);
    3332             :   STATIC_ASSERT(FCA::kReturnValueOffset == 3);
    3333             :   STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
    3334             :   STATIC_ASSERT(FCA::kIsolateIndex == 1);
    3335             :   STATIC_ASSERT(FCA::kHolderIndex == 0);
    3336             : 
    3337             :   // Set up FunctionCallbackInfo's implicit_args on the stack as follows:
    3338             :   //
    3339             :   // Current state:
    3340             :   //   rsp[0]: return address
    3341             :   //
    3342             :   // Target state:
    3343             :   //   rsp[0 * kSystemPointerSize]: return address
    3344             :   //   rsp[1 * kSystemPointerSize]: kHolder
    3345             :   //   rsp[2 * kSystemPointerSize]: kIsolate
    3346             :   //   rsp[3 * kSystemPointerSize]: undefined (kReturnValueDefaultValue)
    3347             :   //   rsp[4 * kSystemPointerSize]: undefined (kReturnValue)
    3348             :   //   rsp[5 * kSystemPointerSize]: kData
    3349             :   //   rsp[6 * kSystemPointerSize]: undefined (kNewTarget)
    3350             : 
    3351             :   // Reserve space on the stack.
    3352          56 :   __ subp(rsp, Immediate(FCA::kArgsLength * kSystemPointerSize));
    3353             : 
    3354             :   // Return address (the old stack location is overwritten later on).
    3355             :   __ movp(kScratchRegister,
    3356         112 :           Operand(rsp, FCA::kArgsLength * kSystemPointerSize));
    3357         112 :   __ movp(Operand(rsp, 0 * kSystemPointerSize), kScratchRegister);
    3358             : 
    3359             :   // kHolder.
    3360             :   __ movp(kScratchRegister,
    3361             :           Operand(rsp, argc, times_pointer_size,
    3362         112 :                   FCA::kArgsLength * kSystemPointerSize + kHolderOffset));
    3363         112 :   __ movp(Operand(rsp, 1 * kSystemPointerSize), kScratchRegister);
    3364             : 
    3365             :   // kIsolate.
    3366             :   __ Move(kScratchRegister,
    3367          56 :           ExternalReference::isolate_address(masm->isolate()));
    3368         112 :   __ movp(Operand(rsp, 2 * kSystemPointerSize), kScratchRegister);
    3369             : 
    3370             :   // kReturnValueDefaultValue, kReturnValue, and kNewTarget.
    3371          56 :   __ LoadRoot(kScratchRegister, RootIndex::kUndefinedValue);
    3372         112 :   __ movp(Operand(rsp, 3 * kSystemPointerSize), kScratchRegister);
    3373         112 :   __ movp(Operand(rsp, 4 * kSystemPointerSize), kScratchRegister);
    3374         112 :   __ movp(Operand(rsp, 6 * kSystemPointerSize), kScratchRegister);
    3375             : 
    3376             :   // kData.
    3377             :   __ movp(kScratchRegister,
    3378             :           Operand(rsp, argc, times_pointer_size,
    3379         112 :                   FCA::kArgsLength * kSystemPointerSize + kCallDataOffset));
    3380         112 :   __ movp(Operand(rsp, 5 * kSystemPointerSize), kScratchRegister);
    3381             : 
    3382             :   // Keep a pointer to kHolder (= implicit_args) in a scratch register.
    3383             :   // We use it below to set up the FunctionCallbackInfo object.
    3384             :   Register scratch = rbx;
    3385         112 :   __ leap(scratch, Operand(rsp, 1 * kSystemPointerSize));
    3386             : 
    3387             :   // Allocate the v8::FunctionCallbackInfo structure in the arguments' space,
    3388             :   // since that space is not controlled by the GC.
    3389             :   static constexpr int kApiStackSpace = 4;
    3390          56 :   __ EnterApiExitFrame(kApiStackSpace);
    3391             : 
    3392             :   // FunctionCallbackInfo::implicit_args_ (points at kHolder as set up above).
    3393             :   __ movp(StackSpaceOperand(0), scratch);
    3394             : 
    3395             :   // FunctionCallbackInfo::values_ (points at the first varargs argument passed
    3396             :   // on the stack).
    3397             :   __ leap(scratch, Operand(scratch, argc, times_pointer_size,
    3398         112 :                            (FCA::kArgsLength - 1) * kSystemPointerSize));
    3399             :   __ movp(StackSpaceOperand(1), scratch);
    3400             : 
    3401             :   // FunctionCallbackInfo::length_.
    3402             :   __ movp(StackSpaceOperand(2), argc);
    3403             : 
    3404             :   // We also store the number of bytes to drop from the stack after returning
    3405             :   // from the API function here.
    3406             :   __ leaq(kScratchRegister,
    3407             :           Operand(argc, times_pointer_size,
    3408             :                   (FCA::kArgsLength + kExtraStackArgumentCount) *
    3409         112 :                       kSystemPointerSize));
    3410             :   __ movp(StackSpaceOperand(3), kScratchRegister);
    3411             : 
    3412             :   Register arguments_arg = arg_reg_1;
    3413          56 :   Register callback_arg = arg_reg_2;
    3414             : 
    3415             :   // It's okay if api_function_address == callback_arg, but it must not
    3416             :   // alias arguments_arg.
    3417             :   DCHECK(api_function_address != arguments_arg);
    3418             : 
    3419             :   // v8::FunctionCallback's argument.
    3420             :   __ leap(arguments_arg, StackSpaceOperand(0));
    3421             : 
    3422          56 :   ExternalReference thunk_ref = ExternalReference::invoke_function_callback();
    3423             : 
    3424             :   // There are two stack slots above the arguments we constructed on the stack:
    3425             :   // the stored ebp (pushed by EnterApiExitFrame), and the return address.
    3426             :   // the stored rbp (pushed by EnterApiExitFrame), and the return address.
    3427             :   Operand return_value_operand(
    3428             :       rbp,
    3429          56 :       (kStackSlotsAboveFCA + FCA::kReturnValueOffset) * kSystemPointerSize);
    3430             : 
    3431             :   static constexpr int kUseStackSpaceOperand = 0;
    3432          56 :   Operand stack_space_operand = StackSpaceOperand(3);
    3433             :   CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, callback_arg,
    3434             :                            kUseStackSpaceOperand, &stack_space_operand,
    3435          56 :                            return_value_operand);
    3436          56 : }
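                     : 
                     : // [Editorial sketch] Worked example of the bytes-to-drop value computed
                     : // into StackSpaceOperand(3) above. The constants mirror the ones used in
                     : // this function (kSystemPointerSize is 8 on x64); the helper itself is
                     : // hypothetical and not part of this file.
                     : constexpr int kSketchFCAArgsLength = 6;   // FCA::kArgsLength.
                     : constexpr int kSketchExtraStackArgs = 3;  // receiver, kHolder, kCallData.
                     : constexpr int SketchBytesToDrop(int argc) {
                     :   return (argc + kSketchFCAArgsLength + kSketchExtraStackArgs) * 8;
                     : }
                     : static_assert(SketchBytesToDrop(2) == 88,
                     :               "a call with two JS arguments drops 11 slots of 8 bytes");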
    3437             : 
    3438          56 : void Builtins::Generate_CallApiGetter(MacroAssembler* masm) {
    3439             :   Register name_arg = arg_reg_1;
    3440             :   Register accessor_info_arg = arg_reg_2;
    3441          56 :   Register getter_arg = arg_reg_3;
    3442             :   Register api_function_address = r8;
    3443          56 :   Register receiver = ApiGetterDescriptor::ReceiverRegister();
    3444          56 :   Register holder = ApiGetterDescriptor::HolderRegister();
    3445          56 :   Register callback = ApiGetterDescriptor::CallbackRegister();
    3446             :   Register scratch = rax;
    3447          56 :   Register decompr_scratch1 = COMPRESS_POINTERS_BOOL ? r11 : no_reg;
    3448          56 :   Register decompr_scratch2 = COMPRESS_POINTERS_BOOL ? r12 : no_reg;
    3449             :   Register decompr_scratch_for_debug =
    3450          56 :       COMPRESS_POINTERS_BOOL ? kScratchRegister : no_reg;
    3451             : 
    3452             :   DCHECK(!AreAliased(receiver, holder, callback, scratch, decompr_scratch1,
    3453             :                      decompr_scratch2, decompr_scratch_for_debug));
    3454             : 
    3455             :   // Build v8::PropertyCallbackInfo::args_ array on the stack and push property
    3456             :   // name below the exit frame to make the GC aware of them.
    3457             :   STATIC_ASSERT(PropertyCallbackArguments::kShouldThrowOnErrorIndex == 0);
    3458             :   STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 1);
    3459             :   STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 2);
    3460             :   STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 3);
    3461             :   STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 4);
    3462             :   STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 5);
    3463             :   STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 6);
    3464             :   STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 7);
    3465             : 
    3466             :   // Insert additional parameters into the stack frame above return address.
    3467             :   __ PopReturnAddressTo(scratch);
    3468          56 :   __ Push(receiver);
    3469             :   __ PushTaggedAnyField(FieldOperand(callback, AccessorInfo::kDataOffset),
    3470             :                         decompr_scratch1, decompr_scratch2,
    3471          56 :                         decompr_scratch_for_debug);
    3472          56 :   __ LoadRoot(kScratchRegister, RootIndex::kUndefinedValue);
    3473          56 :   __ Push(kScratchRegister);  // return value
    3474          56 :   __ Push(kScratchRegister);  // return value default
    3475          56 :   __ PushAddress(ExternalReference::isolate_address(masm->isolate()));
    3476          56 :   __ Push(holder);
    3477          56 :   __ Push(Smi::zero());  // should_throw_on_error -> false
    3478             :   __ PushTaggedPointerField(FieldOperand(callback, AccessorInfo::kNameOffset),
    3479          56 :                             decompr_scratch1, decompr_scratch_for_debug);
    3480             :   __ PushReturnAddressFrom(scratch);
    3481             : 
    3482             :   // v8::PropertyCallbackInfo::args_ array and name handle.
    3483             :   const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;
    3484             : 
    3485             :   // Allocate v8::PropertyCallbackInfo in non-GCed stack space.
    3486             :   const int kArgStackSpace = 1;
    3487             : 
    3488             :   // Load the address of the v8::PropertyCallbackInfo::args_ array.
    3489         112 :   __ leap(scratch, Operand(rsp, 2 * kSystemPointerSize));
    3490             : 
    3491          56 :   __ EnterApiExitFrame(kArgStackSpace);
    3492             : 
    3493             :   // Create v8::PropertyCallbackInfo object on the stack and initialize
    3494             :   // its args_ field.
    3495          56 :   Operand info_object = StackSpaceOperand(0);
    3496             :   __ movp(info_object, scratch);
    3497             : 
    3498         112 :   __ leap(name_arg, Operand(scratch, -kSystemPointerSize));
    3499             :   // The context register (rsi) has been saved in EnterApiExitFrame and
    3500             :   // could be used to pass arguments.
    3501             :   __ leap(accessor_info_arg, info_object);
    3502             : 
    3503             :   ExternalReference thunk_ref =
    3504          56 :       ExternalReference::invoke_accessor_getter_callback();
    3505             : 
    3506             :   // It's okay if api_function_address == getter_arg, but it must not
    3507             :   // alias accessor_info_arg or name_arg.
    3508             :   DCHECK(api_function_address != accessor_info_arg);
    3509             :   DCHECK(api_function_address != name_arg);
    3510             :   __ LoadTaggedPointerField(
    3511             :       scratch, FieldOperand(callback, AccessorInfo::kJsGetterOffset),
    3512          56 :       decompr_scratch_for_debug);
    3513             :   __ movp(api_function_address,
    3514             :           FieldOperand(scratch, Foreign::kForeignAddressOffset));
    3515             : 
    3516             :   // +3 is to skip prolog, return address and name handle.
    3517             :   Operand return_value_operand(
    3518             :       rbp,
    3519          56 :       (PropertyCallbackArguments::kReturnValueOffset + 3) * kSystemPointerSize);
    3520             :   Operand* const kUseStackSpaceConstant = nullptr;
    3521             :   CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, getter_arg,
    3522             :                            kStackUnwindSpace, kUseStackSpaceConstant,
    3523          56 :                            return_value_operand);
    3524          56 : }
    3525             : 
    3526          56 : void Builtins::Generate_DirectCEntry(MacroAssembler* masm) {
    3527          56 :   __ int3();  // Unused on this architecture.
    3528          56 : }
    3529             : 
    3530             : #undef __
    3531             : 
    3532             : }  // namespace internal
    3533       94089 : }  // namespace v8
    3534             : 
    3535             : #endif  // V8_TARGET_ARCH_X64

Generated by: LCOV version 1.10