LCOV - code coverage report
Current view: top level - src/builtins/x64 - builtins-x64.cc (source / functions) Hit Total Coverage
Test: app.info Lines: 890 910 97.8 %
Date: 2017-10-20 Functions: 68 68 100.0 %

          Line data    Source code
       1             : // Copyright 2012 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #if V8_TARGET_ARCH_X64
       6             : 
       7             : #include "src/code-factory.h"
       8             : #include "src/counters.h"
       9             : #include "src/deoptimizer.h"
      10             : #include "src/frame-constants.h"
      11             : #include "src/frames.h"
      12             : #include "src/objects-inl.h"
      13             : #include "src/objects/debug-objects.h"
      14             : 
      15             : namespace v8 {
      16             : namespace internal {
      17             : 
      18             : #define __ ACCESS_MASM(masm)
      19             : 
                      : // Loads the C++ builtin's entry point into rbx and tail-jumps to the
                      : // adaptor matching the requested exit-frame type. rbx is consumed by
                      : // AdaptorWithExitFrameType below.
       20        7037 : void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
       21             :                                 ExitFrameType exit_frame_type) {
       22       14074 :   __ LoadAddress(rbx, ExternalReference(address, masm->isolate()));
       23        7037 :   if (exit_frame_type == BUILTIN_EXIT) {
       24             :     __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
       25        6944 :             RelocInfo::CODE_TARGET);
       26             :   } else {
       27             :     DCHECK(exit_frame_type == EXIT);
       28             :     __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithExitFrame),
       29          93 :             RelocInfo::CODE_TARGET);
       30             :   }
       31        7037 : }
      32             : 
      33             : namespace {
      34             : 
       35          62 : void AdaptorWithExitFrameType(MacroAssembler* masm,
       36             :                               Builtins::ExitFrameType exit_frame_type) {
       37             :   // ----------- S t a t e -------------
       38             :   //  -- rax                 : number of arguments excluding receiver
       39             :   //  -- rbx                 : entry point
       40             :   //  -- rdi                 : target
       41             :   //  -- rdx                 : new.target
       42             :   //  -- rsp[0]              : return address
       43             :   //  -- rsp[8]              : last argument
       44             :   //  -- ...
       45             :   //  -- rsp[8 * argc]       : first argument
       46             :   //  -- rsp[8 * (argc + 1)] : receiver
       47             :   // -----------------------------------
       48          62 :   __ AssertFunction(rdi);
       49             : 
       50             :   // The logic contained here is mirrored for TurboFan inlining in
       51             :   // JSTypedLowering::ReduceJSCall{Function,Construct}. Keep these in sync.
       52             : 
       53             :   // Make sure we operate in the context of the called function (for example
       54             :   // ConstructStubs implemented in C++ will be run in the context of the caller
       55             :   // instead of the callee, due to the way that [[Construct]] is defined for
       56             :   // ordinary functions).
       57          62 :   __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
       58             : 
       59             :   // CEntryStub expects rax to contain the number of arguments including the
       60             :   // receiver and the extra arguments.
       61          62 :   __ addp(rax, Immediate(BuiltinExitFrameConstants::kNumExtraArgsWithReceiver));
       62             : 
       63             :   // Unconditionally insert argc, target and new target as extra arguments. They
       64             :   // will be used by stack frame iterators when constructing the stack trace.
       65          62 :   __ PopReturnAddressTo(kScratchRegister);
                      :   // Tag argc so it is pushed as a Smi; it is untagged again immediately after
                      :   // the push so rax still holds the raw count for CEntryStub.
       66          62 :   __ Integer32ToSmi(rax, rax);
       67          62 :   __ PushRoot(Heap::kTheHoleValueRootIndex);  // Padding.
       68          62 :   __ Push(rax);
       69          62 :   __ SmiToInteger32(rax, rax);
       70          62 :   __ Push(rdi);
       71          62 :   __ Push(rdx);
       72             :   __ PushReturnAddressFrom(kScratchRegister);
       73             : 
       74             :   // Jump to the C entry runtime stub directly here instead of using
       75             :   // JumpToExternalReference because rbx is loaded by Generate_adaptor.
       76             :   CEntryStub ces(masm->isolate(), 1, kDontSaveFPRegs, kArgvOnStack,
       77          62 :                  exit_frame_type == Builtins::BUILTIN_EXIT);
       78          62 :   __ jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
       79          62 : }
      80             : }  // namespace
      81             : 
                      : // Thin wrapper: adaptor variant that uses a plain EXIT frame.
       82          31 : void Builtins::Generate_AdaptorWithExitFrame(MacroAssembler* masm) {
       83          31 :   AdaptorWithExitFrameType(masm, EXIT);
       84          31 : }
      85             : 
                      : // Thin wrapper: adaptor variant that uses a BUILTIN_EXIT frame.
       86          31 : void Builtins::Generate_AdaptorWithBuiltinExitFrame(MacroAssembler* masm) {
       87          31 :   AdaptorWithExitFrameType(masm, BUILTIN_EXIT);
       88          31 : }
      89             : 
                      : // Tail-calls the code attached to rdi's SharedFunctionInfo: loads
                      : // SFI->code, advances past the Code object header to the first
                      : // instruction, and jumps there. Clobbers kScratchRegister only.
       90          31 : static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
       91             :   __ movp(kScratchRegister,
       92          31 :           FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
       93             :   __ movp(kScratchRegister,
       94             :           FieldOperand(kScratchRegister, SharedFunctionInfo::kCodeOffset));
       95             :   __ leap(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize));
       96          31 :   __ jmp(kScratchRegister);
       97          31 : }
      98             : 
                      : // Calls the given runtime function (e.g. a lazy-compile entry) and
                      : // tail-calls the Code object it returns, preserving rax/rdx/rdi for
                      : // the callee by stashing them in the internal frame.
       99         465 : static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
      100             :                                            Runtime::FunctionId function_id) {
      101             :   // ----------- S t a t e -------------
      102             :   //  -- rax : argument count (preserved for callee)
      103             :   //  -- rdx : new target (preserved for callee)
      104             :   //  -- rdi : target function (preserved for callee)
      105             :   // -----------------------------------
      106             :   {
      107         465 :     FrameScope scope(masm, StackFrame::INTERNAL);
      108             :     // Push the number of arguments to the callee.
      109         465 :     __ Integer32ToSmi(rax, rax);
      110         465 :     __ Push(rax);
      111             :     // Push a copy of the target function and the new target.
      112         465 :     __ Push(rdi);
      113         465 :     __ Push(rdx);
      114             :     // Function is also the parameter to the runtime call.
      115         465 :     __ Push(rdi);
      116             : 
      117             :     __ CallRuntime(function_id, 1);
                      :     // The runtime call returns the Code object to enter in rax; keep it
                      :     // in rbx while the saved registers are restored.
      118         465 :     __ movp(rbx, rax);
      119             : 
      120             :     // Restore target function and new target.
      121         465 :     __ Pop(rdx);
      122         465 :     __ Pop(rdi);
      123         465 :     __ Pop(rax);
      124         465 :     __ SmiToInteger32(rax, rax);
      125             :   }
                      :   // Jump past the Code header to the first instruction.
      126             :   __ leap(rbx, FieldOperand(rbx, Code::kHeaderSize));
      127         465 :   __ jmp(rbx);
      128         465 : }
     129             : 
     130             : namespace {
     131             : 
                      : // Shared construct-stub body used by Generate_JSConstructStubApi and
                      : // Generate_JSBuiltinsConstructStub below: sets up a CONSTRUCT frame,
                      : // copies the arguments, invokes the constructor, and pops the caller's
                      : // arguments (plus receiver) before returning.
      132          62 : void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
      133             :   // ----------- S t a t e -------------
      134             :   //  -- rax: number of arguments
      135             :   //  -- rdi: constructor function
      136             :   //  -- rdx: new target
      137             :   //  -- rsi: context
      138             :   // -----------------------------------
      139             : 
      140             :   // Enter a construct frame.
      141             :   {
      142          62 :     FrameScope scope(masm, StackFrame::CONSTRUCT);
      143             : 
      144             :     // Preserve the incoming parameters on the stack.
      145          62 :     __ Integer32ToSmi(rcx, rax);
      146          62 :     __ Push(rsi);
      147          62 :     __ Push(rcx);
      148             : 
      149             :     // The receiver for the builtin/api call.
      150          62 :     __ PushRoot(Heap::kTheHoleValueRootIndex);
      151             : 
      152             :     // Set up pointer to last argument.
      153         124 :     __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));
      154             : 
      155             :     // Copy arguments and receiver to the expression stack.
      156             :     Label loop, entry;
      157             :     __ movp(rcx, rax);
      158             :     // ----------- S t a t e -------------
      159             :     //  --                rax: number of arguments (untagged)
      160             :     //  --                rdi: constructor function
      161             :     //  --                rdx: new target
      162             :     //  --                rbx: pointer to last argument
      163             :     //  --                rcx: counter
      164             :     //  -- sp[0*kPointerSize]: the hole (receiver)
      165             :     //  -- sp[1*kPointerSize]: number of arguments (tagged)
      166             :     //  -- sp[2*kPointerSize]: context
      167             :     // -----------------------------------
                      :     // Pushes arguments at indices rax-1 down to 0 relative to rbx (the
                      :     // caller's SP); the decrement happens before the bounds check.
      168          62 :     __ jmp(&entry);
      169          62 :     __ bind(&loop);
      170          62 :     __ Push(Operand(rbx, rcx, times_pointer_size, 0));
      171          62 :     __ bind(&entry);
      172             :     __ decp(rcx);
      173          62 :     __ j(greater_equal, &loop);
      174             : 
      175             :     // Call the function.
      176             :     // rax: number of arguments (untagged)
      177             :     // rdi: constructor function
      178             :     // rdx: new target
      179             :     ParameterCount actual(rax);
      180          62 :     __ InvokeFunction(rdi, rdx, actual, CALL_FUNCTION);
      181             : 
      182             :     // Restore context from the frame.
      183         124 :     __ movp(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset));
      184             :     // Restore smi-tagged arguments count from the frame.
      185         124 :     __ movp(rbx, Operand(rbp, ConstructFrameConstants::kLengthOffset));
      186             : 
      187             :     // Leave construct frame.
      188             :   }
      189             : 
      190             :   // Remove caller arguments from the stack and return.
      191             :   __ PopReturnAddressTo(rcx);
      192          62 :   SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
      193         124 :   __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
      194             :   __ PushReturnAddressFrom(rcx);
      195             : 
      196          62 :   __ ret(0);
      197          62 : }
     198             : 
      199             : // The construct stub for ES5 constructor functions and ES6 class constructors.
                      : // restrict_constructor_return selects whether returning a non-undefined
                      : // primitive from a base-class constructor throws (restricted) or merely
                      : // bumps a use counter (unrestricted); see the two wrappers below.
      200          62 : void Generate_JSConstructStubGeneric(MacroAssembler* masm,
      201             :                                      bool restrict_constructor_return) {
      202             :   // ----------- S t a t e -------------
      203             :   //  -- rax: number of arguments (untagged)
      204             :   //  -- rdi: constructor function
      205             :   //  -- rdx: new target
      206             :   //  -- rsi: context
      207             :   //  -- sp[...]: constructor arguments
      208             :   // -----------------------------------
      209             : 
      210             :   // Enter a construct frame.
      211             :   {
      212          62 :     FrameScope scope(masm, StackFrame::CONSTRUCT);
      213             :     Label post_instantiation_deopt_entry, not_create_implicit_receiver;
      214             : 
      215             :     // Preserve the incoming parameters on the stack.
      216          62 :     __ Integer32ToSmi(rcx, rax);
      217         310 :     __ Push(rsi);
      218          62 :     __ Push(rcx);
      219          62 :     __ Push(rdi);
      220          62 :     __ Push(rdx);
      221             : 
      222             :     // ----------- S t a t e -------------
      223             :     //  --         sp[0*kPointerSize]: new target
      224             :     //  -- rdi and sp[1*kPointerSize]: constructor function
      225             :     //  --         sp[2*kPointerSize]: argument count
      226             :     //  --         sp[3*kPointerSize]: context
      227             :     // -----------------------------------
      228             : 
      229          62 :     __ movp(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
      230             :     __ testl(FieldOperand(rbx, SharedFunctionInfo::kCompilerHintsOffset),
      231             :              Immediate(SharedFunctionInfo::kDerivedConstructorMask));
      232          62 :     __ j(not_zero, &not_create_implicit_receiver);
      233             : 
      234             :     // If not derived class constructor: Allocate the new receiver object.
      235          62 :     __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1);
      236             :     __ Call(BUILTIN_CODE(masm->isolate(), FastNewObject),
      237          62 :             RelocInfo::CODE_TARGET);
      238          62 :     __ jmp(&post_instantiation_deopt_entry, Label::kNear);
      239             : 
      240             :     // Else: use TheHoleValue as receiver for constructor call
      241          62 :     __ bind(&not_create_implicit_receiver);
      242          62 :     __ LoadRoot(rax, Heap::kTheHoleValueRootIndex);
      243             : 
      244             :     // ----------- S t a t e -------------
      245             :     //  -- rax                          implicit receiver
      246             :     //  -- Slot 3 / sp[0*kPointerSize]  new target
      247             :     //  -- Slot 2 / sp[1*kPointerSize]  constructor function
      248             :     //  -- Slot 1 / sp[2*kPointerSize]  number of arguments (tagged)
      249             :     //  -- Slot 0 / sp[3*kPointerSize]  context
      250             :     // -----------------------------------
      251             :     // Deoptimizer enters here.
      252             :     masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
      253         186 :         masm->pc_offset());
      254          62 :     __ bind(&post_instantiation_deopt_entry);
      255             : 
      256             :     // Restore new target.
      257          62 :     __ Pop(rdx);
      258             : 
      259             :     // Push the allocated receiver to the stack. We need two copies
      260             :     // because we may have to return the original one and the calling
      261             :     // conventions dictate that the called function pops the receiver.
      262          62 :     __ Push(rax);
      263          62 :     __ Push(rax);
      264             : 
      265             :     // ----------- S t a t e -------------
      266             :     //  -- sp[0*kPointerSize]  implicit receiver
      267             :     //  -- sp[1*kPointerSize]  implicit receiver
      268             :     //  -- sp[2*kPointerSize]  constructor function
      269             :     //  -- sp[3*kPointerSize]  number of arguments (tagged)
      270             :     //  -- sp[4*kPointerSize]  context
      271             :     // -----------------------------------
      272             : 
      273             :     // Restore constructor function and argument count.
      274         124 :     __ movp(rdi, Operand(rbp, ConstructFrameConstants::kConstructorOffset));
      275             :     __ SmiToInteger32(rax,
      276          62 :                       Operand(rbp, ConstructFrameConstants::kLengthOffset));
      277             : 
      278             :     // Set up pointer to last argument.
      279         124 :     __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));
      280             : 
      281             :     // Copy arguments and receiver to the expression stack.
      282             :     Label loop, entry;
      283             :     __ movp(rcx, rax);
      284             :     // ----------- S t a t e -------------
      285             :     //  --                        rax: number of arguments (untagged)
      286             :     //  --                        rdx: new target
      287             :     //  --                        rbx: pointer to last argument
      288             :     //  --                        rcx: counter (untagged)
      289             :     //  --         sp[0*kPointerSize]: implicit receiver
      290             :     //  --         sp[1*kPointerSize]: implicit receiver
      291             :     //  -- rdi and sp[2*kPointerSize]: constructor function
      292             :     //  --         sp[3*kPointerSize]: number of arguments (tagged)
      293             :     //  --         sp[4*kPointerSize]: context
      294             :     // -----------------------------------
      295          62 :     __ jmp(&entry, Label::kNear);
      296          62 :     __ bind(&loop);
      297          62 :     __ Push(Operand(rbx, rcx, times_pointer_size, 0));
      298          62 :     __ bind(&entry);
      299             :     __ decp(rcx);
      300          62 :     __ j(greater_equal, &loop);
      301             : 
      302             :     // Call the function.
      303             :     ParameterCount actual(rax);
      304          62 :     __ InvokeFunction(rdi, rdx, actual, CALL_FUNCTION);
      305             : 
      306             :     // ----------- S t a t e -------------
      307             :     //  -- rax                 constructor result
      308             :     //  -- sp[0*kPointerSize]  implicit receiver
      309             :     //  -- sp[1*kPointerSize]  constructor function
      310             :     //  -- sp[2*kPointerSize]  number of arguments
      311             :     //  -- sp[3*kPointerSize]  context
      312             :     // -----------------------------------
      313             : 
      314             :     // Store offset of return address for deoptimizer.
      315             :     masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
      316          62 :         masm->pc_offset());
      317             : 
      318             :     // Restore context from the frame.
      319         124 :     __ movp(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset));
      320             : 
      321             :     // If the result is an object (in the ECMA sense), we should get rid
      322             :     // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      323             :     // on page 74.
      324             :     Label use_receiver, do_throw, other_result, leave_frame;
      325             : 
      326             :     // If the result is undefined, we jump out to using the implicit receiver.
      327             :     __ JumpIfRoot(rax, Heap::kUndefinedValueRootIndex, &use_receiver,
      328             :                   Label::kNear);
      329             : 
      330             :     // Otherwise we do a smi check and fall through to check if the return value
      331             :     // is a valid receiver.
      332             : 
      333             :     // If the result is a smi, it is *not* an object in the ECMA sense.
      334          62 :     __ JumpIfSmi(rax, &other_result, Label::kNear);
      335             : 
      336             :     // If the type of the result (stored in its map) is less than
      337             :     // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      338             :     STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      339          62 :     __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
      340          62 :     __ j(above_equal, &leave_frame, Label::kNear);
      341             : 
      342             :     // The result is now neither undefined nor an object.
      343          62 :     __ bind(&other_result);
      344         124 :     __ movp(rbx, Operand(rbp, ConstructFrameConstants::kConstructorOffset));
      345             :     __ movp(rbx, FieldOperand(rbx, JSFunction::kSharedFunctionInfoOffset));
      346             :     __ testl(FieldOperand(rbx, SharedFunctionInfo::kCompilerHintsOffset),
      347             :              Immediate(SharedFunctionInfo::kClassConstructorMask));
      348             : 
      349          62 :     if (restrict_constructor_return) {
      350             :       // Throw if constructor function is a class constructor
                      :       // (falls through to do_throw below when the bit is set).
      351          31 :       __ j(Condition::zero, &use_receiver, Label::kNear);
      352             :     } else {
      353          31 :       __ j(not_zero, &use_receiver, Label::kNear);
      354             :       __ CallRuntime(
      355          31 :           Runtime::kIncrementUseCounterConstructorReturnNonUndefinedPrimitive);
      356          31 :       __ jmp(&use_receiver, Label::kNear);
      357             :     }
      358             : 
      359          62 :     __ bind(&do_throw);
      360          62 :     __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
      361             : 
      362             :     // Throw away the result of the constructor invocation and use the
      363             :     // on-stack receiver as the result.
      364          62 :     __ bind(&use_receiver);
      365         124 :     __ movp(rax, Operand(rsp, 0 * kPointerSize));
      366             :     __ JumpIfRoot(rax, Heap::kTheHoleValueRootIndex, &do_throw);
      367             : 
      368          62 :     __ bind(&leave_frame);
      369             :     // Restore the arguments count.
      370         124 :     __ movp(rbx, Operand(rbp, ConstructFrameConstants::kLengthOffset));
      371             :     // Leave construct frame.
      372             :   }
      373             :   // Remove caller arguments from the stack and return.
      374             :   __ PopReturnAddressTo(rcx);
      375          62 :   SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
      376         124 :   __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
      377             :   __ PushReturnAddressFrom(rcx);
      378          62 :   __ ret(0);
      379          62 : }
     380             : }  // namespace
     381             : 
                      : // Generic construct stub, restricted-return variant (throws on
                      : // non-undefined primitive returns from base constructors).
      382          31 : void Builtins::Generate_JSConstructStubGenericRestrictedReturn(
      383             :     MacroAssembler* masm) {
      384          31 :   return Generate_JSConstructStubGeneric(masm, true);
      385             : }
                      : // Generic construct stub, unrestricted-return variant (only bumps a
                      : // use counter on non-undefined primitive returns).
      386          31 : void Builtins::Generate_JSConstructStubGenericUnrestrictedReturn(
      387             :     MacroAssembler* masm) {
      388          31 :   return Generate_JSConstructStubGeneric(masm, false);
      389             : }
                      : // API constructors share the builtins construct-stub helper above.
      390          31 : void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
      391          31 :   Generate_JSBuiltinsConstructStubHelper(masm);
      392          31 : }
                      : // Builtin constructors share the builtins construct-stub helper above.
      393          31 : void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
      394          31 :   Generate_JSBuiltinsConstructStubHelper(masm);
      395          31 : }
     396             : 
                      : // Reports `new` on a non-constructable target: passes the target (rdi)
                      : // to the runtime. No epilogue is emitted — the runtime call is expected
                      : // to throw and not return.
      397          31 : void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
      398          31 :   FrameScope scope(masm, StackFrame::INTERNAL);
      399          31 :   __ Push(rdi);
      400          31 :   __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
      401          31 : }
     402             : 
      403             : // Clobbers rcx, r11, kScratchRegister; preserves all other registers.
      404          62 : static void Generate_CheckStackOverflow(MacroAssembler* masm) {
      405             :   // rax   : the number of items to be pushed to the stack
      406             :   //
      407             :   // Check the stack for overflow. We are not trying to catch
      408             :   // interruptions (e.g. debug break and preemption) here, so the "real stack
      409             :   // limit" is checked.
      410             :   Label okay;
      411          62 :   __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
      412          62 :   __ movp(rcx, rsp);
      413             :   // Make rcx the space we have left. The stack might already be overflowed
      414             :   // here which will cause rcx to become negative.
      415          62 :   __ subp(rcx, kScratchRegister);
      416             :   // Make r11 the space we need for the array when it is unrolled onto the
      417             :   // stack.
      418             :   __ movp(r11, rax);
      419             :   __ shlq(r11, Immediate(kPointerSizeLog2));
      420             :   // Check if the arguments will overflow the stack.
      421          62 :   __ cmpp(rcx, r11);
      422          62 :   __ j(greater, &okay);  // Signed comparison.
      423             : 
                      :   // Out of stack space: the runtime call is expected to throw rather
                      :   // than return, so &okay is only reached on the in-bounds path.
      424             :   // Out of stack space.
      425          62 :   __ CallRuntime(Runtime::kThrowStackOverflow);
      426             : 
      427          62 :   __ bind(&okay);
      428          62 : }
     429             : 
                      : // Common body of the JS entry trampolines: marshals the five C++
                      : // parameters into the JS calling convention, copies the argv handles
                      : // onto the stack, and calls either the Construct builtin (is_construct)
                      : // or the Call builtin.
      430          62 : static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
      431             :                                              bool is_construct) {
      432          62 :   ProfileEntryHookStub::MaybeCallEntryHook(masm);
      433             : 
      434             :   // Expects five C++ function parameters.
      435             :   // - Object* new_target
      436             :   // - JSFunction* function
      437             :   // - Object* receiver
      438             :   // - int argc
      439             :   // - Object*** argv
      440             :   // (see Handle::Invoke in execution.cc).
      441             : 
      442             :   // Open a C++ scope for the FrameScope.
      443             :   {
      444             : // Platform specific argument handling. After this, the stack contains
      445             : // an internal frame and the pushed function and receiver, and
      446             : // register rax and rbx holds the argument count and argument array,
      447             : // while rdi holds the function pointer, rsi the context, and rdx the
      448             : // new.target.
      449             : 
      450             : #ifdef _WIN64
      451             :     // MSVC parameters in:
      452             :     // rcx        : new_target
      453             :     // rdx        : function
      454             :     // r8         : receiver
      455             :     // r9         : argc
      456             :     // [rsp+0x20] : argv
      457             : 
      458             :     // Enter an internal frame.
      459             :     FrameScope scope(masm, StackFrame::INTERNAL);
      460             : 
      461             :     // Setup the context (we need to use the caller context from the isolate).
      462             :     ExternalReference context_address(IsolateAddressId::kContextAddress,
      463             :                                       masm->isolate());
      464             :     __ movp(rsi, masm->ExternalOperand(context_address));
      465             : 
      466             :     // Push the function and the receiver onto the stack.
      467             :     __ Push(rdx);
      468             :     __ Push(r8);
      469             : 
      470             :     // Load the number of arguments and setup pointer to the arguments.
      471             :     __ movp(rax, r9);
      472             :     // Load the previous frame pointer to access C argument on stack
      473             :     __ movp(kScratchRegister, Operand(rbp, 0));
      474             :     __ movp(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
      475             :     // Load the function pointer into rdi.
      476             :     __ movp(rdi, rdx);
      477             :     // Load the new.target into rdx.
      478             :     __ movp(rdx, rcx);
      479             : #else   // _WIN64
      480             :     // GCC parameters in:
      481             :     // rdi : new_target
      482             :     // rsi : function
      483             :     // rdx : receiver
      484             :     // rcx : argc
      485             :     // r8  : argv
      486             : 
                      :     // Stash new_target in r11 so rdi can take the function pointer.
      487          62 :     __ movp(r11, rdi);
      488             :     __ movp(rdi, rsi);
      489             :     // rdi : function
      490             :     // r11 : new_target
      491             : 
      492             :     // Clear the context before we push it when entering the internal frame.
      493         186 :     __ Set(rsi, 0);
      494             : 
      495             :     // Enter an internal frame.
      496          62 :     FrameScope scope(masm, StackFrame::INTERNAL);
      497             : 
      498             :     // Setup the context (we need to use the caller context from the isolate).
      499             :     ExternalReference context_address(IsolateAddressId::kContextAddress,
      500          62 :                                       masm->isolate());
      501          62 :     __ movp(rsi, masm->ExternalOperand(context_address));
      502             : 
      503             :     // Push the function and receiver onto the stack.
      504          62 :     __ Push(rdi);
      505          62 :     __ Push(rdx);
      506             : 
      507             :     // Load the number of arguments and setup pointer to the arguments.
      508             :     __ movp(rax, rcx);
      509             :     __ movp(rbx, r8);
      510             : 
      511             :     // Load the new.target into rdx.
      512             :     __ movp(rdx, r11);
      513             : #endif  // _WIN64
      514             : 
      515             :     // Current stack contents:
      516             :     // [rsp + 2 * kPointerSize ... ] : Internal frame
      517             :     // [rsp + kPointerSize]          : function
      518             :     // [rsp]                         : receiver
      519             :     // Current register contents:
      520             :     // rax : argc
      521             :     // rbx : argv
      522             :     // rsi : context
      523             :     // rdi : function
      524             :     // rdx : new.target
      525             : 
      526             :     // Check if we have enough stack space to push all arguments.
      527             :     // Expects argument count in rax. Clobbers rcx, r11.
      528          62 :     Generate_CheckStackOverflow(masm);
      529             : 
      530             :     // Copy arguments to the stack in a loop.
      531             :     // Register rbx points to array of pointers to handle locations.
      532             :     // Push the values of these handles.
      533             :     Label loop, entry;
      534          62 :     __ Set(rcx, 0);  // Set loop variable to 0.
      535          62 :     __ jmp(&entry, Label::kNear);
      536          62 :     __ bind(&loop);
      537         124 :     __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
      538          62 :     __ Push(Operand(kScratchRegister, 0));  // dereference handle
      539          62 :     __ addp(rcx, Immediate(1));
      540          62 :     __ bind(&entry);
      541          62 :     __ cmpp(rcx, rax);
      542          62 :     __ j(not_equal, &loop);
      543             : 
      544             :     // Invoke the builtin code.
      545             :     Handle<Code> builtin = is_construct
      546          31 :                                ? BUILTIN_CODE(masm->isolate(), Construct)
      547         124 :                                : masm->isolate()->builtins()->Call();
      548          62 :     __ Call(builtin, RelocInfo::CODE_TARGET);
      549             : 
      550             :     // Exit the internal frame. Notice that this also removes the empty
      551             :     // context and the function left on the stack by the code
      552             :     // invocation.
      553             :   }
      554             : 
      555          62 :   __ ret(0);
      556          62 : }
     557             : 
// Entry trampoline for an ordinary (non-construct) JS call: delegates to the
// shared helper with is_construct == false, so the invocation goes through
// the Call builtin.
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}
     561             : 
// Entry trampoline for a [[Construct]] invocation: delegates to the shared
// helper with is_construct == true, so the invocation goes through the
// Construct builtin.
void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}
     565             : 
// static
// Resumes a suspended JSGeneratorObject: stores the sent value and resume
// mode into the generator, rebuilds the generator frame's receiver and
// (dummy) argument slots, and re-enters the generator function's code with
// the generator object passed in the new.target register. If the debugger is
// active (stepping, or stepping into this suspended generator), it first
// calls into the runtime to prepare stepping before resuming.
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : the value to pass to the generator
  //  -- rbx    : the JSGeneratorObject to resume
  //  -- rdx    : the resume mode (tagged)
  //  -- rsp[0] : return address
  // -----------------------------------
  __ AssertGeneratorObject(rbx);

  // Store input value into generator object.
  __ movp(FieldOperand(rbx, JSGeneratorObject::kInputOrDebugPosOffset), rax);
  // The input value may be a heap object, so the store needs a write barrier.
  __ RecordWriteField(rbx, JSGeneratorObject::kInputOrDebugPosOffset, rax, rcx,
                      kDontSaveFPRegs);

  // Store resume mode into generator object.
  __ movp(FieldOperand(rbx, JSGeneratorObject::kResumeModeOffset), rdx);

  // Load suspended function and context.
  __ movp(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset));
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference debug_hook =
      ExternalReference::debug_hook_on_function_call_address(masm->isolate());
  Operand debug_hook_operand = masm->ExternalOperand(debug_hook);
  __ cmpb(debug_hook_operand, Immediate(0));
  __ j(not_equal, &prepare_step_in_if_stepping);

  // Flood function if we need to continue stepping in the suspended generator.
  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());
  Operand debug_suspended_generator_operand =
      masm->ExternalOperand(debug_suspended_generator);
  __ cmpp(rbx, debug_suspended_generator_operand);
  __ j(equal, &prepare_step_in_suspended_generator);
  __ bind(&stepping_prepared);

  // Pop return address.
  __ PopReturnAddressTo(rax);

  // Push receiver.
  __ Push(FieldOperand(rbx, JSGeneratorObject::kReceiverOffset));

  // ----------- S t a t e -------------
  //  -- rax    : return address
  //  -- rbx    : the JSGeneratorObject to resume
  //  -- rdx    : the resume mode (tagged)
  //  -- rdi    : generator function
  //  -- rsi    : generator context
  //  -- rsp[0] : generator receiver
  // -----------------------------------

  // Push holes for arguments to generator function. Since the parser forced
  // context allocation for any variables in generators, the actual argument
  // values have already been copied into the context and these dummy values
  // will never be used.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movl(rcx,
          FieldOperand(rcx, SharedFunctionInfo::kFormalParameterCountOffset));
  {
    // Push one hole per formal parameter; the subl/carry pattern also
    // handles a parameter count of zero (loop body never executes).
    Label done_loop, loop;
    __ bind(&loop);
    __ subl(rcx, Immediate(1));
    __ j(carry, &done_loop, Label::kNear);
    __ PushRoot(Heap::kTheHoleValueRootIndex);
    __ jmp(&loop);
    __ bind(&done_loop);
  }

  // Underlying function needs to have bytecode available.
  if (FLAG_debug_code) {
    __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kFunctionDataOffset));
    __ CmpObjectType(rcx, BYTECODE_ARRAY_TYPE, rcx);
    __ Assert(equal, kMissingBytecodeArray);
  }

  // Resume (Ignition/TurboFan) generator object.
  {
    __ PushReturnAddressFrom(rax);
    // rax must hold the actual argument count when entering the function.
    __ movp(rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ movsxlq(rax, FieldOperand(
                        rax, SharedFunctionInfo::kFormalParameterCountOffset));
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object.  In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ movp(rdx, rbx);
    __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeOffset));
    // Skip the Code object header to reach the first instruction.
    __ addp(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    __ jmp(rcx);
  }

  __ bind(&prepare_step_in_if_stepping);
  {
    // Call the runtime debug hook; preserve rbx/rdx across the call and
    // reload rdi (the function) from the generator afterwards, since the
    // call may have moved objects.
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rbx);
    __ Push(rdx);
    __ Push(rdi);
    __ CallRuntime(Runtime::kDebugOnFunctionCall);
    __ Pop(rdx);
    __ Pop(rbx);
    __ movp(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset));
  }
  __ jmp(&stepping_prepared);

  __ bind(&prepare_step_in_suspended_generator);
  {
    // Same as above, but for stepping into this suspended generator.
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rbx);
    __ Push(rdx);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(rdx);
    __ Pop(rbx);
    __ movp(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset));
  }
  __ jmp(&stepping_prepared);
}
     686             : 
// TODO(juliana): if we remove the code below then we don't need all
// the parameters.
// Installs |optimized_code| as |closure|'s code field and records the store
// with a write barrier. |scratch1| is clobbered by the barrier sequence;
// |scratch2| is used by RecordWriteField. |scratch3| is currently unused in
// this body (see TODO above).
static void ReplaceClosureCodeWithOptimizedCode(
    MacroAssembler* masm, Register optimized_code, Register closure,
    Register scratch1, Register scratch2, Register scratch3) {

  // Store the optimized code in the closure.
  __ movp(FieldOperand(closure, JSFunction::kCodeOffset), optimized_code);
  __ movp(scratch1, optimized_code);  // Write barrier clobbers scratch1 below.
  __ RecordWriteField(closure, JSFunction::kCodeOffset, scratch1, scratch2,
                      kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
}
     699             : 
// Tears down the current interpreter frame and drops the receiver plus
// arguments from the caller's stack. The argument count (in bytes, including
// the receiver) is read from the on-frame BytecodeArray's parameter size
// field. Both scratch registers are clobbered; the return address is
// preserved across the stack adjustment.
static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
                                  Register scratch2) {
  Register args_count = scratch1;
  Register return_pc = scratch2;

  // Get the arguments + receiver count.
  __ movp(args_count,
          Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ movl(args_count,
          FieldOperand(args_count, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ leave();

  // Drop receiver + arguments.
  __ PopReturnAddressTo(return_pc);
  __ addp(rsp, args_count);
  __ PushReturnAddressFrom(return_pc);
}
     719             : 
// Tail-call |function_id| if |smi_entry| == |marker|
// Compares the Smi optimization marker in |smi_entry| against |marker| and,
// on a match, tail-calls the given runtime function (never returning here);
// otherwise falls through with all registers intact apart from flags.
static void TailCallRuntimeIfMarkerEquals(MacroAssembler* masm,
                                          Register smi_entry,
                                          OptimizationMarker marker,
                                          Runtime::FunctionId function_id) {
  Label no_match;
  __ SmiCompare(smi_entry, Smi::FromEnum(marker));
  __ j(not_equal, &no_match, Label::kNear);
  GenerateTailCallToReturnedCode(masm, function_id);
  __ bind(&no_match);
}
     731             : 
// Checks the feedback vector's optimized-code slot. If it holds an
// optimization marker (a Smi), tail-calls the matching compile runtime
// function; if it holds a WeakCell with live, non-deoptimized code, installs
// that code on the closure and tail-calls it. Otherwise falls through so the
// caller can continue with unoptimized entry (the interpreter).
static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
                                           Register feedback_vector,
                                           Register scratch1, Register scratch2,
                                           Register scratch3) {
  // ----------- S t a t e -------------
  //  -- rax : argument count (preserved for callee if needed, and caller)
  //  -- rdx : new target (preserved for callee if needed, and caller)
  //  -- rdi : target function (preserved for callee if needed, and caller)
  //  -- feedback vector (preserved for caller if needed)
  // -----------------------------------
  DCHECK(!AreAliased(feedback_vector, rax, rdx, rdi, scratch1, scratch2,
                     scratch3));

  Label optimized_code_slot_is_cell, fallthrough;

  Register closure = rdi;
  Register optimized_code_entry = scratch1;

  __ movp(optimized_code_entry,
          FieldOperand(feedback_vector, FeedbackVector::kOptimizedCodeOffset));

  // Check if the code entry is a Smi. If yes, we interpret it as an
  // optimisation marker. Otherwise, interpret is as a weak cell to a code
  // object.
  __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_cell);

  {
    // Optimized code slot is a Smi optimization marker.

    // Fall through if no optimization trigger.
    __ SmiCompare(optimized_code_entry,
                  Smi::FromEnum(OptimizationMarker::kNone));
    __ j(equal, &fallthrough);

    // Each of these tail-calls the runtime and does not return on a match.
    TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
                                  OptimizationMarker::kCompileOptimized,
                                  Runtime::kCompileOptimized_NotConcurrent);
    TailCallRuntimeIfMarkerEquals(
        masm, optimized_code_entry,
        OptimizationMarker::kCompileOptimizedConcurrent,
        Runtime::kCompileOptimized_Concurrent);

    {
      // Otherwise, the marker is InOptimizationQueue, so fall through hoping
      // that an interrupt will eventually update the slot with optimized code.
      if (FLAG_debug_code) {
        __ SmiCompare(optimized_code_entry,
                      Smi::FromEnum(OptimizationMarker::kInOptimizationQueue));
        __ Assert(equal, kExpectedOptimizationSentinel);
      }
      __ jmp(&fallthrough);
    }
  }

  {
    // Optimized code slot is a WeakCell.
    __ bind(&optimized_code_slot_is_cell);

    // Dereference the cell; a Smi value means the cell was cleared (the code
    // object died), so fall through to unoptimized entry.
    __ movp(optimized_code_entry,
            FieldOperand(optimized_code_entry, WeakCell::kValueOffset));
    __ JumpIfSmi(optimized_code_entry, &fallthrough);

    // Check if the optimized code is marked for deopt. If it is, call the
    // runtime to clear it.
    Label found_deoptimized_code;
    __ testl(
        FieldOperand(optimized_code_entry, Code::kKindSpecificFlags1Offset),
        Immediate(1 << Code::kMarkedForDeoptimizationBit));
    __ j(not_zero, &found_deoptimized_code);

    // Optimized code is good, get it into the closure and link the closure into
    // the optimized functions list, then tail call the optimized code.
    // The feedback vector is no longer used, so re-use it as a scratch
    // register.
    ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure,
                                        scratch2, scratch3, feedback_vector);
    // Skip the Code object header to reach the first instruction.
    __ addp(optimized_code_entry,
            Immediate(Code::kHeaderSize - kHeapObjectTag));
    __ jmp(optimized_code_entry);

    // Optimized code slot contains deoptimized code, evict it and re-enter the
    // closure's code.
    __ bind(&found_deoptimized_code);
    GenerateTailCallToReturnedCode(masm, Runtime::kEvictOptimizedCodeSlot);
  }

  // Fall-through if the optimized code cell is clear and there is no
  // optimization marker.
  __ bind(&fallthrough);
}
     822             : 
// Advance the current bytecode offset. This simulates what all bytecode
// handlers do upon completion of the underlying operation.
// On entry |bytecode| holds the current (possibly prefix) bytecode at
// |bytecode_offset|. For a Wide/ExtraWide prefix the next (real) bytecode is
// loaded and the size table pointer is shifted to the matching scaled table;
// then the current bytecode's size is added to |bytecode_offset|. |bytecode|
// and |scratch1| are clobbered.
static void AdvanceBytecodeOffset(MacroAssembler* masm, Register bytecode_array,
                                  Register bytecode_offset, Register bytecode,
                                  Register scratch1) {
  Register bytecode_size_table = scratch1;
  DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
                     bytecode));

  __ Move(bytecode_size_table,
          ExternalReference::bytecode_size_table_address(masm->isolate()));

  // Check if the bytecode is a Wide or ExtraWide prefix bytecode.
  Label load_size, extra_wide;
  STATIC_ASSERT(0 == static_cast<int>(interpreter::Bytecode::kWide));
  STATIC_ASSERT(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
  __ cmpb(bytecode, Immediate(0x1));
  __ j(above, &load_size, Label::kNear);
  __ j(equal, &extra_wide, Label::kNear);

  // Load the next bytecode and update table to the wide scaled table.
  __ incl(bytecode_offset);
  __ movzxbp(bytecode, Operand(bytecode_array, bytecode_offset, times_1, 0));
  __ addp(bytecode_size_table,
          Immediate(kIntSize * interpreter::Bytecodes::kBytecodeCount));
  __ jmp(&load_size, Label::kNear);

  __ bind(&extra_wide);
  // Load the next bytecode and update table to the extra wide scaled table.
  __ incl(bytecode_offset);
  __ movzxbp(bytecode, Operand(bytecode_array, bytecode_offset, times_1, 0));
  __ addp(bytecode_size_table,
          Immediate(2 * kIntSize * interpreter::Bytecodes::kBytecodeCount));
  __ jmp(&load_size, Label::kNear);

  // Load the size of the current bytecode.
  __ bind(&load_size);
  // The table is indexed by bytecode value, with kIntSize (4-byte) entries.
  __ addl(bytecode_offset, Operand(bytecode_size_table, bytecode, times_4, 0));
}
     862             : 
// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right.  The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o rdi: the JS function object being called
//   o rdx: the incoming new target or generator object
//   o rsi: our context
//   o rbp: the caller's frame pointer
//   o rsp: stack pointer (pointing to return address)
//
// The function builds an interpreter frame.  See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  Register closure = rdi;
  Register feedback_vector = rbx;

  // Load the feedback vector from the closure.
  __ movp(feedback_vector,
          FieldOperand(closure, JSFunction::kFeedbackVectorOffset));
  __ movp(feedback_vector, FieldOperand(feedback_vector, Cell::kValueOffset));
  // Read off the optimized code slot in the feedback vector, and if there
  // is optimized code or an optimization marker, call that instead.
  MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, rcx, r14, r15);

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ pushq(rbp);  // Caller's frame pointer.
  __ movp(rbp, rsp);
  __ Push(rsi);  // Callee's context.
  __ Push(rdi);  // Callee's JS function.

  // Get the bytecode array from the function object (or from the DebugInfo if
  // it is present) and load it into kInterpreterBytecodeArrayRegister.
  Label maybe_load_debug_bytecode_array, bytecode_array_loaded;
  __ movp(rax, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ movp(kInterpreterBytecodeArrayRegister,
          FieldOperand(rax, SharedFunctionInfo::kFunctionDataOffset));
  // A non-Smi DebugInfo means the function may have a debug bytecode copy.
  __ JumpIfNotSmi(FieldOperand(rax, SharedFunctionInfo::kDebugInfoOffset),
                  &maybe_load_debug_bytecode_array);
  __ bind(&bytecode_array_loaded);

  // Increment invocation count for the function.
  __ incl(
      FieldOperand(feedback_vector, FeedbackVector::kInvocationCountOffset));

  // Check function data field is actually a BytecodeArray object.
  if (FLAG_debug_code) {
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     rax);
    __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Reset code age.
  __ movb(FieldOperand(kInterpreterBytecodeArrayRegister,
                       BytecodeArray::kBytecodeAgeOffset),
          Immediate(BytecodeArray::kNoAgeBytecodeAge));

  // Load initial bytecode offset.
  __ movp(kInterpreterBytecodeOffsetRegister,
          Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Push bytecode array and Smi tagged bytecode offset.
  __ Push(kInterpreterBytecodeArrayRegister);
  __ Integer32ToSmi(rcx, kInterpreterBytecodeOffsetRegister);
  __ Push(rcx);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ movl(rcx, FieldOperand(kInterpreterBytecodeArrayRegister,
                              BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ movp(rax, rsp);
    __ subp(rax, rcx);
    __ CompareRoot(rax, Heap::kRealStackLimitRootIndex);
    __ j(above_equal, &ok, Label::kNear);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    Label loop_header;
    Label loop_check;
    __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
    __ j(always, &loop_check);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ Push(rax);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ subp(rcx, Immediate(kPointerSize));
    __ j(greater_equal, &loop_header, Label::kNear);
  }

  // If the bytecode array has a valid incoming new target or generator object
  // register, initialize it with incoming value which was passed in rdx.
  Label no_incoming_new_target_or_generator_register;
  __ movsxlq(
      rax,
      FieldOperand(kInterpreterBytecodeArrayRegister,
                   BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
  __ testl(rax, rax);
  __ j(zero, &no_incoming_new_target_or_generator_register, Label::kNear);
  // rax is the (frame-relative) register index; store rdx into that slot.
  __ movp(Operand(rbp, rax, times_pointer_size, 0), rdx);
  __ bind(&no_incoming_new_target_or_generator_register);

  // Load accumulator with undefined.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);

  // Load the dispatch table into a register and dispatch to the bytecode
  // handler at the current bytecode offset.
  Label do_dispatch;
  __ bind(&do_dispatch);
  __ Move(
      kInterpreterDispatchTableRegister,
      ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
  __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ movp(rbx, Operand(kInterpreterDispatchTableRegister, rbx,
                       times_pointer_size, 0));
  __ call(rbx);
  // Record the PC right after the dispatch call so returns from bytecode
  // handlers can be identified as interpreter entry returns.
  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // Any returns to the entry trampoline are either due to the return bytecode
  // or the interpreter tail calling a builtin and then a dispatch.

  // Get bytecode array and bytecode offset from the stack frame.
  __ movp(kInterpreterBytecodeArrayRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ movp(kInterpreterBytecodeOffsetRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiToInteger32(kInterpreterBytecodeOffsetRegister,
                    kInterpreterBytecodeOffsetRegister);

  // Check if we should return.
  Label do_return;
  __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ cmpb(rbx, Immediate(static_cast<int>(interpreter::Bytecode::kReturn)));
  __ j(equal, &do_return, Label::kNear);

  // Advance to the next bytecode and dispatch.
  AdvanceBytecodeOffset(masm, kInterpreterBytecodeArrayRegister,
                        kInterpreterBytecodeOffsetRegister, rbx, rcx);
  __ jmp(&do_dispatch);

  __ bind(&do_return);
  // The return value is in rax.
  LeaveInterpreterFrame(masm, rbx, rcx);
  __ ret(0);

  // Load debug copy of the bytecode array if it exists.
  // kInterpreterBytecodeArrayRegister is already loaded with
  // SharedFunctionInfo::kFunctionDataOffset.
  __ bind(&maybe_load_debug_bytecode_array);
  __ movp(rcx, FieldOperand(rax, SharedFunctionInfo::kDebugInfoOffset));
  __ SmiToInteger32(kScratchRegister,
                    FieldOperand(rcx, DebugInfo::kFlagsOffset));
  // Only use the debug bytecode copy when break info is present.
  __ testl(kScratchRegister, Immediate(DebugInfo::kHasBreakInfo));
  __ j(zero, &bytecode_array_loaded);
  __ movp(kInterpreterBytecodeArrayRegister,
          FieldOperand(rcx, DebugInfo::kDebugBytecodeArrayOffset));
  __ jmp(&bytecode_array_loaded);
}
    1035             : 
// Jumps to |stack_overflow| if pushing |num_args| pointer-sized values would
// take rsp below the real stack limit. |scratch| and kScratchRegister are
// clobbered; |num_args| is preserved. On the non-overflow path, execution
// falls through.
static void Generate_StackOverflowCheck(
    MacroAssembler* masm, Register num_args, Register scratch,
    Label* stack_overflow,
    Label::Distance stack_overflow_distance = Label::kFar) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
  __ movp(scratch, rsp);
  // Make scratch the space we have left. The stack might already be overflowed
  // here which will cause scratch to become negative.
  __ subp(scratch, kScratchRegister);
  // Convert the byte headroom into a slot count for comparison with num_args.
  __ sarp(scratch, Immediate(kPointerSizeLog2));
  // Check if the arguments will overflow the stack.
  __ cmpp(scratch, num_args);
  // Signed comparison.
  __ j(less_equal, stack_overflow, stack_overflow_distance);
}
    1054             : 
// Pushes |num_args| values onto the stack, reading them from |start_address|
// downwards (the interpreter stores arguments at decreasing addresses, so the
// loop pushes them in the intended stack order). Clobbers |scratch| and
// |start_address|; the caller must have run a stack-overflow check first.
static void Generate_InterpreterPushArgs(MacroAssembler* masm,
                                         Register num_args,
                                         Register start_address,
                                         Register scratch) {
  // Find the address of the last argument:
  // scratch = start_address - num_args * kPointerSize.
  __ Move(scratch, num_args);
  __ shlp(scratch, Immediate(kPointerSizeLog2));
  __ negp(scratch);
  __ addp(scratch, start_address);

  // Push the arguments. The check is at the bottom so a zero-argument call
  // pushes nothing.
  Label loop_header, loop_check;
  __ j(always, &loop_check);
  __ bind(&loop_header);
  __ Push(Operand(start_address, 0));
  __ subp(start_address, Immediate(kPointerSize));
  __ bind(&loop_check);
  __ cmpp(start_address, scratch);
  __ j(greater, &loop_header, Label::kNear);
}
    1075             : 
// static
// Pushes the interpreter's outgoing arguments onto the machine stack and
// tail-calls the Call builtin selected by |receiver_mode| / |mode|. With
// kWithFinalSpread the last pushed argument is popped back into rbx to be
// passed as the spread operand.
void Builtins::Generate_InterpreterPushArgsThenCallImpl(
    MacroAssembler* masm, ConvertReceiverMode receiver_mode,
    InterpreterPushArgsMode mode) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rbx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  //  -- rdi : the target to call (can be any Object).
  // -----------------------------------
  Label stack_overflow;

  // Number of values to be pushed.
  __ Move(rcx, rax);
  __ addp(rcx, Immediate(1));  // Add one for receiver.

  // Add a stack check before pushing arguments.
  Generate_StackOverflowCheck(masm, rcx, rdx, &stack_overflow);

  // Pop return address to allow tail-call after pushing arguments.
  __ PopReturnAddressTo(kScratchRegister);

  // Push "undefined" as the receiver arg if we need to. In that mode the
  // bytecode did not materialize a receiver, so it is not part of the
  // argument run starting at rbx.
  if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ subp(rcx, Immediate(1));  // Subtract one for receiver.
  }

  // rbx and rdx will be modified.
  Generate_InterpreterPushArgs(masm, rcx, rbx, rdx);

  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Pop(rbx);                 // Pass the spread in a register
    __ subp(rax, Immediate(1));  // Subtract one for spread
  }

  // Call the target.
  __ PushReturnAddressFrom(kScratchRegister);  // Re-push return address.

  if (mode == InterpreterPushArgsMode::kJSFunction) {
    __ Jump(masm->isolate()->builtins()->CallFunction(receiver_mode),
            RelocInfo::CODE_TARGET);
  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Jump(BUILTIN_CODE(masm->isolate(), CallWithSpread),
            RelocInfo::CODE_TARGET);
  } else {
    __ Jump(masm->isolate()->builtins()->Call(receiver_mode),
            RelocInfo::CODE_TARGET);
  }

  // Throw stack overflow exception.
  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // This should be unreachable.
    __ int3();
  }
}
    1135             : 
// static
// Pushes constructor-call arguments (plus an empty receiver slot) and
// tail-calls the construct path selected by |mode|: the function-specific
// construct stub, ConstructWithSpread, or the generic Construct builtin.
void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
    MacroAssembler* masm, InterpreterPushArgsMode mode) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (either the same as the constructor or
  //           the JSFunction on which new was invoked initially)
  //  -- rdi : the constructor to call (can be any Object)
  //  -- rbx : the allocation site feedback if available, undefined otherwise
  //  -- rcx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  // -----------------------------------
  Label stack_overflow;

  // Add a stack check before pushing arguments.
  Generate_StackOverflowCheck(masm, rax, r8, &stack_overflow);

  // Pop return address to allow tail-call after pushing arguments.
  __ PopReturnAddressTo(kScratchRegister);

  // Push slot for the receiver to be constructed.
  __ Push(Immediate(0));

  // rcx and r8 will be modified.
  Generate_InterpreterPushArgs(masm, rax, rcx, r8);

  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Pop(rbx);                 // Pass the spread in a register
    __ subp(rax, Immediate(1));  // Subtract one for spread

    // Push return address in preparation for the tail-call.
    __ PushReturnAddressFrom(kScratchRegister);
  } else {
    __ PushReturnAddressFrom(kScratchRegister);
    // rbx carries the allocation-site feedback in the non-spread modes;
    // verify it is of the expected shape in debug builds.
    __ AssertUndefinedOrAllocationSite(rbx);
  }

  if (mode == InterpreterPushArgsMode::kJSFunction) {
    // Tail call to the function-specific construct stub (still in the caller
    // context at this point).
    __ AssertFunction(rdi);

    __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset));
    // Skip the Code object header to get the stub's entry address.
    __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
    // Jump to the constructor function (rax, rbx, rdx passed on).
    __ jmp(rcx);
  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    // Call the constructor (rax, rdx, rdi passed on).
    __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
    // Call the constructor (rax, rdx, rdi passed on).
    __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
  }

  // Throw stack overflow exception.
  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // This should be unreachable.
    __ int3();
  }
}
    1202             : 
// Re-enters interpreted code in the current interpreter frame: pushes a return
// address that points back into the InterpreterEntryTrampoline, reloads the
// dispatch table / bytecode array / bytecode offset from the frame, and jumps
// to the handler for the bytecode at the current offset.
static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Smi* interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
  __ Move(rbx, BUILTIN_CODE(masm->isolate(), InterpreterEntryTrampoline));
  // Turn the Code object pointer into an instruction address at the recorded
  // offset (Code::kHeaderSize - kHeapObjectTag untags and skips the header).
  __ addp(rbx, Immediate(interpreter_entry_return_pc_offset->value() +
                         Code::kHeaderSize - kHeapObjectTag));
  __ Push(rbx);

  // Initialize dispatch table register.
  __ Move(
      kInterpreterDispatchTableRegister,
      ExternalReference::interpreter_dispatch_table_address(masm->isolate()));

  // Get the bytecode array pointer from the frame.
  __ movp(kInterpreterBytecodeArrayRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     rbx);
    __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame (stored as a Smi).
  __ movp(kInterpreterBytecodeOffsetRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiToInteger32(kInterpreterBytecodeOffsetRegister,
                    kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode: load the byte at the current offset,
  // then jump through the dispatch table entry for it.
  __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ movp(rbx, Operand(kInterpreterDispatchTableRegister, rbx,
                       times_pointer_size, 0));
  __ jmp(rbx);
}
    1244             : 
// Advances the saved bytecode offset in the interpreter frame past the
// current bytecode, then re-enters dispatch at the new offset. Used when
// returning to the interpreter should resume at the *next* bytecode.
void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
  // Get bytecode array and bytecode offset from the stack frame.
  __ movp(kInterpreterBytecodeArrayRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ movp(kInterpreterBytecodeOffsetRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiToInteger32(kInterpreterBytecodeOffsetRegister,
                    kInterpreterBytecodeOffsetRegister);

  // Load the current bytecode.
  __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));

  // Advance to the next bytecode.
  AdvanceBytecodeOffset(masm, kInterpreterBytecodeArrayRegister,
                        kInterpreterBytecodeOffsetRegister, rbx, rcx);

  // Convert new bytecode offset to a Smi and save in the stackframe.
  __ Integer32ToSmi(rbx, kInterpreterBytecodeOffsetRegister);
  __ movp(Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp), rbx);

  Generate_InterpreterEnterBytecode(masm);
}
    1268             : 
// Re-enters interpreter dispatch at the offset currently saved in the frame,
// without advancing it.
void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  Generate_InterpreterEnterBytecode(masm);
}
    1272             : 
// Checks the closure's feedback vector for an optimization marker or
// already-optimized code and tail-calls into it when present; otherwise
// tail-calls the code from the SharedFunctionInfo.
void Builtins::Generate_CheckOptimizationMarker(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : argument count (preserved for callee)
  //  -- rdx : new target (preserved for callee)
  //  -- rdi : target function (preserved for callee)
  // -----------------------------------
  Register closure = rdi;

  // Get the feedback vector (stored behind a Cell on the closure).
  Register feedback_vector = rbx;
  __ movp(feedback_vector,
          FieldOperand(closure, JSFunction::kFeedbackVectorOffset));
  __ movp(feedback_vector, FieldOperand(feedback_vector, Cell::kValueOffset));

  // The feedback vector must be defined.
  if (FLAG_debug_code) {
    __ CompareRoot(feedback_vector, Heap::kUndefinedValueRootIndex);
    __ Assert(not_equal, BailoutReason::kExpectedFeedbackVector);
  }

  // Is there an optimization marker or optimized code in the feedback vector?
  MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, rcx, r14, r15);

  // Otherwise, tail call the SFI code.
  GenerateTailCallToSharedCode(masm);
}
    1299             : 
// TODO(jupvfranco): investigate whether there is any case where the CompileLazy
// builtin does not set the code field in the JS function. If there isn't then
// we do not need this builtin and can jump directly to CompileLazy.
// Resets the function's code to the SFI's code (the interpreter entry
// trampoline) and falls through to CompileLazy.
void Builtins::Generate_CompileLazyDeoptimizedCode(MacroAssembler* masm) {
  // Set the code slot inside the JSFunction to the trampoline to the
  // interpreter entry.
  __ movq(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movq(rcx, FieldOperand(rcx, SharedFunctionInfo::kCodeOffset));
  __ movq(FieldOperand(rdi, JSFunction::kCodeOffset), rcx);
  // Notify the GC of the in-heap pointer store (r15 is a scratch here).
  __ RecordWriteField(rdi, JSFunction::kCodeOffset, rcx, r15, kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  // Jump to compile lazy.
  Generate_CompileLazy(masm);
}
    1314             : 
// Lazy-compilation entry: first tries optimized code from the feedback
// vector, then code already installed on the SharedFunctionInfo; only when
// both are absent does it call Runtime::kCompileLazy.
void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : argument count (preserved for callee)
  //  -- rdx : new target (preserved for callee)
  //  -- rdi : target function (preserved for callee)
  // -----------------------------------
  // First lookup code, maybe we don't need to compile!
  Label gotta_call_runtime;

  Register closure = rdi;
  Register feedback_vector = rbx;

  // Do we have a valid feedback vector?
  __ movp(feedback_vector,
          FieldOperand(closure, JSFunction::kFeedbackVectorOffset));
  __ movp(feedback_vector, FieldOperand(feedback_vector, Cell::kValueOffset));
  __ JumpIfRoot(feedback_vector, Heap::kUndefinedValueRootIndex,
                &gotta_call_runtime);

  // Is there an optimization marker or optimized code in the feedback vector?
  MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, rcx, r14, r15);

  // We found no optimized code.
  Register entry = rcx;
  __ movp(entry, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));

  // If SFI points to anything other than CompileLazy, install that.
  // (masm->CodeObject() is this builtin's own Code object.)
  __ movp(entry, FieldOperand(entry, SharedFunctionInfo::kCodeOffset));
  __ Move(rbx, masm->CodeObject());
  __ cmpp(entry, rbx);
  __ j(equal, &gotta_call_runtime);

  // Install the SFI's code entry.
  __ movp(FieldOperand(closure, JSFunction::kCodeOffset), entry);
  __ movp(r14, entry);  // Write barrier clobbers r14 below.
  __ RecordWriteField(closure, JSFunction::kCodeOffset, r14, r15,
                      kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  // Tail-jump to the installed code's instruction start.
  __ leap(entry, FieldOperand(entry, Code::kHeaderSize));
  __ jmp(entry);

  __ bind(&gotta_call_runtime);
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}
    1358             : 
// Lazy deserialization design doc: http://goo.gl/dxkYDZ.
// If the target builtin has already been deserialized (its entry in the
// builtins table no longer points at DeserializeLazy), copies the Code object
// onto the SFI and the function and jumps to it; otherwise falls back to
// Runtime::kDeserializeLazy.
void Builtins::Generate_DeserializeLazy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : argument count (preserved for callee)
  //  -- rdx : new target (preserved for callee)
  //  -- rdi : target function (preserved for callee)
  // -----------------------------------

  Label deserialize_in_runtime;

  Register target = rdi;  // Must be preserved
  Register scratch0 = rbx;
  Register scratch1 = r12;

  // Scratches must not alias the registers that carry the calling convention.
  CHECK(scratch0 != rax && scratch0 != rdx && scratch0 != rdi);
  CHECK(scratch1 != rax && scratch1 != rdx && scratch1 != rdi);
  CHECK(scratch0 != scratch1);

  // Load the builtin id for lazy deserialization from SharedFunctionInfo.

  __ AssertFunction(target);
  __ movp(scratch0,
          FieldOperand(target, JSFunction::kSharedFunctionInfoOffset));

  __ movp(scratch1,
          FieldOperand(scratch0, SharedFunctionInfo::kFunctionDataOffset));
  __ AssertSmi(scratch1);

  // The builtin may already have been deserialized. If that is the case, it is
  // stored in the builtins table, and we can copy to correct code object to
  // both the shared function info and function without calling into runtime.
  //
  // Otherwise, we need to call into runtime to deserialize.

  {
    // Load the code object at builtins_table[builtin_id] into scratch1.

    __ SmiToInteger32(scratch1, scratch1);
    __ Move(scratch0, ExternalReference::builtins_address(masm->isolate()));
    __ movp(scratch1, Operand(scratch0, scratch1, times_pointer_size, 0));

    // Check if the loaded code object has already been deserialized. This is
    // the case iff it does not equal DeserializeLazy.
    // (masm->CodeObject() is this DeserializeLazy builtin itself.)

    __ Move(scratch0, masm->CodeObject());
    __ cmpp(scratch1, scratch0);
    __ j(equal, &deserialize_in_runtime);
  }

  {
    // If we've reached this spot, the target builtin has been deserialized and
    // we simply need to copy it over. First to the shared function info.

    Register target_builtin = scratch1;
    Register shared = scratch0;

    __ movp(shared,
            FieldOperand(target, JSFunction::kSharedFunctionInfoOffset));

    // r14/r15 are used by the write barrier below; they must stay disjoint.
    CHECK(r14 != target && r14 != scratch0 && r14 != scratch1);
    CHECK(r15 != target && r15 != scratch0 && r15 != scratch1);

    __ movp(FieldOperand(shared, SharedFunctionInfo::kCodeOffset),
            target_builtin);
    __ movp(r14, target_builtin);  // Write barrier clobbers r14 below.
    __ RecordWriteField(shared, SharedFunctionInfo::kCodeOffset, r14, r15,
                        kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);

    // And second to the target function.

    __ movp(FieldOperand(target, JSFunction::kCodeOffset), target_builtin);
    __ movp(r14, target_builtin);  // Write barrier clobbers r14 below.
    __ RecordWriteField(target, JSFunction::kCodeOffset, r14, r15,
                        kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);

    // All copying is done. Jump to the deserialized code object.

    __ leap(target_builtin, FieldOperand(target_builtin, Code::kHeaderSize));
    __ jmp(target_builtin);
  }

  __ bind(&deserialize_in_runtime);
  GenerateTailCallToReturnedCode(masm, Runtime::kDeserializeLazy);
}
    1443             : 
// Attempts asm.js instantiation via Runtime::kInstantiateAsmJs, passing up to
// three caller arguments (stdlib, foreign, heap) padded with undefined. On
// success, unwinds this frame and the parent frame and returns the module
// object; on failure, re-invokes the function via its (reset) code slot.
void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : argument count (preserved for callee)
  //  -- rdx : new target (preserved for callee)
  //  -- rdi : target function (preserved for callee)
  // -----------------------------------
  Label failed;
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Preserve argument count for later compare.
    __ movp(rcx, rax);
    // Push the number of arguments to the callee.
    __ Integer32ToSmi(rax, rax);
    __ Push(rax);
    // Push a copy of the target function and the new target.
    __ Push(rdi);
    __ Push(rdx);

    // The function.
    __ Push(rdi);
    // Copy arguments from caller (stdlib, foreign, heap).
    // For each possible argument count j in 0..3, push the j caller-supplied
    // arguments (in reverse so they end up in call order) followed by
    // (3 - j) undefineds, so the runtime call always sees exactly 3 slots.
    Label args_done;
    for (int j = 0; j < 4; ++j) {
      Label over;
      if (j < 3) {
        __ cmpp(rcx, Immediate(j));
        __ j(not_equal, &over, Label::kNear);
      }
      for (int i = j - 1; i >= 0; --i) {
        __ Push(Operand(
            rbp, StandardFrameConstants::kCallerSPOffset + i * kPointerSize));
      }
      for (int i = 0; i < 3 - j; ++i) {
        __ PushRoot(Heap::kUndefinedValueRootIndex);
      }
      if (j < 3) {
        __ jmp(&args_done, Label::kNear);
        __ bind(&over);
      }
    }
    __ bind(&args_done);

    // Call runtime, on success unwind frame, and parent frame.
    __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
    // A smi 0 is returned on failure, an object on success.
    __ JumpIfSmi(rax, &failed, Label::kNear);

    // Success: drop the saved copies of rdi/rdx, recover the original
    // argument count, and leave the internal frame.
    __ Drop(2);
    __ Pop(rcx);
    __ SmiToInteger32(rcx, rcx);
    scope.GenerateLeaveFrame();

    // Remove the caller's arguments plus the receiver (rcx + 1 slots) while
    // keeping the return address on top, then return the module object.
    __ PopReturnAddressTo(rbx);
    __ incp(rcx);
    __ leap(rsp, Operand(rsp, rcx, times_pointer_size, 0));
    __ PushReturnAddressFrom(rbx);
    __ ret(0);

    __ bind(&failed);
    // Restore target function and new target.
    __ Pop(rdx);
    __ Pop(rdi);
    __ Pop(rax);
    __ SmiToInteger32(rax, rax);
  }
  // On failure, tail call back to regular js by re-calling the function
  // which has be reset to the compile lazy builtin.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeOffset));
  __ addp(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(rcx);
}
    1515             : 
namespace {
// Shared tail of the ContinueTo* builtins: restores the allocatable general
// registers that the deoptimizer saved in the builtin-continuation frame,
// tears the frame down, and jumps into the continuation builtin whose Code
// object the deoptimizer placed on the stack.
// |with_result| : the LAZY deopt return value in rax replaces the hole the
//                 deoptimizer pushed into the frame.
// |java_script_builtin| : the argument-count register was saved as a Smi and
//                 must be converted back to an int32.
void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
                                      bool java_script_builtin,
                                      bool with_result) {
  const RegisterConfiguration* config(RegisterConfiguration::Default());
  int allocatable_register_count = config->num_allocatable_general_registers();
  if (with_result) {
    // Overwrite the hole inserted by the deoptimizer with the return value from
    // the LAZY deopt point.
    __ movq(Operand(rsp,
                    config->num_allocatable_general_registers() * kPointerSize +
                        BuiltinContinuationFrameConstants::kFixedFrameSize),
            rax);
  }
  // Pop the saved registers in reverse allocation order.
  for (int i = allocatable_register_count - 1; i >= 0; --i) {
    int code = config->GetAllocatableGeneralCode(i);
    __ popq(Register::from_code(code));
    if (java_script_builtin && code == kJavaScriptCallArgCountRegister.code()) {
      __ SmiToInteger32(Register::from_code(code), Register::from_code(code));
    }
  }
  // Restore the caller's frame pointer from the continuation frame.
  __ movq(
      rbp,
      Operand(rsp, BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
  const int offsetToPC =
      BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp - kPointerSize;
  // Move the continuation's Code object into the return-address slot, drop
  // the rest of the fixed frame, convert the Code pointer to its entry
  // address in place, and "return" into it.
  __ popq(Operand(rsp, offsetToPC));
  __ Drop(offsetToPC / kPointerSize);
  __ addq(Operand(rsp, 0), Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ Ret();
}
}  // namespace
    1548             : 
// Continuation into a code-stub builtin: no JS argument count, no result.
void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, false);
}
    1552             : 
// Continuation into a code-stub builtin, forwarding the lazy-deopt result.
void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, true);
}
    1557             : 
// Continuation into a JavaScript builtin (restores the Smi arg count).
void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, false);
}
    1561             : 
                      : // Continuation trampoline for a JavaScript builtin after deoptimization
                      : // that also restores a result value
                      : // (helper flags: java_script_builtin=true, with_result=true).
     1562          31 : void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
     1563             :     MacroAssembler* masm) {
     1564          31 :   Generate_ContinueToBuiltinHelper(masm, true, true);
     1565          31 : }
    1566             : 
                      : // Notifies the runtime that deoptimization has happened, then returns to
                      : // the interpreter with the deopted frame's accumulator value in rax.
     1567          31 : void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
     1568             :   // Enter an internal frame.
     1569             :   {
     1570          31 :     FrameScope scope(masm, StackFrame::INTERNAL);
     1571          31 :     __ CallRuntime(Runtime::kNotifyDeoptimized);
     1572             :     // Tear down internal frame.
     1573             :   }
     1574             : 
                      :   // The deoptimizer left the accumulator value on the stack just below the
                      :   // return address; pop it into the interpreter accumulator register (rax).
     1575             :   DCHECK_EQ(kInterpreterAccumulatorRegister.code(), rax.code());
     1576          62 :   __ movp(rax, Operand(rsp, kPCOnStackSize));
     1577          31 :   __ ret(1 * kPointerSize);  // Remove rax.
     1578          31 : }
    1579             : 
    1580             : // static
                      : // Implements Function.prototype.apply: reshuffles the stack so that only
                      : // thisArg remains as receiver, then tail-calls CallWithArrayLike with the
                      : // argArray (or the plain Call builtin when argArray is null/undefined).
     1581          31 : void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
     1582             :   // ----------- S t a t e -------------
     1583             :   //  -- rax     : argc
     1584             :   //  -- rsp[0]  : return address
     1585             :   //  -- rsp[8]  : argArray
     1586             :   //  -- rsp[16] : thisArg
     1587             :   //  -- rsp[24] : receiver
     1588             :   // -----------------------------------
     1589             : 
     1590             :   // 1. Load receiver into rdi, argArray into rbx (if present), remove all
     1591             :   // arguments from the stack (including the receiver), and push thisArg (if
     1592             :   // present) instead.
     1593             :   {
     1594             :     Label no_arg_array, no_this_arg;
     1595             :     StackArgumentsAccessor args(rsp, rax);
                      :     // Missing thisArg/argArray default to undefined (rdx and rbx).
     1596          93 :     __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
     1597          31 :     __ movp(rbx, rdx);
     1598             :     __ movp(rdi, args.GetReceiverOperand());
     1599             :     __ testp(rax, rax);
     1600          31 :     __ j(zero, &no_this_arg, Label::kNear);
     1601             :     {
     1602          31 :       __ movp(rdx, args.GetArgumentOperand(1));
     1603          31 :       __ cmpp(rax, Immediate(1));
     1604          31 :       __ j(equal, &no_arg_array, Label::kNear);
     1605          31 :       __ movp(rbx, args.GetArgumentOperand(2));
     1606          31 :       __ bind(&no_arg_array);
     1607             :     }
     1608          31 :     __ bind(&no_this_arg);
                      :     // Drop argc + 1 slots (arguments plus receiver), then push thisArg as
                      :     // the new receiver under the saved return address.
     1609             :     __ PopReturnAddressTo(rcx);
     1610          62 :     __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
     1611          31 :     __ Push(rdx);
     1612             :     __ PushReturnAddressFrom(rcx);
     1613             :   }
     1614             : 
     1615             :   // ----------- S t a t e -------------
     1616             :   //  -- rbx     : argArray
     1617             :   //  -- rdi     : receiver
     1618             :   //  -- rsp[0]  : return address
     1619             :   //  -- rsp[8]  : thisArg
     1620             :   // -----------------------------------
     1621             : 
     1622             :   // 2. We don't need to check explicitly for callable receiver here,
     1623             :   // since that's the first thing the Call/CallWithArrayLike builtins
     1624             :   // will do.
     1625             : 
     1626             :   // 3. Tail call with no arguments if argArray is null or undefined.
     1627             :   Label no_arguments;
     1628             :   __ JumpIfRoot(rbx, Heap::kNullValueRootIndex, &no_arguments, Label::kNear);
     1629             :   __ JumpIfRoot(rbx, Heap::kUndefinedValueRootIndex, &no_arguments,
     1630             :                 Label::kNear);
     1631             : 
     1632             :   // 4a. Apply the receiver to the given argArray.
     1633             :   __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
     1634          31 :           RelocInfo::CODE_TARGET);
     1635             : 
     1636             :   // 4b. The argArray is either null or undefined, so we tail call without any
     1637             :   // arguments to the receiver. Since we did not create a frame for
     1638             :   // Function.prototype.apply() yet, we use a normal Call builtin here.
     1639          31 :   __ bind(&no_arguments);
     1640             :   {
     1641          31 :     __ Set(rax, 0);
     1642          31 :     __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
     1643             :   }
     1644          31 : }
    1645             : 
    1646             : // static
                      : // Implements Function.prototype.call: shifts all arguments one slot down
                      : // so the first argument becomes the receiver, then tail-calls the Call
                      : // builtin on the original receiver (the callable).
     1647          31 : void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
     1648             :   // Stack Layout:
     1649             :   // rsp[0]           : Return address
     1650             :   // rsp[8]           : Argument n
     1651             :   // rsp[16]          : Argument n-1
     1652             :   //  ...
     1653             :   // rsp[8 * n]       : Argument 1
     1654             :   // rsp[8 * (n + 1)] : Receiver (callable to call)
     1655             :   //
     1656             :   // rax contains the number of arguments, n, not counting the receiver.
     1657             :   //
     1658             :   // 1. Make sure we have at least one argument.
     1659             :   {
     1660             :     Label done;
     1661          31 :     __ testp(rax, rax);
     1662          31 :     __ j(not_zero, &done, Label::kNear);
                      :     // No arguments: push undefined as the (future) receiver so the shift
                      :     // loop below always has something to consume.
     1663          31 :     __ PopReturnAddressTo(rbx);
     1664          31 :     __ PushRoot(Heap::kUndefinedValueRootIndex);
     1665             :     __ PushReturnAddressFrom(rbx);
     1666             :     __ incp(rax);
     1667          31 :     __ bind(&done);
     1668             :   }
     1669             : 
     1670             :   // 2. Get the callable to call (passed as receiver) from the stack.
     1671             :   {
     1672             :     StackArgumentsAccessor args(rsp, rax);
     1673             :     __ movp(rdi, args.GetReceiverOperand());
     1674             :   }
     1675             : 
     1676             :   // 3. Shift arguments and return address one slot down on the stack
     1677             :   //    (overwriting the original receiver).  Adjust argument count to make
     1678             :   //    the original first argument the new receiver.
     1679             :   {
     1680             :     Label loop;
     1681             :     __ movp(rcx, rax);
     1682             :     StackArgumentsAccessor args(rsp, rcx);
     1683          31 :     __ bind(&loop);
     1684          31 :     __ movp(rbx, args.GetArgumentOperand(1));
     1685          31 :     __ movp(args.GetArgumentOperand(0), rbx);
     1686             :     __ decp(rcx);
     1687          31 :     __ j(not_zero, &loop);              // While non-zero.
     1688          31 :     __ DropUnderReturnAddress(1, rbx);  // Drop one slot under return address.
     1689             :     __ decp(rax);  // One fewer argument (first argument is new receiver).
     1690             :   }
     1691             : 
     1692             :   // 4. Call the callable.
     1693             :   // Since we did not create a frame for Function.prototype.call() yet,
     1694             :   // we use a normal Call builtin here.
     1695          31 :   __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
     1696          31 : }
    1697             : 
                      : // Implements Reflect.apply(target, thisArgument, argumentsList):
                      : // collapses the stack to just thisArgument as receiver, then tail-calls
                      : // CallWithArrayLike with the target (rdi) and argumentsList (rbx).
     1698          31 : void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
     1699             :   // ----------- S t a t e -------------
     1700             :   //  -- rax     : argc
     1701             :   //  -- rsp[0]  : return address
     1702             :   //  -- rsp[8]  : argumentsList
     1703             :   //  -- rsp[16] : thisArgument
     1704             :   //  -- rsp[24] : target
     1705             :   //  -- rsp[32] : receiver
     1706             :   // -----------------------------------
     1707             : 
     1708             :   // 1. Load target into rdi (if present), argumentsList into rbx (if present),
     1709             :   // remove all arguments from the stack (including the receiver), and push
     1710             :   // thisArgument (if present) instead.
     1711             :   {
     1712             :     Label done;
     1713             :     StackArgumentsAccessor args(rsp, rax);
                      :     // Missing operands default to undefined (rdi/rdx/rbx).
     1714          62 :     __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);
     1715          31 :     __ movp(rdx, rdi);
     1716             :     __ movp(rbx, rdi);
                      :     // Load as many of target/thisArgument/argumentsList as argc provides;
                      :     // each comparison below falls through to &done once argc is exhausted.
     1717          31 :     __ cmpp(rax, Immediate(1));
     1718          31 :     __ j(below, &done, Label::kNear);
     1719          31 :     __ movp(rdi, args.GetArgumentOperand(1));  // target
     1720          31 :     __ j(equal, &done, Label::kNear);
     1721          31 :     __ movp(rdx, args.GetArgumentOperand(2));  // thisArgument
     1722          31 :     __ cmpp(rax, Immediate(3));
     1723          31 :     __ j(below, &done, Label::kNear);
     1724          31 :     __ movp(rbx, args.GetArgumentOperand(3));  // argumentsList
     1725          31 :     __ bind(&done);
                      :     // Drop argc + 1 slots and push thisArgument as the new receiver.
     1726             :     __ PopReturnAddressTo(rcx);
     1727          62 :     __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
     1728          31 :     __ Push(rdx);
     1729             :     __ PushReturnAddressFrom(rcx);
     1730             :   }
     1731             : 
     1732             :   // ----------- S t a t e -------------
     1733             :   //  -- rbx     : argumentsList
     1734             :   //  -- rdi     : target
     1735             :   //  -- rsp[0]  : return address
     1736             :   //  -- rsp[8]  : thisArgument
     1737             :   // -----------------------------------
     1738             : 
     1739             :   // 2. We don't need to check explicitly for callable target here,
     1740             :   // since that's the first thing the Call/CallWithArrayLike builtins
     1741             :   // will do.
     1742             : 
     1743             :   // 3. Apply the target to the given argumentsList.
     1744             :   __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
     1745          31 :           RelocInfo::CODE_TARGET);
     1746          31 : }
    1747             : 
                      : // Implements Reflect.construct(target, argumentsList[, newTarget]):
                      : // collapses the stack to an undefined receiver, then tail-calls
                      : // ConstructWithArrayLike with target (rdi), new.target (rdx) and
                      : // argumentsList (rbx).
     1748          31 : void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
     1749             :   // ----------- S t a t e -------------
     1750             :   //  -- rax     : argc
     1751             :   //  -- rsp[0]  : return address
     1752             :   //  -- rsp[8]  : new.target (optional)
     1753             :   //  -- rsp[16] : argumentsList
     1754             :   //  -- rsp[24] : target
     1755             :   //  -- rsp[32] : receiver
     1756             :   // -----------------------------------
     1757             : 
     1758             :   // 1. Load target into rdi (if present), argumentsList into rbx (if present),
     1759             :   // new.target into rdx (if present, otherwise use target), remove all
     1760             :   // arguments from the stack (including the receiver), and push thisArgument
     1761             :   // (if present) instead.
     1762             :   {
     1763             :     Label done;
     1764             :     StackArgumentsAccessor args(rsp, rax);
                      :     // Missing operands default to undefined (rdi/rdx/rbx).
     1765          62 :     __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);
     1766          31 :     __ movp(rdx, rdi);
     1767             :     __ movp(rbx, rdi);
     1768          31 :     __ cmpp(rax, Immediate(1));
     1769          31 :     __ j(below, &done, Label::kNear);
     1770          31 :     __ movp(rdi, args.GetArgumentOperand(1));  // target
     1771             :     __ movp(rdx, rdi);                         // new.target defaults to target
     1772          31 :     __ j(equal, &done, Label::kNear);
     1773          31 :     __ movp(rbx, args.GetArgumentOperand(2));  // argumentsList
     1774          31 :     __ cmpp(rax, Immediate(3));
     1775          31 :     __ j(below, &done, Label::kNear);
     1776          31 :     __ movp(rdx, args.GetArgumentOperand(3));  // new.target
     1777          31 :     __ bind(&done);
                      :     // Drop argc + 1 slots; the receiver for [[Construct]] is undefined.
     1778             :     __ PopReturnAddressTo(rcx);
     1779          62 :     __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
     1780          31 :     __ PushRoot(Heap::kUndefinedValueRootIndex);
     1781             :     __ PushReturnAddressFrom(rcx);
     1782             :   }
     1783             : 
     1784             :   // ----------- S t a t e -------------
     1785             :   //  -- rbx     : argumentsList
     1786             :   //  -- rdx     : new.target
     1787             :   //  -- rdi     : target
     1788             :   //  -- rsp[0]  : return address
     1789             :   //  -- rsp[8]  : receiver (undefined)
     1790             :   // -----------------------------------
     1791             : 
     1792             :   // 2. We don't need to check explicitly for constructor target here,
     1793             :   // since that's the first thing the Construct/ConstructWithArrayLike
     1794             :   // builtins will do.
     1795             : 
     1796             :   // 3. We don't need to check explicitly for constructor new.target here,
     1797             :   // since that's the second thing the Construct/ConstructWithArrayLike
     1798             :   // builtins will do.
     1799             : 
     1800             :   // 4. Construct the target with the given new.target and argumentsList.
     1801             :   __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithArrayLike),
     1802          31 :           RelocInfo::CODE_TARGET);
     1803          31 : }
    1804             : 
                      : // Entry point for the InternalArray constructor called as a normal
                      : // function: loads the InternalArray function from the native context,
                      : // sanity-checks its initial map in debug builds, and tail-calls the
                      : // InternalArrayConstructorStub.
     1805          31 : void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {
     1806             :   // ----------- S t a t e -------------
     1807             :   //  -- rax    : argc
     1808             :   //  -- rsp[0] : return address
     1809             :   //  -- rsp[8] : last argument
     1810             :   // -----------------------------------
     1811             :   Label generic_array_code;
     1812             : 
     1813             :   // Get the InternalArray function.
     1814          31 :   __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, rdi);
     1815             : 
     1816          31 :   if (FLAG_debug_code) {
     1817             :     // Initial map for the builtin InternalArray functions should be maps.
     1818           0 :     __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
     1819             :     // Will both indicate a nullptr and a Smi.
     1820             :     STATIC_ASSERT(kSmiTag == 0);
     1821          31 :     Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
     1822           0 :     __ Check(not_smi, kUnexpectedInitialMapForInternalArrayFunction);
     1823           0 :     __ CmpObjectType(rbx, MAP_TYPE, rcx);
     1824           0 :     __ Check(equal, kUnexpectedInitialMapForInternalArrayFunction);
     1825             :   }
     1826             : 
     1827             :   // Run the native code for the InternalArray function called as a normal
     1828             :   // function.
     1829             :   // tail call a stub
     1830          31 :   InternalArrayConstructorStub stub(masm->isolate());
     1831          31 :   __ TailCallStub(&stub);
     1832          31 : }
    1833             : 
                      : // Entry point for the Array constructor called as a normal function:
                      : // loads the Array function from the native context, sanity-checks its
                      : // initial map in debug builds, and tail-calls the ArrayConstructorStub
                      : // with no AllocationSite (rbx = undefined).
     1834          31 : void Builtins::Generate_ArrayConstructor(MacroAssembler* masm) {
     1835             :   // ----------- S t a t e -------------
     1836             :   //  -- rax    : argc
     1837             :   //  -- rsp[0] : return address
     1838             :   //  -- rsp[8] : last argument
     1839             :   // -----------------------------------
     1840             :   Label generic_array_code;
     1841             : 
     1842             :   // Get the Array function.
     1843          31 :   __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, rdi);
     1844             : 
     1845          31 :   if (FLAG_debug_code) {
     1846             :     // Initial map for the builtin Array functions should be maps.
     1847           0 :     __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
     1848             :     // Will both indicate a nullptr and a Smi.
     1849             :     STATIC_ASSERT(kSmiTag == 0);
     1850          31 :     Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
     1851           0 :     __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
     1852           0 :     __ CmpObjectType(rbx, MAP_TYPE, rcx);
     1853           0 :     __ Check(equal, kUnexpectedInitialMapForArrayFunction);
     1854             :   }
     1855             : 
                      :   // new.target defaults to the function itself for a plain call.
     1856          31 :   __ movp(rdx, rdi);
     1857             :   // Run the native code for the Array function called as a normal function.
     1858             :   // tail call a stub
     1859          31 :   __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
     1860          31 :   ArrayConstructorStub stub(masm->isolate());
     1861          31 :   __ TailCallStub(&stub);
     1862          31 : }
    1863             : 
                      : // Builds an arguments-adaptor frame: saved rbp, frame-type marker,
                      : // function (rdi), and the Smi-tagged actual argument count. Leaves the
                      : // actual argc in rax untouched for the subsequent argument copy.
     1864          62 : static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
     1865          62 :   __ pushq(rbp);
     1866             :   __ movp(rbp, rsp);
     1867             : 
     1868             :   // Store the arguments adaptor context sentinel.
     1869          62 :   __ Push(Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
     1870             : 
     1871             :   // Push the function on the stack.
     1872          62 :   __ Push(rdi);
     1873             : 
     1874             :   // Preserve the number of arguments on the stack. Must preserve rax,
     1875             :   // rbx and rcx because these registers are used when copying the
     1876             :   // arguments and the receiver.
     1877          62 :   __ Integer32ToSmi(r8, rax);
     1878          62 :   __ Push(r8);
     1879          62 : }
    1880             : 
                      : // Tears down an arguments-adaptor frame built by
                      : // EnterArgumentsAdaptorFrame and removes the caller-pushed arguments
                      : // (actual argc + receiver) from the stack.
     1881          31 : static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
     1882             :   // Retrieve the number of arguments from the stack. Number is a Smi.
     1883          62 :   __ movp(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));
     1884             : 
     1885             :   // Leave the frame.
     1886             :   __ movp(rsp, rbp);
     1887          31 :   __ popq(rbp);
     1888             : 
     1889             :   // Remove caller arguments from the stack.
                      :   // SmiToIndex scales the Smi argc for addressing; the extra
                      :   // 1 * kPointerSize accounts for the receiver slot.
     1890             :   __ PopReturnAddressTo(rcx);
     1891          31 :   SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
     1892          62 :   __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
     1893             :   __ PushReturnAddressFrom(rcx);
     1894          31 : }
    1895             : 
    1896             : // static
                      : // Trampoline to Runtime::kAllocateInNewSpace: Smi-tags the requested
                      : // size and passes it as the single stack argument, with a cleared
                      : // context register (rsi = Smi::kZero) since no JS context is needed.
     1897          31 : void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
     1898             :   // ----------- S t a t e -------------
     1899             :   //  -- rdx    : requested object size (untagged)
     1900             :   //  -- rsp[0] : return address
     1901             :   // -----------------------------------
     1902          31 :   __ Integer32ToSmi(rdx, rdx);
     1903             :   __ PopReturnAddressTo(rcx);
     1904          31 :   __ Push(rdx);
     1905             :   __ PushReturnAddressFrom(rcx);
     1906          31 :   __ Move(rsi, Smi::kZero);
     1907          31 :   __ TailCallRuntime(Runtime::kAllocateInNewSpace);
     1908          31 : }
    1909             : 
    1910             : // static
                      : // Trampoline to Runtime::kAllocateInTargetSpace: Smi-tags the requested
                      : // size and additionally passes the encoded OLD_SPACE target, with a
                      : // cleared context register (rsi = Smi::kZero).
     1911          31 : void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
     1912             :   // ----------- S t a t e -------------
     1913             :   //  -- rdx    : requested object size (untagged)
     1914             :   //  -- rsp[0] : return address
     1915             :   // -----------------------------------
     1916          31 :   __ Integer32ToSmi(rdx, rdx);
     1917             :   __ PopReturnAddressTo(rcx);
     1918          31 :   __ Push(rdx);
     1919          31 :   __ Push(Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
     1920             :   __ PushReturnAddressFrom(rcx);
     1921          31 :   __ Move(rsi, Smi::kZero);
     1922          31 :   __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
     1923          31 : }
    1924             : 
    1925             : // static
                      : // Trampoline to Runtime::kAbort: forwards the Smi message id as the
                      : // single stack argument, with a cleared context register.
     1926          31 : void Builtins::Generate_Abort(MacroAssembler* masm) {
     1927             :   // ----------- S t a t e -------------
     1928             :   //  -- rdx    : message_id as Smi
     1929             :   //  -- rsp[0] : return address
     1930             :   // -----------------------------------
     1931             :   __ PopReturnAddressTo(rcx);
     1932          31 :   __ Push(rdx);
     1933             :   __ PushReturnAddressFrom(rcx);
     1934          31 :   __ Move(rsi, Smi::kZero);
     1935          31 :   __ TailCallRuntime(Runtime::kAbort);
     1936          31 : }
    1937             : 
    1938             : // static
                      : // Trampoline to Runtime::kAbortJS: forwards the message String as the
                      : // single stack argument, with a cleared context register.
     1939          31 : void Builtins::Generate_AbortJS(MacroAssembler* masm) {
     1940             :   // ----------- S t a t e -------------
     1941             :   //  -- rdx    : message as String object
     1942             :   //  -- rsp[0] : return address
     1943             :   // -----------------------------------
     1944             :   __ PopReturnAddressTo(rcx);
     1945          31 :   __ Push(rdx);
     1946             :   __ PushReturnAddressFrom(rcx);
     1947          31 :   __ Move(rsi, Smi::kZero);
     1948          31 :   __ TailCallRuntime(Runtime::kAbortJS);
     1949          31 : }
    1950             : 
                      : // Adapts the actual argument count (rax) to the callee's expected count
                      : // (rbx): copies arguments into an adaptor frame, padding with undefined
                      : // when too few are supplied, then calls the function's code object.
                      : // Functions marked kDontAdaptArgumentsSentinel are jumped to directly.
     1951          31 : void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
     1952             :   // ----------- S t a t e -------------
     1953             :   //  -- rax : actual number of arguments
     1954             :   //  -- rbx : expected number of arguments
     1955             :   //  -- rdx : new target (passed through to callee)
     1956             :   //  -- rdi : function (passed through to callee)
     1957             :   // -----------------------------------
     1958             : 
     1959             :   Label invoke, dont_adapt_arguments, stack_overflow;
     1960          62 :   Counters* counters = masm->isolate()->counters();
     1961          31 :   __ IncrementCounter(counters->arguments_adaptors(), 1);
     1962             : 
     1963             :   Label enough, too_few;
     1964          31 :   __ cmpp(rax, rbx);
     1965          31 :   __ j(less, &too_few);
     1966          31 :   __ cmpp(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
     1967          31 :   __ j(equal, &dont_adapt_arguments);
     1968             : 
     1969             :   {  // Enough parameters: Actual >= expected.
     1970          31 :     __ bind(&enough);
     1971          31 :     EnterArgumentsAdaptorFrame(masm);
     1972             :     // The registers rcx and r8 will be modified. The register rbx is only read.
     1973          31 :     Generate_StackOverflowCheck(masm, rbx, rcx, &stack_overflow);
     1974             : 
     1975             :     // Copy receiver and all expected arguments.
                      :     // rax is repurposed as a pointer walking down the caller's arguments;
                      :     // r8 counts slots copied so far.
     1976             :     const int offset = StandardFrameConstants::kCallerSPOffset;
     1977          62 :     __ leap(rax, Operand(rbp, rax, times_pointer_size, offset));
     1978          31 :     __ Set(r8, -1);  // account for receiver
     1979             : 
     1980             :     Label copy;
     1981          31 :     __ bind(&copy);
     1982             :     __ incp(r8);
     1983          31 :     __ Push(Operand(rax, 0));
     1984          31 :     __ subp(rax, Immediate(kPointerSize));
     1985          31 :     __ cmpp(r8, rbx);
     1986          31 :     __ j(less, &copy);
     1987          31 :     __ jmp(&invoke);
     1988             :   }
     1989             : 
     1990             :   {  // Too few parameters: Actual < expected.
     1991          31 :     __ bind(&too_few);
     1992             : 
     1993          31 :     EnterArgumentsAdaptorFrame(masm);
     1994             :     // The registers rcx and r8 will be modified. The register rbx is only read.
     1995          31 :     Generate_StackOverflowCheck(masm, rbx, rcx, &stack_overflow);
     1996             : 
     1997             :     // Copy receiver and all actual arguments.
                      :     // rdi is temporarily repurposed as the copy pointer; the function is
                      :     // reloaded from the adaptor frame afterwards.
     1998             :     const int offset = StandardFrameConstants::kCallerSPOffset;
     1999          62 :     __ leap(rdi, Operand(rbp, rax, times_pointer_size, offset));
     2000          31 :     __ Set(r8, -1);  // account for receiver
     2001             : 
     2002             :     Label copy;
     2003          31 :     __ bind(&copy);
     2004             :     __ incp(r8);
     2005          31 :     __ Push(Operand(rdi, 0));
     2006          31 :     __ subp(rdi, Immediate(kPointerSize));
     2007          31 :     __ cmpp(r8, rax);
     2008          31 :     __ j(less, &copy);
     2009             : 
     2010             :     // Fill remaining expected arguments with undefined values.
     2011             :     Label fill;
     2012          31 :     __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
     2013          31 :     __ bind(&fill);
     2014             :     __ incp(r8);
     2015          31 :     __ Push(kScratchRegister);
     2016          31 :     __ cmpp(r8, rbx);
     2017          31 :     __ j(less, &fill);
     2018             : 
     2019             :     // Restore function pointer.
     2020          62 :     __ movp(rdi, Operand(rbp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
     2021             :   }
     2022             : 
     2023             :   // Call the entry point.
     2024          31 :   __ bind(&invoke);
     2025             :   __ movp(rax, rbx);
     2026             :   // rax : expected number of arguments
     2027             :   // rdx : new target (passed through to callee)
     2028             :   // rdi : function (passed through to callee)
                      :   // Compute the entry address: code object pointer plus header, minus the
                      :   // heap-object tag.
     2029             :   __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeOffset));
     2030          31 :   __ addp(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag));
     2031          31 :   __ call(rcx);
     2032             : 
     2033             :   // Store offset of return address for deoptimizer.
     2034          62 :   masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
     2035             : 
     2036             :   // Leave frame and return.
     2037          31 :   LeaveArgumentsAdaptorFrame(masm);
     2038          31 :   __ ret(0);
     2039             : 
     2040             :   // -------------------------------------------
     2041             :   // Dont adapt arguments.
     2042             :   // -------------------------------------------
     2043          31 :   __ bind(&dont_adapt_arguments);
     2044             :   __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeOffset));
     2045          31 :   __ addp(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag));
     2046          31 :   __ jmp(rcx);
     2047             : 
     2048          31 :   __ bind(&stack_overflow);
     2049             :   {
     2050          31 :     FrameScope frame(masm, StackFrame::MANUAL);
     2051          31 :     __ CallRuntime(Runtime::kThrowStackOverflow);
                      :     // Unreachable: kThrowStackOverflow does not return.
     2052          31 :     __ int3();
     2053             :   }
     2054          31 : }
    2055             : 
    2056             : // static
                      : // Pushes the elements of a FixedArray arguments list onto the stack
                      : // (converting holes to undefined), then tail-calls the given Call or
                      : // Construct builtin. Checks for stack overflow before pushing.
     2057          62 : void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
     2058             :                                                Handle<Code> code) {
     2059             :   // ----------- S t a t e -------------
     2060             :   //  -- rdi    : target
     2061             :   //  -- rax    : number of parameters on the stack (not including the receiver)
     2062             :   //  -- rbx    : arguments list (a FixedArray)
     2063             :   //  -- rcx    : len (number of elements to push from args)
     2064             :   //  -- rdx    : new.target (for [[Construct]])
     2065             :   //  -- rsp[0] : return address
     2066             :   // -----------------------------------
     2067          62 :   __ AssertFixedArray(rbx);
     2068             : 
     2069             :   // Check for stack overflow.
     2070             :   {
     2071             :     // Check the stack for overflow. We are not trying to catch interruptions
     2072             :     // (i.e. debug break and preemption) here, so check the "real stack limit".
     2073             :     Label done;
     2074          62 :     __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
     2075          62 :     __ movp(r8, rsp);
     2076             :     // Make r8 the space we have left. The stack might already be overflowed
     2077             :     // here which will cause r8 to become negative.
     2078          62 :     __ subp(r8, kScratchRegister);
     2079             :     __ sarp(r8, Immediate(kPointerSizeLog2));
     2080             :     // Check if the arguments will overflow the stack.
     2081          62 :     __ cmpp(r8, rcx);
     2082          62 :     __ j(greater, &done, Label::kNear);  // Signed comparison.
     2083          62 :     __ TailCallRuntime(Runtime::kThrowStackOverflow);
     2084          62 :     __ bind(&done);
     2085             :   }
     2086             : 
     2087             :   // Push additional arguments onto the stack.
     2088             :   {
     2089             :     __ PopReturnAddressTo(r8);
     2090          62 :     __ Set(r9, 0);
     2091             :     Label done, push, loop;
     2092          62 :     __ bind(&loop);
     2093          62 :     __ cmpl(r9, rcx);
     2094          62 :     __ j(equal, &done, Label::kNear);
     2095             :     // Turn the hole into undefined as we go.
     2096             :     __ movp(r11,
     2097             :             FieldOperand(rbx, r9, times_pointer_size, FixedArray::kHeaderSize));
     2098          62 :     __ CompareRoot(r11, Heap::kTheHoleValueRootIndex);
     2099          62 :     __ j(not_equal, &push, Label::kNear);
     2100          62 :     __ LoadRoot(r11, Heap::kUndefinedValueRootIndex);
     2101          62 :     __ bind(&push);
     2102          62 :     __ Push(r11);
     2103             :     __ incl(r9);
     2104          62 :     __ jmp(&loop);
     2105          62 :     __ bind(&done);
     2106             :     __ PushReturnAddressFrom(r8);
                      :     // Account for the pushed elements in the total argument count.
     2107          62 :     __ addq(rax, r9);
     2108             :   }
     2109             : 
     2110             :   // Tail-call to the actual Call or Construct builtin.
     2111          62 :   __ Jump(code, RelocInfo::CODE_TARGET);
     2112          62 : }
    2113             : 
    2114             : // static
void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
                                                      CallOrConstructMode mode,
                                                      Handle<Code> code) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (for [[Construct]] calls)
  //  -- rdi : the target to call (can be any Object)
  //  -- rcx : start index (to support rest parameters)
  // -----------------------------------
  // Forwards the caller frame's arguments, starting at index rcx, onto the
  // stack and then tail-calls {code} (a Call or Construct builtin). The
  // start index exists to support rest parameters.

  // Check if new.target has a [[Construct]] internal method; constructing
  // with a non-constructor new.target must throw a TypeError.
  if (mode == CallOrConstructMode::kConstruct) {
    Label new_target_constructor, new_target_not_constructor;
    __ JumpIfSmi(rdx, &new_target_not_constructor, Label::kNear);
    __ movp(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
    __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsConstructor));
    __ j(not_zero, &new_target_constructor, Label::kNear);
    __ bind(&new_target_not_constructor);
    {
      // Runtime call does not return; build an internal frame so the stack
      // walker sees a well-formed frame at the throw point.
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ Push(rdx);
      __ CallRuntime(Runtime::kThrowNotConstructor);
    }
    __ bind(&new_target_constructor);
  }

  // Check if we have an arguments adaptor frame below the function frame.
  // Afterwards: rbx = base frame pointer to read arguments from,
  //             r8  = number of arguments in that frame.
  Label arguments_adaptor, arguments_done;
  __ movp(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ cmpp(Operand(rbx, CommonFrameConstants::kContextOrFrameTypeOffset),
          Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &arguments_adaptor, Label::kNear);
  {
    // No adaptor frame: the caller pushed exactly the formal parameter
    // count, so read that count from the SharedFunctionInfo and use the
    // current frame (rbp) as the argument base.
    __ movp(r8, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    __ movp(r8, FieldOperand(r8, JSFunction::kSharedFunctionInfoOffset));
    __ movl(r8,
            FieldOperand(r8, SharedFunctionInfo::kFormalParameterCountOffset));
    __ movp(rbx, rbp);
  }
  __ jmp(&arguments_done, Label::kNear);
  __ bind(&arguments_adaptor);
  {
    // Adaptor frame present: it records the actual argument count as a Smi.
    __ SmiToInteger32(
        r8, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  }
  __ bind(&arguments_done);

  Label stack_done, stack_overflow;
  // r8 = number of arguments to forward (count minus start index); nothing
  // to do if the start index covers them all.
  __ subl(r8, rcx);
  __ j(less_equal, &stack_done);
  {
    // Check for stack overflow.
    Generate_StackOverflowCheck(masm, r8, rcx, &stack_overflow, Label::kNear);

    // Forward the arguments from the caller frame.
    {
      Label loop;
      __ addl(rax, r8);
      // Return address must stay on top of the stack; hold it in rcx while
      // pushing arguments (rcx's start index is no longer needed).
      __ PopReturnAddressTo(rcx);
      __ bind(&loop);
      {
        StackArgumentsAccessor args(rbx, r8, ARGUMENTS_DONT_CONTAIN_RECEIVER);
        __ Push(args.GetArgumentOperand(0));
        __ decl(r8);
        __ j(not_zero, &loop);
      }
      __ PushReturnAddressFrom(rcx);
    }
  }
  __ jmp(&stack_done, Label::kNear);
  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
  __ bind(&stack_done);

  // Tail-call to the {code} handler.
  __ Jump(code, RelocInfo::CODE_TARGET);
}
    2194             : 
    2195             : // static
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdi : the function to call (checked to be a JSFunction)
  // -----------------------------------
  // Implements [[Call]] for plain JSFunctions: rejects class constructors,
  // converts the receiver where the language mode requires it, and then
  // jumps into the function with argument-count adaption.
  StackArgumentsAccessor args(rsp, rax);
  __ AssertFunction(rdi);

  // ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that the function is not a "classConstructor"; calling a class
  // constructor without 'new' must throw.
  Label class_constructor;
  __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ testl(FieldOperand(rdx, SharedFunctionInfo::kCompilerHintsOffset),
           Immediate(SharedFunctionInfo::kClassConstructorMask));
  __ j(not_zero, &class_constructor);

  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the shared function info.
  //  -- rdi : the function to call (checked to be a JSFunction)
  // -----------------------------------

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  // Native and strict-mode functions receive the receiver unchanged.
  Label done_convert;
  __ testl(FieldOperand(rdx, SharedFunctionInfo::kCompilerHintsOffset),
           Immediate(SharedFunctionInfo::IsNativeBit::kMask |
                     SharedFunctionInfo::IsStrictBit::kMask));
  __ j(not_zero, &done_convert);
  {
    // ----------- S t a t e -------------
    //  -- rax : the number of arguments (not including the receiver)
    //  -- rdx : the shared function info.
    //  -- rdi : the function to call (checked to be a JSFunction)
    //  -- rsi : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Known null/undefined receiver: patch receiver to global proxy
      // without inspecting it.
      __ LoadGlobalProxy(rcx);
    } else {
      Label convert_to_object, convert_receiver;
      __ movp(rcx, args.GetReceiverOperand());
      __ JumpIfSmi(rcx, &convert_to_object, Label::kNear);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      // JSReceivers need no conversion.
      __ CmpObjectType(rcx, FIRST_JS_RECEIVER_TYPE, rbx);
      __ j(above_equal, &done_convert);
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(rcx, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy, Label::kNear);
        __ JumpIfNotRoot(rcx, Heap::kNullValueRootIndex, &convert_to_object,
                         Label::kNear);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(rcx);
        }
        __ jmp(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        FrameScope scope(masm, StackFrame::INTERNAL);
        // Smi-tag the raw argument count so it is a valid tagged value on
        // the stack across the call; untagged again after the Pop below.
        __ Integer32ToSmi(rax, rax);
        __ Push(rax);
        __ Push(rdi);
        __ movp(rax, rcx);
        __ Push(rsi);
        __ Call(BUILTIN_CODE(masm->isolate(), ToObject),
                RelocInfo::CODE_TARGET);
        __ Pop(rsi);
        __ movp(rcx, rax);
        __ Pop(rdi);
        __ Pop(rax);
        __ SmiToInteger32(rax, rax);
      }
      // Reload the shared function info; rdx was clobbered by the call.
      __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    __ movp(args.GetReceiverOperand(), rcx);
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the shared function info.
  //  -- rdi : the function to call (checked to be a JSFunction)
  //  -- rsi : the function context.
  // -----------------------------------

  // Load the formal parameter count for argument-count adaption and
  // tail-jump into the function body.
  __ movsxlq(
      rbx, FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
  ParameterCount actual(rax);
  ParameterCount expected(rbx);

  __ InvokeFunctionCode(rdi, no_reg, expected, actual, JUMP_FUNCTION);

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ Push(rdi);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}
    2308             : 
    2309             : namespace {
    2310             : 
// Pushes the [[BoundArguments]] of the JSBoundFunction in rdi onto the stack
// between the receiver and the existing arguments, and bumps rax by the
// number of bound arguments. Used by both the call and construct paths.
void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : new.target (only in case of [[Construct]])
  //  -- rdi : target (checked to be a JSBoundFunction)
  // -----------------------------------

  // Load [[BoundArguments]] into rcx and length of that into rbx.
  Label no_bound_arguments;
  __ movp(rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
  __ SmiToInteger32(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
  __ testl(rbx, rbx);
  __ j(zero, &no_bound_arguments);
  {
    // ----------- S t a t e -------------
    //  -- rax : the number of arguments (not including the receiver)
    //  -- rdx : new.target (only in case of [[Construct]])
    //  -- rdi : target (checked to be a JSBoundFunction)
    //  -- rcx : the [[BoundArguments]] (implemented as FixedArray)
    //  -- rbx : the number of [[BoundArguments]] (checked to be non-zero)
    // -----------------------------------

    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ leap(kScratchRegister, Operand(rbx, times_pointer_size, 0));
      __ subp(rsp, kScratchRegister);
      // Check the stack for overflow. We are not trying to catch interruptions
      // (i.e. debug break and preemption) here, so check the "real stack
      // limit".
      __ CompareRoot(rsp, Heap::kRealStackLimitRootIndex);
      __ j(greater, &done, Label::kNear);  // Signed comparison.
      // Restore the stack pointer.
      __ leap(rsp, Operand(rsp, rbx, times_pointer_size, 0));
      {
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      }
      __ bind(&done);
    }

    // Adjust effective number of arguments to include return address.
    __ incl(rax);

    // Relocate arguments and return address down the stack, from their old
    // position (rbx slots above rsp) into the freshly reserved space.
    {
      Label loop;
      __ Set(rcx, 0);
      __ leap(rbx, Operand(rsp, rbx, times_pointer_size, 0));
      __ bind(&loop);
      __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
      __ movp(Operand(rsp, rcx, times_pointer_size, 0), kScratchRegister);
      __ incl(rcx);
      __ cmpl(rcx, rax);
      __ j(less, &loop);
    }

    // Copy [[BoundArguments]] to the stack (below the arguments).
    // rbx counts down through the FixedArray; rax indexes the destination
    // slot and ends up as the new effective argument count.
    {
      Label loop;
      __ movp(rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
      __ SmiToInteger32(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
      __ bind(&loop);
      __ decl(rbx);
      __ movp(kScratchRegister, FieldOperand(rcx, rbx, times_pointer_size,
                                             FixedArray::kHeaderSize));
      __ movp(Operand(rsp, rax, times_pointer_size, 0), kScratchRegister);
      // leal (unlike incl) leaves the flags from the decl above intact for
      // the loop-back branch.
      __ leal(rax, Operand(rax, 1));
      __ j(greater, &loop);
    }

    // Adjust effective number of arguments (rax contains the number of
    // arguments from the call plus return address plus the number of
    // [[BoundArguments]]), so we need to subtract one for the return address.
    __ decl(rax);
  }
  __ bind(&no_bound_arguments);
}
    2390             : 
    2391             : }  // namespace
    2392             : 
    2393             : // static
// Implements [[Call]] for JSBoundFunction: installs [[BoundThis]] as the
// receiver, pushes the [[BoundArguments]], then tail-calls the bound target
// through the generic Call builtin.
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdi : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(rdi);

  // Patch the receiver to [[BoundThis]].
  StackArgumentsAccessor args(rsp, rax);
  __ movp(rbx, FieldOperand(rdi, JSBoundFunction::kBoundThisOffset));
  __ movp(args.GetReceiverOperand(), rbx);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin. The target may
  // itself be any callable, including another bound function.
  __ movp(rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Jump(BUILTIN_CODE(masm->isolate(), Call_ReceiverIsAny),
          RelocInfo::CODE_TARGET);
}
    2414             : 
    2415             : // static
// Generic [[Call]] dispatcher: routes any target object to the appropriate
// specialized builtin (CallFunction, CallBoundFunction, CallProxy, or the
// call-as-function delegate) and throws if the target is not callable.
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdi : the target to call (can be any Object)
  // -----------------------------------
  StackArgumentsAccessor args(rsp, rax);

  // 1. Dispatch on the target's instance type: JSFunction and
  // JSBoundFunction have dedicated builtins.
  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(rdi, &non_callable);
  __ bind(&non_smi);
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(equal, masm->isolate()->builtins()->CallFunction(mode),
       RelocInfo::CODE_TARGET);
  __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
  __ j(equal, BUILTIN_CODE(masm->isolate(), CallBoundFunction),
       RelocInfo::CODE_TARGET);

  // Check if target has a [[Call]] internal method.
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsCallable));
  __ j(zero, &non_callable);

  // Check if target is a proxy and call CallProxy external builtin
  __ CmpInstanceType(rcx, JS_PROXY_TYPE);
  __ j(not_equal, &non_function);
  __ Jump(BUILTIN_CODE(masm->isolate(), CallProxy), RelocInfo::CODE_TARGET);

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ movp(args.GetReceiverOperand(), rdi);
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, rdi);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rdi);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
    2462             : 
    2463             : // static
// Implements [[Construct]] for JSFunction targets by tail-calling into the
// function-specific construct stub stored on the SharedFunctionInfo.
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (checked to be a constructor)
  //  -- rdi : the constructor to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(rdi);

  // Calling convention for function specific ConstructStubs require
  // rbx to contain either an AllocationSite or undefined.
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point). Skip past the Code object header to reach the
  // first instruction.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset));
  __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
  __ jmp(rcx);
}
    2483             : 
    2484             : // static
// Implements [[Construct]] for JSBoundFunction: pushes the bound arguments,
// fixes up new.target per the spec, and re-dispatches to the generic
// Construct builtin on the bound target.
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (checked to be a constructor)
  //  -- rdi : the constructor to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(rdi);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  {
    Label done;
    __ cmpp(rdi, rdx);
    __ j(not_equal, &done, Label::kNear);
    __ movp(rdx,
            FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
    __ bind(&done);
  }

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ movp(rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
}
    2510             : 
    2511             : // static
// Generic [[Construct]] dispatcher: routes any target to the appropriate
// specialized builtin (ConstructFunction, ConstructBoundFunction,
// ConstructProxy, or the call-as-constructor delegate), throwing if the
// target has no [[Construct]] internal method.
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (either the same as the constructor or
  //           the JSFunction on which new was invoked initially)
  //  -- rdi : the constructor to call (can be any Object)
  // -----------------------------------
  StackArgumentsAccessor args(rsp, rax);

  // Check if target is a Smi. Smis are never constructable.
  Label non_constructor, non_proxy;
  __ JumpIfSmi(rdi, &non_constructor, Label::kNear);

  // Dispatch based on instance type.
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(equal, BUILTIN_CODE(masm->isolate(), ConstructFunction),
       RelocInfo::CODE_TARGET);

  // Check if target has a [[Construct]] internal method.
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsConstructor));
  __ j(zero, &non_constructor, Label::kNear);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
  __ j(equal, BUILTIN_CODE(masm->isolate(), ConstructBoundFunction),
       RelocInfo::CODE_TARGET);

  // Only dispatch to proxies after checking whether they are constructors.
  __ CmpInstanceType(rcx, JS_PROXY_TYPE);
  __ j(not_equal, &non_proxy);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructProxy),
          RelocInfo::CODE_TARGET);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  __ bind(&non_proxy);
  {
    // Overwrite the original receiver with the (original) target.
    __ movp(args.GetReceiverOperand(), rdi);
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, rdi);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructedNonConstructable),
          RelocInfo::CODE_TARGET);
}
    2564             : 
// Shared body for the two OSR builtins below: asks the runtime to compile an
// optimized code object for the function on the current JavaScript frame and,
// if one is produced, "returns" directly into its OSR entry point by
// rewriting the return address on the stack.
static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
                                              bool has_handler_frame) {
  // Lookup the function in the JavaScript frame. With a handler frame on
  // top (the bytecode case), the JavaScript frame is one level down.
  if (has_handler_frame) {
    __ movp(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
    __ movp(rax, Operand(rax, JavaScriptFrameConstants::kFunctionOffset));
  } else {
    __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  }

  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ Push(rax);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  Label skip;
  // If the code object is null, just return to the caller.
  __ cmpp(rax, Immediate(0));
  __ j(not_equal, &skip, Label::kNear);
  __ ret(0);

  __ bind(&skip);

  // Drop any potential handler frame that may be sitting on top of the actual
  // JavaScript frame. This is the case when OSR is triggered from bytecode.
  if (has_handler_frame) {
    __ leave();
  }

  // Load deoptimization data from the code object.
  __ movp(rbx, Operand(rax, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  __ SmiToInteger32(rbx,
                    Operand(rbx, FixedArray::OffsetOfElementAt(
                                     DeoptimizationData::kOsrPcOffsetIndex) -
                                     kHeapObjectTag));

  // Compute the target address = code_obj + header_size + osr_offset
  __ leap(rax, Operand(rax, rbx, times_1, Code::kHeaderSize - kHeapObjectTag));

  // Overwrite the return address on the stack.
  __ movq(StackOperandForReturnAddress(0), rax);

  // And "return" to the OSR entry point of the function.
  __ ret(0);
}
    2614             : 
// OSR entry used when no handler frame sits above the JavaScript frame.
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  Generate_OnStackReplacementHelper(masm, false);
}
    2618             : 
// OSR entry used when triggered from bytecode: a handler frame sits on top
// of the JavaScript frame and must be dropped before entering OSR code.
void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
  Generate_OnStackReplacementHelper(masm, true);
}
    2622             : 
// Trampoline for lazy compilation of a wasm function: preserves the wasm
// parameter registers, calls into the runtime to compile the function, and
// jumps to the instruction start of the returned Code object.
void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save all parameter registers (see wasm-linkage.cc). They might be
    // overwritten in the runtime call below. We don't have any callee-saved
    // registers in wasm, so no need to store anything else.
    constexpr Register gp_regs[]{rax, rbx, rcx, rdx, rsi, rdi};
    constexpr XMMRegister xmm_regs[]{xmm1, xmm2, xmm3, xmm4, xmm5, xmm6};

    for (auto reg : gp_regs) {
      __ Push(reg);
    }
    // XMM registers are 16 bytes each; store them in one reserved slab.
    __ subp(rsp, Immediate(16 * arraysize(xmm_regs)));
    for (int i = 0, e = arraysize(xmm_regs); i < e; ++i) {
      __ movdqu(Operand(rsp, 16 * i), xmm_regs[i]);
    }

    // Initialize rsi register with kZero, CEntryStub will use it to set the
    // current context on the isolate.
    __ Move(rsi, Smi::kZero);
    __ CallRuntime(Runtime::kWasmCompileLazy);
    // Store returned instruction start in r11. r11 is not in gp_regs, so it
    // survives the register restoration below.
    __ leap(r11, FieldOperand(rax, Code::kHeaderSize));

    // Restore registers in reverse order of saving.
    for (int i = arraysize(xmm_regs) - 1; i >= 0; --i) {
      __ movdqu(xmm_regs[i], Operand(rsp, 16 * i));
    }
    __ addp(rsp, Immediate(16 * arraysize(xmm_regs)));
    for (int i = arraysize(gp_regs) - 1; i >= 0; --i) {
      __ Pop(gp_regs[i]);
    }
  }
  // Now jump to the instructions of the returned code object.
  __ jmp(r11);
}
    2660             : 
    2661             : #undef __
    2662             : 
    2663             : }  // namespace internal
    2664             : }  // namespace v8
    2665             : 
    2666             : #endif  // V8_TARGET_ARCH_X64

Generated by: LCOV version 1.10