LCOV - code coverage report
Current view: top level - src/x64 - code-stubs-x64.cc (source / functions) Hit Total Coverage
Test: app.info Lines: 497 627 79.3 %
Date: 2017-10-20 Functions: 27 35 77.1 %

          Line data    Source code
       1             : // Copyright 2013 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #if V8_TARGET_ARCH_X64
       6             : 
       7             : #include "src/api-arguments.h"
       8             : #include "src/bootstrapper.h"
       9             : #include "src/code-stubs.h"
      10             : #include "src/counters.h"
      11             : #include "src/double.h"
      12             : #include "src/frame-constants.h"
      13             : #include "src/frames.h"
      14             : #include "src/heap/heap-inl.h"
      15             : #include "src/ic/handler-compiler.h"
      16             : #include "src/ic/ic.h"
      17             : #include "src/ic/stub-cache.h"
      18             : #include "src/isolate.h"
      19             : #include "src/objects-inl.h"
      20             : #include "src/objects/regexp-match-info.h"
      21             : #include "src/regexp/jsregexp.h"
      22             : #include "src/regexp/regexp-macro-assembler.h"
      23             : #include "src/runtime/runtime.h"
      24             : 
      25             : #include "src/x64/code-stubs-x64.h"  // Cannot be the first include.
      26             : 
      27             : namespace v8 {
      28             : namespace internal {
      29             : 
      30             : #define __ ACCESS_MASM(masm)
      31             : 
      32          31 : void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) {
      33          31 :   __ popq(rcx);
      34          62 :   __ movq(MemOperand(rsp, rax, times_8, 0), rdi);
      35          31 :   __ pushq(rdi);
      36          31 :   __ pushq(rbx);
      37          31 :   __ pushq(rcx);
      38          31 :   __ addq(rax, Immediate(3));
      39          31 :   __ TailCallRuntime(Runtime::kNewArray);
      40          31 : }
      41             : 
      42             : 
      43         124 : void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
      44          62 :   __ PushCallerSaved(save_doubles() ? kSaveFPRegs : kDontSaveFPRegs);
      45             :   const int argument_count = 1;
      46          62 :   __ PrepareCallCFunction(argument_count);
      47             :   __ LoadAddress(arg_reg_1,
      48         124 :                  ExternalReference::isolate_address(isolate()));
      49             : 
      50             :   AllowExternalCallThatCantCauseGC scope(masm);
      51             :   __ CallCFunction(
      52             :       ExternalReference::store_buffer_overflow_function(isolate()),
      53          62 :       argument_count);
      54          62 :   __ PopCallerSaved(save_doubles() ? kSaveFPRegs : kDontSaveFPRegs);
      55          62 :   __ ret(0);
      56          62 : }
      57             : 
      58             : 
      59        2582 : void DoubleToIStub::Generate(MacroAssembler* masm) {
      60        2582 :     Register final_result_reg = this->destination();
      61             : 
      62             :     Label check_negative, process_64_bits, done;
      63             : 
      64             :     // Account for return address and saved regs.
      65             :     const int kArgumentOffset = 3 * kRegisterSize;
      66             : 
      67        2582 :     MemOperand mantissa_operand(MemOperand(rsp, kArgumentOffset));
      68             :     MemOperand exponent_operand(
      69        2582 :         MemOperand(rsp, kArgumentOffset + kDoubleSize / 2));
      70             : 
      71             :     Register scratch1 = no_reg;
      72        2582 :     Register scratch_candidates[3] = { rbx, rdx, rdi };
      73        2613 :     for (int i = 0; i < 3; i++) {
      74        2613 :       scratch1 = scratch_candidates[i];
      75        2613 :       if (final_result_reg != scratch1) break;
      76             :     }
      77             : 
      78             :     // Since we must use rcx for shifts below, use some other register (rax)
      79             :     // to calculate the result if ecx is the requested return register.
      80        2582 :     Register result_reg = final_result_reg == rcx ? rax : final_result_reg;
      81             :     // Save ecx if it isn't the return register and therefore volatile, or if it
      82             :     // is the return register, then save the temp register we use in its stead
      83             :     // for the result.
      84        2582 :     Register save_reg = final_result_reg == rcx ? rax : rcx;
      85        2582 :     __ pushq(scratch1);
      86        2582 :     __ pushq(save_reg);
      87             : 
      88             :     __ movl(scratch1, mantissa_operand);
      89        2582 :     __ Movsd(kScratchDoubleReg, mantissa_operand);
      90             :     __ movl(rcx, exponent_operand);
      91             : 
      92        2582 :     __ andl(rcx, Immediate(HeapNumber::kExponentMask));
      93             :     __ shrl(rcx, Immediate(HeapNumber::kExponentShift));
      94        5164 :     __ leal(result_reg, MemOperand(rcx, -HeapNumber::kExponentBias));
      95        2582 :     __ cmpl(result_reg, Immediate(HeapNumber::kMantissaBits));
      96        2582 :     __ j(below, &process_64_bits);
      97             : 
      98             :     // Result is entirely in lower 32-bits of mantissa
      99             :     int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
     100        2582 :     __ subl(rcx, Immediate(delta));
     101             :     __ xorl(result_reg, result_reg);
     102        2582 :     __ cmpl(rcx, Immediate(31));
     103        2582 :     __ j(above, &done);
     104             :     __ shll_cl(scratch1);
     105        2582 :     __ jmp(&check_negative);
     106             : 
     107        2582 :     __ bind(&process_64_bits);
     108        2582 :     __ Cvttsd2siq(result_reg, kScratchDoubleReg);
     109        2582 :     __ jmp(&done, Label::kNear);
     110             : 
     111             :     // If the double was negative, negate the integer result.
     112        2582 :     __ bind(&check_negative);
     113             :     __ movl(result_reg, scratch1);
     114             :     __ negl(result_reg);
     115             :     __ cmpl(exponent_operand, Immediate(0));
     116        2582 :     __ cmovl(greater, result_reg, scratch1);
     117             : 
     118             :     // Restore registers
     119        2582 :     __ bind(&done);
     120        2582 :     if (final_result_reg != result_reg) {
     121             :       DCHECK(final_result_reg == rcx);
     122             :       __ movl(final_result_reg, result_reg);
     123             :     }
     124        2582 :     __ popq(save_reg);
     125        2582 :     __ popq(scratch1);
     126        2582 :     __ ret(0);
     127        2582 : }
     128             : 
     129          93 : void MathPowStub::Generate(MacroAssembler* masm) {
     130          31 :   const Register exponent = MathPowTaggedDescriptor::exponent();
     131             :   DCHECK(exponent == rdx);
     132             :   const Register scratch = rcx;
     133          31 :   const XMMRegister double_result = xmm3;
     134          31 :   const XMMRegister double_base = xmm2;
     135             :   const XMMRegister double_exponent = xmm1;
     136          31 :   const XMMRegister double_scratch = xmm4;
     137             : 
     138             :   Label call_runtime, done, exponent_not_smi, int_exponent;
     139             : 
     140             :   // Save 1 in double_result - we need this several times later on.
     141          31 :   __ movp(scratch, Immediate(1));
     142          31 :   __ Cvtlsi2sd(double_result, scratch);
     143             : 
     144          31 :   if (exponent_type() == TAGGED) {
     145           0 :     __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
     146           0 :     __ SmiToInteger32(exponent, exponent);
     147           0 :     __ jmp(&int_exponent);
     148             : 
     149           0 :     __ bind(&exponent_not_smi);
     150           0 :     __ Movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset));
     151             :   }
     152             : 
     153          31 :   if (exponent_type() != INTEGER) {
     154             :     Label fast_power, try_arithmetic_simplification;
     155             :     // Detect integer exponents stored as double.
     156             :     __ DoubleToI(exponent, double_exponent, double_scratch,
     157             :                  TREAT_MINUS_ZERO_AS_ZERO, &try_arithmetic_simplification,
     158             :                  &try_arithmetic_simplification,
     159          31 :                  &try_arithmetic_simplification);
     160          31 :     __ jmp(&int_exponent);
     161             : 
     162          31 :     __ bind(&try_arithmetic_simplification);
     163          31 :     __ Cvttsd2si(exponent, double_exponent);
     164             :     // Skip to runtime if possibly NaN (indicated by the indefinite integer).
     165          31 :     __ cmpl(exponent, Immediate(0x1));
     166          31 :     __ j(overflow, &call_runtime);
     167             : 
     168             :     // Using FPU instructions to calculate power.
     169             :     Label fast_power_failed;
     170          31 :     __ bind(&fast_power);
     171          31 :     __ fnclex();  // Clear flags to catch exceptions later.
     172             :     // Transfer (B)ase and (E)xponent onto the FPU register stack.
     173          31 :     __ subp(rsp, Immediate(kDoubleSize));
     174          31 :     __ Movsd(Operand(rsp, 0), double_exponent);
     175          31 :     __ fld_d(Operand(rsp, 0));  // E
     176          31 :     __ Movsd(Operand(rsp, 0), double_base);
     177          31 :     __ fld_d(Operand(rsp, 0));  // B, E
     178             : 
     179             :     // Exponent is in st(1) and base is in st(0)
     180             :     // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
     181             :     // FYL2X calculates st(1) * log2(st(0))
     182          31 :     __ fyl2x();    // X
     183          31 :     __ fld(0);     // X, X
     184          31 :     __ frndint();  // rnd(X), X
     185          31 :     __ fsub(1);    // rnd(X), X-rnd(X)
     186          31 :     __ fxch(1);    // X - rnd(X), rnd(X)
     187             :     // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
     188          31 :     __ f2xm1();    // 2^(X-rnd(X)) - 1, rnd(X)
     189          31 :     __ fld1();     // 1, 2^(X-rnd(X)) - 1, rnd(X)
     190          31 :     __ faddp(1);   // 2^(X-rnd(X)), rnd(X)
     191             :     // FSCALE calculates st(0) * 2^st(1)
     192          31 :     __ fscale();   // 2^X, rnd(X)
     193          31 :     __ fstp(1);
     194             :     // Bail out to runtime in case of exceptions in the status word.
     195          31 :     __ fnstsw_ax();
     196          31 :     __ testb(rax, Immediate(0x5F));  // Check for all but precision exception.
     197          31 :     __ j(not_zero, &fast_power_failed, Label::kNear);
     198          31 :     __ fstp_d(Operand(rsp, 0));
     199          31 :     __ Movsd(double_result, Operand(rsp, 0));
     200          31 :     __ addp(rsp, Immediate(kDoubleSize));
     201          31 :     __ jmp(&done);
     202             : 
     203          31 :     __ bind(&fast_power_failed);
     204          31 :     __ fninit();
     205          31 :     __ addp(rsp, Immediate(kDoubleSize));
     206          31 :     __ jmp(&call_runtime);
     207             :   }
     208             : 
     209             :   // Calculate power with integer exponent.
     210          31 :   __ bind(&int_exponent);
     211          31 :   const XMMRegister double_scratch2 = double_exponent;
     212             :   // Back up exponent as we need to check if exponent is negative later.
     213             :   __ movp(scratch, exponent);  // Back up exponent.
     214          31 :   __ Movsd(double_scratch, double_base);     // Back up base.
     215          31 :   __ Movsd(double_scratch2, double_result);  // Load double_exponent with 1.
     216             : 
     217             :   // Get absolute value of exponent.
     218             :   Label no_neg, while_true, while_false;
     219             :   __ testl(scratch, scratch);
     220          31 :   __ j(positive, &no_neg, Label::kNear);
     221             :   __ negl(scratch);
     222          31 :   __ bind(&no_neg);
     223             : 
     224          31 :   __ j(zero, &while_false, Label::kNear);
     225             :   __ shrl(scratch, Immediate(1));
     226             :   // Above condition means CF==0 && ZF==0.  This means that the
     227             :   // bit that has been shifted out is 0 and the result is not 0.
     228          31 :   __ j(above, &while_true, Label::kNear);
     229          31 :   __ Movsd(double_result, double_scratch);
     230          31 :   __ j(zero, &while_false, Label::kNear);
     231             : 
     232          31 :   __ bind(&while_true);
     233             :   __ shrl(scratch, Immediate(1));
     234          31 :   __ Mulsd(double_scratch, double_scratch);
     235          31 :   __ j(above, &while_true, Label::kNear);
     236          31 :   __ Mulsd(double_result, double_scratch);
     237          31 :   __ j(not_zero, &while_true);
     238             : 
     239          31 :   __ bind(&while_false);
     240             :   // If the exponent is negative, return 1/result.
     241             :   __ testl(exponent, exponent);
     242          31 :   __ j(greater, &done);
     243          31 :   __ Divsd(double_scratch2, double_result);
     244          31 :   __ Movsd(double_result, double_scratch2);
     245             :   // Test whether result is zero.  Bail out to check for subnormal result.
     246             :   // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
     247          31 :   __ Xorpd(double_scratch2, double_scratch2);
     248          31 :   __ Ucomisd(double_scratch2, double_result);
     249             :   // double_exponent aliased as double_scratch2 has already been overwritten
     250             :   // and may not have contained the exponent value in the first place when the
     251             :   // input was a smi.  We reset it with exponent value before bailing out.
     252          31 :   __ j(not_equal, &done);
     253          31 :   __ Cvtlsi2sd(double_exponent, exponent);
     254             : 
     255             :   // Returning or bailing out.
     256          31 :   __ bind(&call_runtime);
     257             :   // Move base to the correct argument register.  Exponent is already in xmm1.
     258          31 :   __ Movsd(xmm0, double_base);
     259             :   DCHECK(double_exponent == xmm1);
     260             :   {
     261             :     AllowExternalCallThatCantCauseGC scope(masm);
     262          31 :     __ PrepareCallCFunction(2);
     263             :     __ CallCFunction(ExternalReference::power_double_double_function(isolate()),
     264          31 :                      2);
     265             :   }
     266             :   // Return value is in xmm0.
     267          31 :   __ Movsd(double_result, xmm0);
     268             : 
     269          31 :   __ bind(&done);
     270          31 :   __ ret(0);
     271          31 : }
     272             : 
     273             : 
     274       13849 : bool CEntryStub::NeedsImmovableCode() {
     275       13849 :   return false;
     276             : }
     277             : 
     278             : 
     279          31 : void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
     280          31 :   CEntryStub::GenerateAheadOfTime(isolate);
     281          31 :   StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
     282             :   // It is important that the store buffer overflow stubs are generated first.
     283          31 :   CommonArrayConstructorStub::GenerateStubsAheadOfTime(isolate);
     284          31 :   StoreFastElementStub::GenerateAheadOfTime(isolate);
     285          31 : }
     286             : 
     287             : 
     288       54848 : void CodeStub::GenerateFPStubs(Isolate* isolate) {
     289       54848 : }
     290             : 
     291             : 
     292          31 : void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
     293             :   CEntryStub stub(isolate, 1, kDontSaveFPRegs);
     294          31 :   stub.GetCode();
     295             :   CEntryStub save_doubles(isolate, 1, kSaveFPRegs);
     296          31 :   save_doubles.GetCode();
     297          31 : }
     298             : 
     299             : 
     300       69245 : void CEntryStub::Generate(MacroAssembler* masm) {
     301             :   // rax: number of arguments including receiver
     302             :   // rbx: pointer to C function  (C callee-saved)
     303             :   // rbp: frame pointer of calling JS frame (restored after C call)
     304             :   // rsp: stack pointer  (restored after C call)
     305             :   // rsi: current context (restored)
     306             :   //
     307             :   // If argv_in_register():
     308             :   // r15: pointer to the first argument
     309             : 
     310       13849 :   ProfileEntryHookStub::MaybeCallEntryHook(masm);
     311             : 
     312             : #ifdef _WIN64
     313             :   // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9. It requires the
     314             :   // stack to be aligned to 16 bytes. It only allows a single-word to be
     315             :   // returned in register rax. Larger return sizes must be written to an address
     316             :   // passed as a hidden first argument.
     317             :   const Register kCCallArg0 = rcx;
     318             :   const Register kCCallArg1 = rdx;
     319             :   const Register kCCallArg2 = r8;
     320             :   const Register kCCallArg3 = r9;
     321             :   const int kArgExtraStackSpace = 2;
     322             :   const int kMaxRegisterResultSize = 1;
     323             : #else
     324             :   // GCC / Clang passes arguments in rdi, rsi, rdx, rcx, r8, r9. Simple results
     325             :   // are returned in rax, and a struct of two pointers are returned in rax+rdx.
     326             :   // Larger return sizes must be written to an address passed as a hidden first
     327             :   // argument.
     328             :   const Register kCCallArg0 = rdi;
     329             :   const Register kCCallArg1 = rsi;
     330             :   const Register kCCallArg2 = rdx;
     331             :   const Register kCCallArg3 = rcx;
     332             :   const int kArgExtraStackSpace = 0;
     333             :   const int kMaxRegisterResultSize = 2;
     334             : #endif  // _WIN64
     335             : 
     336             :   // Enter the exit frame that transitions from JavaScript to C++.
     337             :   int arg_stack_space =
     338       13849 :       kArgExtraStackSpace +
     339             :       (result_size() <= kMaxRegisterResultSize ? 0 : result_size());
     340       13849 :   if (argv_in_register()) {
     341             :     DCHECK(!save_doubles());
     342             :     DCHECK(!is_builtin_exit());
     343          62 :     __ EnterApiExitFrame(arg_stack_space);
     344             :     // Move argc into r14 (argv is already in r15).
     345          62 :     __ movp(r14, rax);
     346             :   } else {
     347             :     __ EnterExitFrame(
     348             :         arg_stack_space, save_doubles(),
     349       27574 :         is_builtin_exit() ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
     350             :   }
     351             : 
     352             :   // rbx: pointer to builtin function  (C callee-saved).
     353             :   // rbp: frame pointer of exit frame  (restored after C call).
     354             :   // rsp: stack pointer (restored after C call).
     355             :   // r14: number of arguments including receiver (C callee-saved).
     356             :   // r15: argv pointer (C callee-saved).
     357             : 
     358             :   // Check stack alignment.
     359       13849 :   if (FLAG_debug_code) {
     360          31 :     __ CheckStackAlignment();
     361             :   }
     362             : 
     363             :   // Call C function. The arguments object will be created by stubs declared by
     364             :   // DECLARE_RUNTIME_FUNCTION().
     365       13849 :   if (result_size() <= kMaxRegisterResultSize) {
     366             :     // Pass a pointer to the Arguments object as the first argument.
     367             :     // Return result in single register (rax), or a register pair (rax, rdx).
     368       13849 :     __ movp(kCCallArg0, r14);  // argc.
     369             :     __ movp(kCCallArg1, r15);  // argv.
     370      110823 :     __ Move(kCCallArg2, ExternalReference::isolate_address(isolate()));
     371             :   } else {
     372             :     DCHECK_LE(result_size(), 2);
     373             :     // Pass a pointer to the result location as the first argument.
     374           0 :     __ leap(kCCallArg0, StackSpaceOperand(kArgExtraStackSpace));
     375             :     // Pass a pointer to the Arguments object as the second argument.
     376             :     __ movp(kCCallArg1, r14);  // argc.
     377             :     __ movp(kCCallArg2, r15);  // argv.
     378           0 :     __ Move(kCCallArg3, ExternalReference::isolate_address(isolate()));
     379             :   }
     380       13849 :   __ call(rbx);
     381             : 
     382       13849 :   if (result_size() > kMaxRegisterResultSize) {
     383             :     // Read result values stored on stack. Result is stored
     384             :     // above the the two Arguments object slots on Win64.
     385             :     DCHECK_LE(result_size(), 2);
     386             :     __ movq(kReturnRegister0, StackSpaceOperand(kArgExtraStackSpace + 0));
     387             :     __ movq(kReturnRegister1, StackSpaceOperand(kArgExtraStackSpace + 1));
     388             :   }
     389             :   // Result is in rax or rdx:rax - do not destroy these registers!
     390             : 
     391             :   // Check result for exception sentinel.
     392             :   Label exception_returned;
     393       13849 :   __ CompareRoot(rax, Heap::kExceptionRootIndex);
     394       13849 :   __ j(equal, &exception_returned);
     395             : 
     396             :   // Check that there is no pending exception, otherwise we
     397             :   // should have returned the exception sentinel.
     398       13849 :   if (FLAG_debug_code) {
     399             :     Label okay;
     400          31 :     __ LoadRoot(r14, Heap::kTheHoleValueRootIndex);
     401             :     ExternalReference pending_exception_address(
     402          31 :         IsolateAddressId::kPendingExceptionAddress, isolate());
     403             :     Operand pending_exception_operand =
     404          31 :         masm->ExternalOperand(pending_exception_address);
     405             :     __ cmpp(r14, pending_exception_operand);
     406          31 :     __ j(equal, &okay, Label::kNear);
     407          31 :     __ int3();
     408          31 :     __ bind(&okay);
     409             :   }
     410             : 
     411             :   // Exit the JavaScript to C++ exit frame.
     412       27698 :   __ LeaveExitFrame(save_doubles(), !argv_in_register());
     413       13849 :   __ ret(0);
     414             : 
     415             :   // Handling of exception.
     416       13849 :   __ bind(&exception_returned);
     417             : 
     418             :   ExternalReference pending_handler_context_address(
     419       13849 :       IsolateAddressId::kPendingHandlerContextAddress, isolate());
     420             :   ExternalReference pending_handler_code_address(
     421       13849 :       IsolateAddressId::kPendingHandlerCodeAddress, isolate());
     422             :   ExternalReference pending_handler_offset_address(
     423       13849 :       IsolateAddressId::kPendingHandlerOffsetAddress, isolate());
     424             :   ExternalReference pending_handler_fp_address(
     425       13849 :       IsolateAddressId::kPendingHandlerFPAddress, isolate());
     426             :   ExternalReference pending_handler_sp_address(
     427       13849 :       IsolateAddressId::kPendingHandlerSPAddress, isolate());
     428             : 
     429             :   // Ask the runtime for help to determine the handler. This will set rax to
     430             :   // contain the current pending exception, don't clobber it.
     431             :   ExternalReference find_handler(Runtime::kUnwindAndFindExceptionHandler,
     432       13849 :                                  isolate());
     433             :   {
     434       13849 :     FrameScope scope(masm, StackFrame::MANUAL);
     435             :     __ movp(arg_reg_1, Immediate(0));  // argc.
     436             :     __ movp(arg_reg_2, Immediate(0));  // argv.
     437       13849 :     __ Move(arg_reg_3, ExternalReference::isolate_address(isolate()));
     438       13849 :     __ PrepareCallCFunction(3);
     439       13849 :     __ CallCFunction(find_handler, 3);
     440             :   }
     441             : 
     442             :   // Retrieve the handler context, SP and FP.
     443       13849 :   __ movp(rsi, masm->ExternalOperand(pending_handler_context_address));
     444       13849 :   __ movp(rsp, masm->ExternalOperand(pending_handler_sp_address));
     445       13849 :   __ movp(rbp, masm->ExternalOperand(pending_handler_fp_address));
     446             : 
     447             :   // If the handler is a JS frame, restore the context to the frame. Note that
     448             :   // the context will be set to (rsi == 0) for non-JS frames.
     449             :   Label skip;
     450             :   __ testp(rsi, rsi);
     451       13849 :   __ j(zero, &skip, Label::kNear);
     452       27698 :   __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
     453       13849 :   __ bind(&skip);
     454             : 
     455             :   // Compute the handler entry address and jump to it.
     456       13849 :   __ movp(rdi, masm->ExternalOperand(pending_handler_code_address));
     457       13849 :   __ movp(rdx, masm->ExternalOperand(pending_handler_offset_address));
     458             :   __ leap(rdi, FieldOperand(rdi, rdx, times_1, Code::kHeaderSize));
     459       13849 :   __ jmp(rdi);
     460       13849 : }
     461             : 
     462             : 
     463         186 : void JSEntryStub::Generate(MacroAssembler* masm) {
     464             :   Label invoke, handler_entry, exit;
     465             :   Label not_outermost_js, not_outermost_js_2;
     466             : 
     467          62 :   ProfileEntryHookStub::MaybeCallEntryHook(masm);
     468             : 
     469             :   {  // NOLINT. Scope block confuses linter.
     470             :     MacroAssembler::NoRootArrayScope uninitialized_root_register(masm);
     471             :     // Set up frame.
     472          62 :     __ pushq(rbp);
     473             :     __ movp(rbp, rsp);
     474             : 
     475             :     // Push the stack frame type.
     476          62 :     __ Push(Immediate(StackFrame::TypeToMarker(type())));  // context slot
     477             :     ExternalReference context_address(IsolateAddressId::kContextAddress,
     478         310 :                                       isolate());
     479          62 :     __ Load(kScratchRegister, context_address);
     480          62 :     __ Push(kScratchRegister);  // context
     481             :     // Save callee-saved registers (X64/X32/Win64 calling conventions).
     482          62 :     __ pushq(r12);
     483          62 :     __ pushq(r13);
     484          62 :     __ pushq(r14);
     485          62 :     __ pushq(r15);
     486             : #ifdef _WIN64
     487             :     __ pushq(rdi);  // Only callee save in Win64 ABI, argument in AMD64 ABI.
     488             :     __ pushq(rsi);  // Only callee save in Win64 ABI, argument in AMD64 ABI.
     489             : #endif
     490          62 :     __ pushq(rbx);
     491             : 
     492             : #ifdef _WIN64
     493             :     // On Win64 XMM6-XMM15 are callee-save
     494             :     __ subp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
     495             :     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0), xmm6);
     496             :     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1), xmm7);
     497             :     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2), xmm8);
     498             :     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3), xmm9);
     499             :     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4), xmm10);
     500             :     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5), xmm11);
     501             :     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6), xmm12);
     502             :     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7), xmm13);
     503             :     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8), xmm14);
     504             :     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9), xmm15);
     505             : #endif
     506             : 
     507             :     // Set up the roots and smi constant registers.
     508             :     // Needs to be done before any further smi loads.
     509          62 :     __ InitializeRootRegister();
     510             :   }
     511             : 
     512             :   // Save copies of the top frame descriptor on the stack.
     513          62 :   ExternalReference c_entry_fp(IsolateAddressId::kCEntryFPAddress, isolate());
     514             :   {
     515          62 :     Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
     516          62 :     __ Push(c_entry_fp_operand);
     517             :   }
     518             : 
     519             :   // If this is the outermost JS call, set js_entry_sp value.
     520          62 :   ExternalReference js_entry_sp(IsolateAddressId::kJSEntrySPAddress, isolate());
     521          62 :   __ Load(rax, js_entry_sp);
     522             :   __ testp(rax, rax);
     523          62 :   __ j(not_zero, &not_outermost_js);
     524          62 :   __ Push(Immediate(StackFrame::OUTERMOST_JSENTRY_FRAME));
     525             :   __ movp(rax, rbp);
     526          62 :   __ Store(js_entry_sp, rax);
     527             :   Label cont;
     528          62 :   __ jmp(&cont);
     529          62 :   __ bind(&not_outermost_js);
     530          62 :   __ Push(Immediate(StackFrame::INNER_JSENTRY_FRAME));
     531          62 :   __ bind(&cont);
     532             : 
     533             :   // Jump to a faked try block that does the invoke, with a faked catch
     534             :   // block that sets the pending exception.
     535          62 :   __ jmp(&invoke);
     536          62 :   __ bind(&handler_entry);
     537          62 :   handler_offset_ = handler_entry.pos();
     538             :   // Caught exception: Store result (exception) in the pending exception
     539             :   // field in the JSEnv and return a failure sentinel.
     540             :   ExternalReference pending_exception(
     541          62 :       IsolateAddressId::kPendingExceptionAddress, isolate());
     542          62 :   __ Store(pending_exception, rax);
     543          62 :   __ LoadRoot(rax, Heap::kExceptionRootIndex);
     544          62 :   __ jmp(&exit);
     545             : 
     546             :   // Invoke: Link this frame into the handler chain.
     547          62 :   __ bind(&invoke);
     548          62 :   __ PushStackHandler();
     549             : 
     550             :   // Invoke the function by calling through JS entry trampoline builtin and
     551             :   // pop the faked function when we return. We load the address from an
     552             :   // external reference instead of inlining the call target address directly
     553             :   // in the code, because the builtin stubs may not have been generated yet
     554             :   // at the time this code is generated.
     555          62 :   if (type() == StackFrame::CONSTRUCT_ENTRY) {
     556             :     __ Call(BUILTIN_CODE(isolate(), JSConstructEntryTrampoline),
     557          31 :             RelocInfo::CODE_TARGET);
     558             :   } else {
     559          31 :     __ Call(BUILTIN_CODE(isolate(), JSEntryTrampoline), RelocInfo::CODE_TARGET);
     560             :   }
     561             : 
     562             :   // Unlink this frame from the handler chain.
     563          62 :   __ PopStackHandler();
     564             : 
     565          62 :   __ bind(&exit);
     566             :   // Check if the current stack frame is marked as the outermost JS frame.
     567          62 :   __ Pop(rbx);
     568          62 :   __ cmpp(rbx, Immediate(StackFrame::OUTERMOST_JSENTRY_FRAME));
     569          62 :   __ j(not_equal, &not_outermost_js_2);
     570             :   __ Move(kScratchRegister, js_entry_sp);
     571         124 :   __ movp(Operand(kScratchRegister, 0), Immediate(0));
     572          62 :   __ bind(&not_outermost_js_2);
     573             : 
     574             :   // Restore the top frame descriptor from the stack.
     575          62 :   { Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
     576          62 :     __ Pop(c_entry_fp_operand);
     577             :   }
     578             : 
     579             :   // Restore callee-saved registers (X64 conventions).
     580             : #ifdef _WIN64
     581             :   // On Win64 XMM6-XMM15 are callee-save
     582             :   __ movdqu(xmm6, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0));
     583             :   __ movdqu(xmm7, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1));
     584             :   __ movdqu(xmm8, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2));
     585             :   __ movdqu(xmm9, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3));
     586             :   __ movdqu(xmm10, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4));
     587             :   __ movdqu(xmm11, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5));
     588             :   __ movdqu(xmm12, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6));
     589             :   __ movdqu(xmm13, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7));
     590             :   __ movdqu(xmm14, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8));
     591             :   __ movdqu(xmm15, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9));
     592             :   __ addp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
     593             : #endif
     594             : 
     595          62 :   __ popq(rbx);
     596             : #ifdef _WIN64
     597             :   // Callee save on in Win64 ABI, arguments/volatile in AMD64 ABI.
     598             :   __ popq(rsi);
     599             :   __ popq(rdi);
     600             : #endif
     601          62 :   __ popq(r15);
     602          62 :   __ popq(r14);
     603          62 :   __ popq(r13);
     604          62 :   __ popq(r12);
     605          62 :   __ addp(rsp, Immediate(2 * kPointerSize));  // remove markers
     606             : 
     607             :   // Restore frame pointer and return.
     608          62 :   __ popq(rbp);
     609          62 :   __ ret(0);
     610          62 : }
     611             : 
     612         829 : void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
     613             :                                                       Label* miss,
     614             :                                                       Label* done,
     615             :                                                       Register properties,
     616             :                                                       Handle<Name> name,
     617             :                                                       Register r0) {
     618             :   DCHECK(name->IsUniqueName());
     619             :   // If names of slots in range from 1 to kProbes - 1 for the hash value are
     620             :   // not equal to the name and kProbes-th slot is not used (its name is the
     621             :   // undefined value), it guarantees the hash table doesn't contain the
     622             :   // property. It's true even if some slots represent deleted properties
     623             :   // (their names are the hole value).
     624        4145 :   for (int i = 0; i < kInlinedProbes; i++) {
     625             :     // r0 points to properties hash.
     626             :     // Compute the masked index: (hash + i + i * i) & mask.
     627        3316 :     Register index = r0;
     628             :     // Capacity is smi 2^n.
     629        7461 :     __ SmiToInteger32(index, FieldOperand(properties, kCapacityOffset));
     630        3316 :     __ decl(index);
     631             :     __ andp(index,
     632        9948 :             Immediate(name->Hash() + NameDictionary::GetProbeOffset(i)));
     633             : 
     634             :     // Scale the index by multiplying by the entry size.
     635             :     STATIC_ASSERT(NameDictionary::kEntrySize == 3);
     636        6632 :     __ leap(index, Operand(index, index, times_2, 0));  // index *= 3.
     637             : 
     638        3316 :     Register entity_name = r0;
     639             :     // Having undefined at this place means the name is not contained.
     640             :     STATIC_ASSERT(kSmiTagSize == 1);
     641             :     __ movp(entity_name, Operand(properties,
     642             :                                  index,
     643             :                                  times_pointer_size,
     644        6632 :                                  kElementsStartOffset - kHeapObjectTag));
     645        3316 :     __ Cmp(entity_name, masm->isolate()->factory()->undefined_value());
     646        3316 :     __ j(equal, done);
     647             : 
     648             :     // Stop if found the property.
     649        3316 :     __ Cmp(entity_name, name);
     650        3316 :     __ j(equal, miss);
     651             :   }
     652             : 
     653             :   NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0);
     654         829 :   __ Push(name);
     655        1658 :   __ Push(Immediate(name->Hash()));
     656         829 :   __ CallStub(&stub);
     657         829 :   __ testp(r0, r0);
     658         829 :   __ j(not_zero, miss);
     659         829 :   __ jmp(done);
     660         829 : }
     661             : 
     662        1190 : void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
     663             :   // This stub overrides SometimesSetsUpAFrame() to return false.  That means
     664             :   // we cannot call anything that could cause a GC from this stub.
     665             :   // Stack frame on entry:
     666             :   //  rsp[0 * kPointerSize] : return address.
     667             :   //  rsp[1 * kPointerSize] : key's hash.
     668             :   //  rsp[2 * kPointerSize] : key.
     669             :   // Registers:
     670             :   //  dictionary_: NameDictionary to probe.
     671             :   //  result_: used as scratch.
     672             :   //  index_: will hold an index of entry if lookup is successful.
     673             :   //          might alias with result_.
     674             :   // Returns:
     675             :   //  result_ is zero if lookup failed, non zero otherwise.
     676             : 
     677             :   Label in_dictionary, not_in_dictionary;
     678             : 
     679             :   Register scratch = result();
     680             : 
     681          35 :   __ SmiToInteger32(scratch, FieldOperand(dictionary(), kCapacityOffset));
     682          35 :   __ decl(scratch);
     683          35 :   __ Push(scratch);
     684             : 
     685             :   // If names of slots in range from 1 to kProbes - 1 for the hash value are
     686             :   // not equal to the name and kProbes-th slot is not used (its name is the
     687             :   // undefined value), it guarantees the hash table doesn't contain the
     688             :   // property. It's true even if some slots represent deleted properties
     689             :   // (their names are the null value).
     690             :   StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER,
     691             :                               kPointerSize);
     692         595 :   for (int i = kInlinedProbes; i < kTotalProbes; i++) {
     693             :     // Compute the masked index: (hash + i + i * i) & mask.
     694         560 :     __ movp(scratch, args.GetArgumentOperand(1));
     695         560 :     if (i > 0) {
     696        1680 :       __ addl(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
     697             :     }
     698        1120 :     __ andp(scratch, Operand(rsp, 0));
     699             : 
     700             :     // Scale the index by multiplying by the entry size.
     701             :     STATIC_ASSERT(NameDictionary::kEntrySize == 3);
     702        1120 :     __ leap(index(), Operand(scratch, scratch, times_2, 0));  // index *= 3.
     703             : 
     704             :     // Having undefined at this place means the name is not contained.
     705             :     __ movp(scratch, Operand(dictionary(), index(), times_pointer_size,
     706        1120 :                              kElementsStartOffset - kHeapObjectTag));
     707             : 
     708        1120 :     __ Cmp(scratch, isolate()->factory()->undefined_value());
     709         560 :     __ j(equal, &not_in_dictionary);
     710             : 
     711             :     // Stop if found the property.
     712         560 :     __ cmpp(scratch, args.GetArgumentOperand(0));
     713         560 :     __ j(equal, &in_dictionary);
     714             :   }
     715             : 
     716          35 :   __ bind(&in_dictionary);
     717             :   __ movp(scratch, Immediate(1));
     718          35 :   __ Drop(1);
     719          35 :   __ ret(2 * kPointerSize);
     720             : 
     721          35 :   __ bind(&not_in_dictionary);
     722             :   __ movp(scratch, Immediate(0));
     723          35 :   __ Drop(1);
     724          35 :   __ ret(2 * kPointerSize);
     725          35 : }
     726             : 
     727             : 
     728       54879 : void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
     729             :     Isolate* isolate) {
     730             :   StoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs);
     731       54879 :   stub1.GetCode();
     732             :   StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
     733       54879 :   stub2.GetCode();
     734       54879 : }
     735             : 
     736           0 : RecordWriteStub::Mode RecordWriteStub::GetMode(Code* stub) {
     737           0 :   byte first_instruction = stub->instruction_start()[0];
     738           0 :   byte second_instruction = stub->instruction_start()[2];
     739             : 
     740           0 :   if (first_instruction == kTwoByteJumpInstruction) {
     741             :     return INCREMENTAL;
     742             :   }
     743             : 
     744             :   DCHECK_EQ(first_instruction, kTwoByteNopInstruction);
     745             : 
     746           0 :   if (second_instruction == kTwoByteJumpInstruction) {
     747             :     return INCREMENTAL_COMPACTION;
     748             :   }
     749             : 
     750             :   DCHECK_EQ(second_instruction, kTwoByteNopInstruction);
     751             : 
     752           0 :   return STORE_BUFFER_ONLY;
     753             : }
     754             : 
     755           0 : void RecordWriteStub::Patch(Code* stub, Mode mode) {
     756           0 :   switch (mode) {
     757             :     case STORE_BUFFER_ONLY:
     758             :       DCHECK(GetMode(stub) == INCREMENTAL ||
     759             :              GetMode(stub) == INCREMENTAL_COMPACTION);
     760           0 :       stub->instruction_start()[0] = kTwoByteNopInstruction;
     761           0 :       stub->instruction_start()[2] = kTwoByteNopInstruction;
     762           0 :       break;
     763             :     case INCREMENTAL:
     764             :       DCHECK(GetMode(stub) == STORE_BUFFER_ONLY);
     765           0 :       stub->instruction_start()[0] = kTwoByteJumpInstruction;
     766           0 :       break;
     767             :     case INCREMENTAL_COMPACTION:
     768             :       DCHECK(GetMode(stub) == STORE_BUFFER_ONLY);
     769           0 :       stub->instruction_start()[0] = kTwoByteNopInstruction;
     770           0 :       stub->instruction_start()[2] = kTwoByteJumpInstruction;
     771           0 :       break;
     772             :   }
     773             :   DCHECK(GetMode(stub) == mode);
     774           0 :   Assembler::FlushICache(stub->GetIsolate(), stub->instruction_start(), 7);
     775           0 : }
     776             : 
     777             : // Takes the input in 3 registers: address_ value_ and object_.  A pointer to
     778             : // the value has just been written into the object, now this stub makes sure
     779             : // we keep the GC informed.  The word in the object where the value has been
     780             : // written is in the address register.
     781           0 : void RecordWriteStub::Generate(MacroAssembler* masm) {
     782             :   Label skip_to_incremental;
     783             :   Label second_instr;
     784             : 
     785             :   // The first two instructions are generated with labels so as to get the
     786             :   // offset fixed up correctly by the bind(Label*) call.  We patch it back and
     787             :   // forth between a compare instructions (a nop in this position) and the
     788             :   // real branch when we start and stop incremental heap marking.
     789             :   // See RecordWriteStub::Patch for details.
     790           0 :   __ jmp(&skip_to_incremental, Label::kNear);
     791           0 :   __ bind(&second_instr);
     792           0 :   __ jmp(&skip_to_incremental, Label::kNear);
     793             : 
     794           0 :   if (remembered_set_action() == EMIT_REMEMBERED_SET) {
     795           0 :     __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode());
     796             :   } else {
     797           0 :     __ ret(0);
     798             :   }
     799             : 
     800           0 :   __ bind(&skip_to_incremental);
     801             : 
     802           0 :   GenerateIncremental(masm, &second_instr);
     803             : 
     804             :   // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
     805             :   // Will be checked in IncrementalMarking::ActivateGeneratedStub.
     806             :   masm->set_byte_at(0, kTwoByteNopInstruction);
     807             :   masm->set_byte_at(2, kTwoByteNopInstruction);
     808           0 : }
     809             : 
     810           0 : void RecordWriteStub::GenerateIncremental(MacroAssembler* masm,
     811           0 :                                           Label* second_instr) {
     812           0 :   regs_.Save(masm);
     813             : 
     814           0 :   if (remembered_set_action() == EMIT_REMEMBERED_SET) {
     815             :     Label dont_need_remembered_set;
     816             : 
     817           0 :     __ movp(regs_.scratch0(), Operand(regs_.address(), 0));
     818             :     __ JumpIfNotInNewSpace(regs_.scratch0(),
     819             :                            regs_.scratch0(),
     820             :                            &dont_need_remembered_set);
     821             : 
     822             :     __ JumpIfInNewSpace(regs_.object(), regs_.scratch0(),
     823             :                         &dont_need_remembered_set);
     824             : 
     825             :     // First notify the incremental marker if necessary, then update the
     826             :     // remembered set.
     827             :     CheckNeedsToInformIncrementalMarker(
     828             :         masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker,
     829           0 :         second_instr);
     830           0 :     InformIncrementalMarker(masm);
     831           0 :     regs_.Restore(masm);
     832           0 :     __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode());
     833             : 
     834           0 :     __ bind(&dont_need_remembered_set);
     835             :   }
     836             : 
     837             :   CheckNeedsToInformIncrementalMarker(
     838           0 :       masm, kReturnOnNoNeedToInformIncrementalMarker, second_instr);
     839           0 :   InformIncrementalMarker(masm);
     840           0 :   regs_.Restore(masm);
     841           0 :   __ ret(0);
     842           0 : }
     843             : 
     844             : 
     845           0 : void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
     846             :   regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode());
     847             :   Register address =
     848           0 :       arg_reg_1 == regs_.address() ? kScratchRegister : regs_.address();
     849             :   DCHECK(address != regs_.object());
     850             :   DCHECK(address != arg_reg_1);
     851           0 :   __ Move(address, regs_.address());
     852           0 :   __ Move(arg_reg_1, regs_.object());
     853             :   // TODO(gc) Can we just set address arg2 in the beginning?
     854           0 :   __ Move(arg_reg_2, address);
     855             :   __ LoadAddress(arg_reg_3,
     856           0 :                  ExternalReference::isolate_address(isolate()));
     857             :   int argument_count = 3;
     858             : 
     859             :   AllowExternalCallThatCantCauseGC scope(masm);
     860           0 :   __ PrepareCallCFunction(argument_count);
     861             :   __ CallCFunction(
     862             :       ExternalReference::incremental_marking_record_write_function(isolate()),
     863           0 :       argument_count);
     864             :   regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode());
     865           0 : }
     866             : 
     867           0 : void RecordWriteStub::Activate(Code* code) {
     868           0 :   code->GetHeap()->incremental_marking()->ActivateGeneratedStub(code);
     869           0 : }
     870             : 
     871           0 : void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
     872             :     MacroAssembler* masm, OnNoNeedToInformIncrementalMarker on_no_need,
     873           0 :     Label* second_instr) {
     874             :   Label need_incremental;
     875             :   Label need_incremental_pop_object;
     876             : 
     877             : #ifndef V8_CONCURRENT_MARKING
     878             :   Label on_black;
     879             :   // Let's look at the color of the object:  If it is not black we don't have
     880             :   // to inform the incremental marker.
     881             :   __ JumpIfBlack(regs_.object(),
     882             :                  regs_.scratch0(),
     883             :                  regs_.scratch1(),
     884             :                  &on_black,
     885             :                  Label::kNear);
     886             : 
     887             :   regs_.Restore(masm);
     888             :   if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
     889             :     __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode());
     890             :   } else {
     891             :     __ ret(0);
     892             :   }
     893             : 
     894             :   __ bind(&on_black);
     895             : #endif
     896             : 
     897             :   // Get the value from the slot.
     898           0 :   __ movp(regs_.scratch0(), Operand(regs_.address(), 0));
     899             : 
     900             :   Label ensure_not_white;
     901             :   // If second instruction is TwoByteNopInstruction, we're in noncompacting
     902             :   // mode.
     903           0 :   __ cmpb(Operand(second_instr), Immediate(kTwoByteNopInstruction));
     904           0 :   __ j(equal, &ensure_not_white, Label::kNear);
     905             :   __ CheckPageFlag(regs_.scratch0(),  // Contains value.
     906             :                    regs_.scratch1(),  // Scratch.
     907             :                    MemoryChunk::kEvacuationCandidateMask, zero,
     908           0 :                    &ensure_not_white, Label::kNear);
     909             : 
     910             :   __ CheckPageFlag(regs_.object(),
     911             :                    regs_.scratch1(),  // Scratch.
     912             :                    MemoryChunk::kSkipEvacuationSlotsRecordingMask, zero,
     913           0 :                    &need_incremental);
     914             : 
     915           0 :   __ bind(&ensure_not_white);
     916             : 
     917             :   // We need an extra register for this, so we push the object register
     918             :   // temporarily.
     919           0 :   __ Push(regs_.object());
     920             :   __ JumpIfWhite(regs_.scratch0(),  // The value.
     921             :                  regs_.scratch1(),  // Scratch.
     922             :                  regs_.object(),    // Scratch.
     923           0 :                  &need_incremental_pop_object, Label::kNear);
     924           0 :   __ Pop(regs_.object());
     925             : 
     926           0 :   regs_.Restore(masm);
     927           0 :   if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
     928           0 :     __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode());
     929             :   } else {
     930           0 :     __ ret(0);
     931             :   }
     932             : 
     933           0 :   __ bind(&need_incremental_pop_object);
     934           0 :   __ Pop(regs_.object());
     935             : 
     936           0 :   __ bind(&need_incremental);
     937             : 
     938             :   // Fall through when we need to inform the incremental marker.
     939           0 : }
     940             : 
     941             : 
     942       14004 : void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
     943       14004 :   if (masm->isolate()->function_entry_hook() != nullptr) {
     944             :     ProfileEntryHookStub stub(masm->isolate());
     945           0 :     masm->CallStub(&stub);
     946             :   }
     947       14004 : }
     948             : 
     949      619711 : void ProfileEntryHookStub::MaybeCallEntryHookDelayed(TurboAssembler* tasm,
     950             :                                                      Zone* zone) {
     951      619711 :   if (tasm->isolate()->function_entry_hook() != nullptr) {
     952           0 :     tasm->CallStubDelayed(new (zone) ProfileEntryHookStub(nullptr));
     953             :   }
     954      619711 : }
     955             : 
     956           0 : void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
     957             :   // This stub can be called from essentially anywhere, so it needs to save
     958             :   // all volatile and callee-save registers.
     959             :   const size_t kNumSavedRegisters = 2;
     960           0 :   __ pushq(arg_reg_1);
     961           0 :   __ pushq(arg_reg_2);
     962             : 
     963             :   // Calculate the original stack pointer and store it in the second arg.
     964             :   __ leap(arg_reg_2,
     965           0 :          Operand(rsp, kNumSavedRegisters * kRegisterSize + kPCOnStackSize));
     966             : 
     967             :   // Calculate the function address to the first arg.
     968           0 :   __ movp(arg_reg_1, Operand(rsp, kNumSavedRegisters * kRegisterSize));
     969           0 :   __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));
     970             : 
     971             :   // Save the remainder of the volatile registers.
     972           0 :   masm->PushCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);
     973             : 
     974             :   // Call the entry hook function.
     975           0 :   __ Move(rax, FUNCTION_ADDR(isolate()->function_entry_hook()),
     976             :           Assembler::RelocInfoNone());
     977             : 
     978             :   AllowExternalCallThatCantCauseGC scope(masm);
     979             : 
     980             :   const int kArgumentCount = 2;
     981           0 :   __ PrepareCallCFunction(kArgumentCount);
     982           0 :   __ CallCFunction(rax, kArgumentCount);
     983             : 
     984             :   // Restore volatile regs.
     985           0 :   masm->PopCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);
     986           0 :   __ popq(arg_reg_2);
     987           0 :   __ popq(arg_reg_1);
     988             : 
     989           0 :   __ Ret();
     990           0 : }
     991             : 
     992             : 
     993             : template<class T>
     994          62 : static void CreateArrayDispatch(MacroAssembler* masm,
     995             :                                 AllocationSiteOverrideMode mode) {
     996          62 :   if (mode == DISABLE_ALLOCATION_SITES) {
     997         217 :     T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
     998          31 :     __ TailCallStub(&stub);
     999          31 :   } else if (mode == DONT_OVERRIDE) {
    1000             :     int last_index =
    1001          31 :         GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
    1002         217 :     for (int i = 0; i <= last_index; ++i) {
    1003             :       Label next;
    1004         186 :       ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
    1005         372 :       __ cmpl(rdx, Immediate(kind));
    1006         186 :       __ j(not_equal, &next);
    1007             :       T stub(masm->isolate(), kind);
    1008         186 :       __ TailCallStub(&stub);
    1009         186 :       __ bind(&next);
    1010             :     }
    1011             : 
    1012             :     // If we reached this point there is a problem.
    1013          31 :     __ Abort(kUnexpectedElementsKindInArrayConstructor);
    1014             :   } else {
    1015           0 :     UNREACHABLE();
    1016             :   }
    1017          62 : }
    1018             : 
    1019             : 
    1020          62 : static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
    1021             :                                            AllocationSiteOverrideMode mode) {
    1022             :   // rbx - allocation site (if mode != DISABLE_ALLOCATION_SITES)
    1023             :   // rdx - kind (if mode != DISABLE_ALLOCATION_SITES)
    1024             :   // rax - number of arguments
    1025             :   // rdi - constructor?
    1026             :   // rsp[0] - return address
    1027             :   // rsp[8] - last argument
    1028             : 
    1029             :   STATIC_ASSERT(PACKED_SMI_ELEMENTS == 0);
    1030             :   STATIC_ASSERT(HOLEY_SMI_ELEMENTS == 1);
    1031             :   STATIC_ASSERT(PACKED_ELEMENTS == 2);
    1032             :   STATIC_ASSERT(HOLEY_ELEMENTS == 3);
    1033             :   STATIC_ASSERT(PACKED_DOUBLE_ELEMENTS == 4);
    1034             :   STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS == 5);
    1035             : 
    1036          62 :   if (mode == DISABLE_ALLOCATION_SITES) {
    1037             :     ElementsKind initial = GetInitialFastElementsKind();
    1038             :     ElementsKind holey_initial = GetHoleyElementsKind(initial);
    1039             : 
    1040             :     ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
    1041             :                                                   holey_initial,
    1042         217 :                                                   DISABLE_ALLOCATION_SITES);
    1043          31 :     __ TailCallStub(&stub_holey);
    1044          31 :   } else if (mode == DONT_OVERRIDE) {
    1045             :     // is the low bit set? If so, we are holey and that is good.
    1046             :     Label normal_sequence;
    1047          31 :     __ testb(rdx, Immediate(1));
    1048          31 :     __ j(not_zero, &normal_sequence);
    1049             : 
    1050             :     // We are going to create a holey array, but our kind is non-holey.
    1051             :     // Fix kind and retry (only if we have an allocation site in the slot).
    1052             :     __ incl(rdx);
    1053             : 
    1054          31 :     if (FLAG_debug_code) {
    1055             :       Handle<Map> allocation_site_map =
    1056             :           masm->isolate()->factory()->allocation_site_map();
    1057           0 :       __ Cmp(FieldOperand(rbx, 0), allocation_site_map);
    1058           0 :       __ Assert(equal, kExpectedAllocationSite);
    1059             :     }
    1060             : 
    1061             :     // Save the resulting elements kind in type info. We can't just store r3
    1062             :     // in the AllocationSite::transition_info field because elements kind is
    1063             :     // restricted to a portion of the field...upper bits need to be left alone.
    1064             :     STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
    1065             :     __ SmiAddConstant(
    1066             :         FieldOperand(rbx, AllocationSite::kTransitionInfoOrBoilerplateOffset),
    1067          31 :         Smi::FromInt(kFastElementsKindPackedToHoley));
    1068             : 
    1069          31 :     __ bind(&normal_sequence);
    1070             :     int last_index =
    1071          31 :         GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
    1072         217 :     for (int i = 0; i <= last_index; ++i) {
    1073             :       Label next;
    1074         186 :       ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
    1075         372 :       __ cmpl(rdx, Immediate(kind));
    1076         186 :       __ j(not_equal, &next);
    1077             :       ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
    1078         186 :       __ TailCallStub(&stub);
    1079         186 :       __ bind(&next);
    1080             :     }
    1081             : 
    1082             :     // If we reached this point there is a problem.
    1083          31 :     __ Abort(kUnexpectedElementsKindInArrayConstructor);
    1084             :   } else {
    1085           0 :     UNREACHABLE();
    1086             :   }
    1087          62 : }
    1088             : 
    1089             : 
    1090             : template<class T>
    1091          62 : static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
    1092             :   int to_index =
    1093          62 :       GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
    1094         434 :   for (int i = 0; i <= to_index; ++i) {
    1095         372 :     ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
    1096             :     T stub(isolate, kind);
    1097         372 :     stub.GetCode();
    1098         372 :     if (AllocationSite::ShouldTrack(kind)) {
    1099             :       T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
    1100         124 :       stub1.GetCode();
    1101             :     }
    1102             :   }
    1103          62 : }
    1104             : 
    1105          31 : void CommonArrayConstructorStub::GenerateStubsAheadOfTime(Isolate* isolate) {
    1106             :   ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
    1107          31 :       isolate);
    1108             :   ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
    1109          31 :       isolate);
    1110             :   ArrayNArgumentsConstructorStub stub(isolate);
    1111          31 :   stub.GetCode();
    1112             : 
    1113          31 :   ElementsKind kinds[2] = {PACKED_ELEMENTS, HOLEY_ELEMENTS};
    1114          93 :   for (int i = 0; i < 2; i++) {
    1115             :     // For internal arrays we only need a few things
    1116          62 :     InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
    1117          62 :     stubh1.GetCode();
    1118             :     InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
    1119          62 :     stubh2.GetCode();
    1120             :   }
    1121          31 : }
    1122             : 
    1123          62 : void ArrayConstructorStub::GenerateDispatchToArrayStub(
    1124             :     MacroAssembler* masm, AllocationSiteOverrideMode mode) {
    1125             :   Label not_zero_case, not_one_case;
    1126          62 :   __ testp(rax, rax);
    1127          62 :   __ j(not_zero, &not_zero_case);
    1128          62 :   CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
    1129             : 
    1130          62 :   __ bind(&not_zero_case);
    1131          62 :   __ cmpl(rax, Immediate(1));
    1132          62 :   __ j(greater, &not_one_case);
    1133          62 :   CreateArrayDispatchOneArgument(masm, mode);
    1134             : 
    1135          62 :   __ bind(&not_one_case);
    1136          62 :   ArrayNArgumentsConstructorStub stub(masm->isolate());
    1137          62 :   __ TailCallStub(&stub);
    1138          62 : }
    1139             : 
    1140          31 : void ArrayConstructorStub::Generate(MacroAssembler* masm) {
    1141             :   // ----------- S t a t e -------------
    1142             :   //  -- rax    : argc
    1143             :   //  -- rbx    : AllocationSite or undefined
    1144             :   //  -- rdi    : constructor
    1145             :   //  -- rdx    : new target
    1146             :   //  -- rsp[0] : return address
    1147             :   //  -- rsp[8] : last argument
    1148             :   // -----------------------------------
    1149          31 :   if (FLAG_debug_code) {
    1150             :     // The array construct code is only set for the global and natives
    1151             :     // builtin Array functions which always have maps.
    1152             : 
    1153             :     // Initial map for the builtin Array function should be a map.
    1154           0 :     __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    1155             :     // Will both indicate a nullptr and a Smi.
    1156             :     STATIC_ASSERT(kSmiTag == 0);
    1157           0 :     Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
    1158           0 :     __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    1159           0 :     __ CmpObjectType(rcx, MAP_TYPE, rcx);
    1160           0 :     __ Check(equal, kUnexpectedInitialMapForArrayFunction);
    1161             : 
    1162             :     // We should either have undefined in rbx or a valid AllocationSite
    1163           0 :     __ AssertUndefinedOrAllocationSite(rbx);
    1164             :   }
    1165             : 
    1166             :   // Enter the context of the Array function.
    1167          31 :   __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
    1168             : 
    1169             :   Label subclassing;
    1170          31 :   __ cmpp(rdi, rdx);
    1171          31 :   __ j(not_equal, &subclassing);
    1172             : 
    1173             :   Label no_info;
    1174             :   // If the feedback vector is the undefined value call an array constructor
    1175             :   // that doesn't use AllocationSites.
    1176          31 :   __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
    1177          31 :   __ j(equal, &no_info);
    1178             : 
    1179             :   // Only look at the lower 16 bits of the transition info.
    1180             :   __ movp(rdx, FieldOperand(
    1181             :                    rbx, AllocationSite::kTransitionInfoOrBoilerplateOffset));
    1182          31 :   __ SmiToInteger32(rdx, rdx);
    1183             :   STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
    1184          31 :   __ andp(rdx, Immediate(AllocationSite::ElementsKindBits::kMask));
    1185          31 :   GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
    1186             : 
    1187          31 :   __ bind(&no_info);
    1188          31 :   GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
    1189             : 
    1190             :   // Subclassing
    1191          31 :   __ bind(&subclassing);
    1192             :   StackArgumentsAccessor args(rsp, rax);
    1193             :   __ movp(args.GetReceiverOperand(), rdi);
    1194          31 :   __ addp(rax, Immediate(3));
    1195             :   __ PopReturnAddressTo(rcx);
    1196          31 :   __ Push(rdx);
    1197          31 :   __ Push(rbx);
    1198             :   __ PushReturnAddressFrom(rcx);
    1199          31 :   __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate()));
    1200          31 : }
    1201             : 
    1202             : 
    1203          62 : void InternalArrayConstructorStub::GenerateCase(
    1204             :     MacroAssembler* masm, ElementsKind kind) {
    1205             :   Label not_zero_case, not_one_case;
    1206             :   Label normal_sequence;
    1207             : 
    1208          62 :   __ testp(rax, rax);
    1209          62 :   __ j(not_zero, &not_zero_case);
    1210         217 :   InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
    1211          62 :   __ TailCallStub(&stub0);
    1212             : 
    1213          62 :   __ bind(&not_zero_case);
    1214          62 :   __ cmpl(rax, Immediate(1));
    1215          62 :   __ j(greater, &not_one_case);
    1216             : 
    1217          62 :   if (IsFastPackedElementsKind(kind)) {
    1218             :     // We might need to create a holey array
    1219             :     // look at the first argument
    1220             :     StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
    1221          31 :     __ movp(rcx, args.GetArgumentOperand(0));
    1222             :     __ testp(rcx, rcx);
    1223          31 :     __ j(zero, &normal_sequence);
    1224             : 
    1225             :     InternalArraySingleArgumentConstructorStub
    1226             :         stub1_holey(isolate(), GetHoleyElementsKind(kind));
    1227          31 :     __ TailCallStub(&stub1_holey);
    1228             :   }
    1229             : 
    1230          62 :   __ bind(&normal_sequence);
    1231             :   InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
    1232          62 :   __ TailCallStub(&stub1);
    1233             : 
    1234          62 :   __ bind(&not_one_case);
    1235             :   ArrayNArgumentsConstructorStub stubN(isolate());
    1236          62 :   __ TailCallStub(&stubN);
    1237          62 : }
    1238             : 
    1239             : 
    1240          31 : void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
    1241             :   // ----------- S t a t e -------------
    1242             :   //  -- rax    : argc
    1243             :   //  -- rdi    : constructor
    1244             :   //  -- rsp[0] : return address
    1245             :   //  -- rsp[8] : last argument
    1246             :   // -----------------------------------
    1247             : 
    1248          31 :   if (FLAG_debug_code) {
    1249             :     // The array construct code is only set for the global and natives
    1250             :     // builtin Array functions which always have maps.
    1251             : 
    1252             :     // Initial map for the builtin Array function should be a map.
    1253           0 :     __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    1254             :     // Will both indicate a nullptr and a Smi.
    1255             :     STATIC_ASSERT(kSmiTag == 0);
    1256           0 :     Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
    1257           0 :     __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    1258           0 :     __ CmpObjectType(rcx, MAP_TYPE, rcx);
    1259           0 :     __ Check(equal, kUnexpectedInitialMapForArrayFunction);
    1260             :   }
    1261             : 
    1262             :   // Figure out the right elements kind
    1263          31 :   __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    1264             : 
    1265             :   // Load the map's "bit field 2" into |result|. We only need the first byte,
    1266             :   // but the following masking takes care of that anyway.
    1267             :   __ movzxbp(rcx, FieldOperand(rcx, Map::kBitField2Offset));
    1268             :   // Retrieve elements_kind from bit field 2.
    1269          31 :   __ DecodeField<Map::ElementsKindBits>(rcx);
    1270             : 
    1271          31 :   if (FLAG_debug_code) {
    1272             :     Label done;
    1273           0 :     __ cmpl(rcx, Immediate(PACKED_ELEMENTS));
    1274           0 :     __ j(equal, &done);
    1275           0 :     __ cmpl(rcx, Immediate(HOLEY_ELEMENTS));
    1276             :     __ Assert(equal,
    1277           0 :               kInvalidElementsKindForInternalArrayOrInternalPackedArray);
    1278           0 :     __ bind(&done);
    1279             :   }
    1280             : 
    1281             :   Label fast_elements_case;
    1282          31 :   __ cmpl(rcx, Immediate(PACKED_ELEMENTS));
    1283          31 :   __ j(equal, &fast_elements_case);
    1284          31 :   GenerateCase(masm, HOLEY_ELEMENTS);
    1285             : 
    1286          31 :   __ bind(&fast_elements_case);
    1287          31 :   GenerateCase(masm, PACKED_ELEMENTS);
    1288          31 : }
    1289             : 
    1290             : static int Offset(ExternalReference ref0, ExternalReference ref1) {
    1291        7022 :   int64_t offset = (ref0.address() - ref1.address());
    1292             :   // Check that fits into int.
    1293             :   DCHECK(static_cast<int>(offset) == offset);
    1294        7022 :   return static_cast<int>(offset);
    1295             : }
    1296             : 
    1297             : // Prepares stack to put arguments (aligns and so on).  WIN64 calling
    1298             : // convention requires to put the pointer to the return value slot into
    1299             : // rcx (rcx must be preserverd until CallApiFunctionAndReturn).  Saves
    1300             : // context (rsi).  Clobbers rax.  Allocates arg_stack_space * kPointerSize
    1301             : // inside the exit frame (not GCed) accessible via StackSpaceOperand.
    1302             : static void PrepareCallApiFunction(MacroAssembler* masm, int arg_stack_space) {
    1303        3511 :   __ EnterApiExitFrame(arg_stack_space);
    1304             : }
    1305             : 
    1306             : 
    1307             : // Calls an API function.  Allocates HandleScope, extracts returned value
    1308             : // from handle and propagates exceptions.  Clobbers r14, r15, rbx and
    1309             : // caller-save registers.  Restores context.  On return removes
    1310             : // stack_space * kPointerSize (GCed).
    1311        3511 : static void CallApiFunctionAndReturn(MacroAssembler* masm,
    1312             :                                      Register function_address,
    1313             :                                      ExternalReference thunk_ref,
    1314             :                                      Register thunk_last_arg, int stack_space,
    1315             :                                      Operand* stack_space_operand,
    1316             :                                      Operand return_value_operand,
    1317             :                                      Operand* context_restore_operand) {
    1318             :   Label prologue;
    1319             :   Label promote_scheduled_exception;
    1320             :   Label delete_allocated_handles;
    1321             :   Label leave_exit_frame;
    1322             :   Label write_back;
    1323             : 
    1324        3511 :   Isolate* isolate = masm->isolate();
    1325             :   Factory* factory = isolate->factory();
    1326             :   ExternalReference next_address =
    1327        3511 :       ExternalReference::handle_scope_next_address(isolate);
    1328             :   const int kNextOffset = 0;
    1329             :   const int kLimitOffset = Offset(
    1330        3511 :       ExternalReference::handle_scope_limit_address(isolate), next_address);
    1331             :   const int kLevelOffset = Offset(
    1332        3511 :       ExternalReference::handle_scope_level_address(isolate), next_address);
    1333             :   ExternalReference scheduled_exception_address =
    1334        3511 :       ExternalReference::scheduled_exception_address(isolate);
    1335             : 
    1336             :   DCHECK(rdx == function_address || r8 == function_address);
    1337             :   // Allocate HandleScope in callee-save registers.
    1338             :   Register prev_next_address_reg = r14;
    1339             :   Register prev_limit_reg = rbx;
    1340             :   Register base_reg = r15;
    1341             :   __ Move(base_reg, next_address);
    1342        7022 :   __ movp(prev_next_address_reg, Operand(base_reg, kNextOffset));
    1343        7022 :   __ movp(prev_limit_reg, Operand(base_reg, kLimitOffset));
    1344        7022 :   __ addl(Operand(base_reg, kLevelOffset), Immediate(1));
    1345             : 
    1346        3511 :   if (FLAG_log_timer_events) {
    1347           0 :     FrameScope frame(masm, StackFrame::MANUAL);
    1348             :     __ PushSafepointRegisters();
    1349           0 :     __ PrepareCallCFunction(1);
    1350           0 :     __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
    1351             :     __ CallCFunction(ExternalReference::log_enter_external_function(isolate),
    1352           0 :                      1);
    1353           0 :     __ PopSafepointRegisters();
    1354             :   }
    1355             : 
    1356             :   Label profiler_disabled;
    1357             :   Label end_profiler_check;
    1358        3511 :   __ Move(rax, ExternalReference::is_profiling_address(isolate));
    1359        7022 :   __ cmpb(Operand(rax, 0), Immediate(0));
    1360        3511 :   __ j(zero, &profiler_disabled);
    1361             : 
    1362             :   // Third parameter is the address of the actual getter function.
    1363        3511 :   __ Move(thunk_last_arg, function_address);
    1364             :   __ Move(rax, thunk_ref);
    1365        3511 :   __ jmp(&end_profiler_check);
    1366             : 
    1367        3511 :   __ bind(&profiler_disabled);
    1368             :   // Call the api function!
    1369        3511 :   __ Move(rax, function_address);
    1370             : 
    1371        3511 :   __ bind(&end_profiler_check);
    1372             : 
    1373             :   // Call the api function!
    1374        3511 :   __ call(rax);
    1375             : 
    1376        3511 :   if (FLAG_log_timer_events) {
    1377           0 :     FrameScope frame(masm, StackFrame::MANUAL);
    1378             :     __ PushSafepointRegisters();
    1379           0 :     __ PrepareCallCFunction(1);
    1380           0 :     __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
    1381             :     __ CallCFunction(ExternalReference::log_leave_external_function(isolate),
    1382           0 :                      1);
    1383           0 :     __ PopSafepointRegisters();
    1384             :   }
    1385             : 
    1386             :   // Load the value from ReturnValue
    1387             :   __ movp(rax, return_value_operand);
    1388        3511 :   __ bind(&prologue);
    1389             : 
    1390             :   // No more valid handles (the result handle was the last one). Restore
    1391             :   // previous handle scope.
    1392        7022 :   __ subl(Operand(base_reg, kLevelOffset), Immediate(1));
    1393        7022 :   __ movp(Operand(base_reg, kNextOffset), prev_next_address_reg);
    1394        7022 :   __ cmpp(prev_limit_reg, Operand(base_reg, kLimitOffset));
    1395        3511 :   __ j(not_equal, &delete_allocated_handles);
    1396             : 
    1397             :   // Leave the API exit frame.
    1398        3511 :   __ bind(&leave_exit_frame);
    1399        3511 :   bool restore_context = context_restore_operand != nullptr;
    1400        3511 :   if (restore_context) {
    1401             :     __ movp(rsi, *context_restore_operand);
    1402             :   }
    1403        3511 :   if (stack_space_operand != nullptr) {
    1404             :     __ movp(rbx, *stack_space_operand);
    1405             :   }
    1406        3511 :   __ LeaveApiExitFrame(!restore_context);
    1407             : 
    1408             :   // Check if the function scheduled an exception.
    1409             :   __ Move(rdi, scheduled_exception_address);
    1410        3511 :   __ Cmp(Operand(rdi, 0), factory->the_hole_value());
    1411        3511 :   __ j(not_equal, &promote_scheduled_exception);
    1412             : 
    1413             : #if DEBUG
    1414             :   // Check if the function returned a valid JavaScript value.
    1415             :   Label ok;
    1416             :   Register return_value = rax;
    1417             :   Register map = rcx;
    1418             : 
    1419             :   __ JumpIfSmi(return_value, &ok, Label::kNear);
    1420             :   __ movp(map, FieldOperand(return_value, HeapObject::kMapOffset));
    1421             : 
    1422             :   __ CmpInstanceType(map, LAST_NAME_TYPE);
    1423             :   __ j(below_equal, &ok, Label::kNear);
    1424             : 
    1425             :   __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE);
    1426             :   __ j(above_equal, &ok, Label::kNear);
    1427             : 
    1428             :   __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
    1429             :   __ j(equal, &ok, Label::kNear);
    1430             : 
    1431             :   __ CompareRoot(return_value, Heap::kUndefinedValueRootIndex);
    1432             :   __ j(equal, &ok, Label::kNear);
    1433             : 
    1434             :   __ CompareRoot(return_value, Heap::kTrueValueRootIndex);
    1435             :   __ j(equal, &ok, Label::kNear);
    1436             : 
    1437             :   __ CompareRoot(return_value, Heap::kFalseValueRootIndex);
    1438             :   __ j(equal, &ok, Label::kNear);
    1439             : 
    1440             :   __ CompareRoot(return_value, Heap::kNullValueRootIndex);
    1441             :   __ j(equal, &ok, Label::kNear);
    1442             : 
    1443             :   __ Abort(kAPICallReturnedInvalidObject);
    1444             : 
    1445             :   __ bind(&ok);
    1446             : #endif
    1447             : 
    1448        3511 :   if (stack_space_operand != nullptr) {
    1449             :     DCHECK_EQ(stack_space, 0);
    1450             :     __ PopReturnAddressTo(rcx);
    1451           0 :     __ addq(rsp, rbx);
    1452           0 :     __ jmp(rcx);
    1453             :   } else {
    1454        3511 :     __ ret(stack_space * kPointerSize);
    1455             :   }
    1456             : 
    1457             :   // Re-throw by promoting a scheduled exception.
    1458        3511 :   __ bind(&promote_scheduled_exception);
    1459        3511 :   __ TailCallRuntime(Runtime::kPromoteScheduledException);
    1460             : 
    1461             :   // HandleScope limit has changed. Delete allocated extensions.
    1462        3511 :   __ bind(&delete_allocated_handles);
    1463        7022 :   __ movp(Operand(base_reg, kLimitOffset), prev_limit_reg);
    1464             :   __ movp(prev_limit_reg, rax);
    1465        3511 :   __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
    1466             :   __ LoadAddress(rax,
    1467        3511 :                  ExternalReference::delete_handle_scope_extensions(isolate));
    1468        3511 :   __ call(rax);
    1469             :   __ movp(rax, prev_limit_reg);
    1470        3511 :   __ jmp(&leave_exit_frame);
    1471        3511 : }
    1472             : 
    1473       10440 : void CallApiCallbackStub::Generate(MacroAssembler* masm) {
    1474             :   // ----------- S t a t e -------------
    1475             :   //  -- rdi                 : callee
    1476             :   //  -- rbx                 : call_data
    1477             :   //  -- rcx                 : holder
    1478             :   //  -- rdx                 : api_function_address
    1479             :   //  -- rsi                 : context
    1480             :   //  -- rax                 : number of arguments if argc is a register
    1481             :   //  -- rsp[0]              : return address
    1482             :   //  -- rsp[8]              : last argument
    1483             :   //  -- ...
    1484             :   //  -- rsp[argc * 8]       : first argument
    1485             :   //  -- rsp[(argc + 1) * 8] : receiver
    1486             :   //  -- rsp[(argc + 2) * 8] : accessor_holder
    1487             :   // -----------------------------------
    1488             : 
    1489             :   Register callee = rdi;
    1490             :   Register call_data = rbx;
    1491        3480 :   Register holder = rcx;
    1492        3480 :   Register api_function_address = rdx;
    1493             :   Register context = rsi;
    1494             :   Register return_address = r8;
    1495             : 
    1496             :   typedef FunctionCallbackArguments FCA;
    1497             : 
    1498             :   STATIC_ASSERT(FCA::kArgsLength == 8);
    1499             :   STATIC_ASSERT(FCA::kNewTargetIndex == 7);
    1500             :   STATIC_ASSERT(FCA::kContextSaveIndex == 6);
    1501             :   STATIC_ASSERT(FCA::kCalleeIndex == 5);
    1502             :   STATIC_ASSERT(FCA::kDataIndex == 4);
    1503             :   STATIC_ASSERT(FCA::kReturnValueOffset == 3);
    1504             :   STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
    1505             :   STATIC_ASSERT(FCA::kIsolateIndex == 1);
    1506             :   STATIC_ASSERT(FCA::kHolderIndex == 0);
    1507             : 
    1508        6960 :   __ PopReturnAddressTo(return_address);
    1509             : 
    1510             :   // new target
    1511        3480 :   __ PushRoot(Heap::kUndefinedValueRootIndex);
    1512             : 
    1513             :   // context save
    1514        3480 :   __ Push(context);
    1515             : 
    1516             :   // callee
    1517        3480 :   __ Push(callee);
    1518             : 
    1519             :   // call data
    1520        3480 :   __ Push(call_data);
    1521             : 
    1522             :   // return value
    1523        3480 :   __ PushRoot(Heap::kUndefinedValueRootIndex);
    1524             :   // return value default
    1525        3480 :   __ PushRoot(Heap::kUndefinedValueRootIndex);
    1526             :   // isolate
    1527             :   Register scratch = call_data;
    1528        3480 :   __ Move(scratch, ExternalReference::isolate_address(masm->isolate()));
    1529        3480 :   __ Push(scratch);
    1530             :   // holder
    1531        3480 :   __ Push(holder);
    1532             : 
    1533             :   // enter a new context
    1534             :   int argc = this->argc();
    1535        3480 :   if (this->is_lazy()) {
    1536             :     // ----------- S t a t e -------------------------------------
    1537             :     //  -- rsp[0]                                 : holder
    1538             :     //  -- ...
    1539             :     //  -- rsp[(FCA::kArgsLength - 1) * 8]        : new_target
    1540             :     //  -- rsp[FCA::kArgsLength * 8]              : last argument
    1541             :     //  -- ...
    1542             :     //  -- rsp[(FCA::kArgsLength + argc - 1) * 8] : first argument
    1543             :     //  -- rsp[(FCA::kArgsLength + argc) * 8]     : receiver
    1544             :     //  -- rsp[(FCA::kArgsLength + argc + 1) * 8] : accessor_holder
    1545             :     // -----------------------------------------------------------
    1546             : 
    1547             :     // load context from accessor_holder
    1548             :     Register accessor_holder = context;
    1549          44 :     Register scratch2 = callee;
    1550             :     __ movp(accessor_holder,
    1551          88 :             MemOperand(rsp, (argc + FCA::kArgsLength + 1) * kPointerSize));
    1552             :     // Look for the constructor if |accessor_holder| is not a function.
    1553             :     Label skip_looking_for_constructor;
    1554             :     __ movp(scratch, FieldOperand(accessor_holder, HeapObject::kMapOffset));
    1555             :     __ testb(FieldOperand(scratch, Map::kBitFieldOffset),
    1556          44 :              Immediate(1 << Map::kIsConstructor));
    1557          44 :     __ j(not_zero, &skip_looking_for_constructor, Label::kNear);
    1558          44 :     __ GetMapConstructor(context, scratch, scratch2);
    1559          44 :     __ bind(&skip_looking_for_constructor);
    1560             :     __ movp(context, FieldOperand(context, JSFunction::kContextOffset));
    1561             :   } else {
    1562             :     // load context from callee
    1563        3436 :     __ movp(context, FieldOperand(callee, JSFunction::kContextOffset));
    1564             :   }
    1565             : 
    1566        3480 :   __ movp(scratch, rsp);
    1567             :   // Push return address back on stack.
    1568             :   __ PushReturnAddressFrom(return_address);
    1569             : 
    1570             :   // Allocate the v8::Arguments structure in the arguments' space since
    1571             :   // it's not controlled by GC.
    1572             :   const int kApiStackSpace = 3;
    1573             : 
    1574             :   PrepareCallApiFunction(masm, kApiStackSpace);
    1575             : 
    1576             :   // FunctionCallbackInfo::implicit_args_.
    1577             :   __ movp(StackSpaceOperand(0), scratch);
    1578        6960 :   __ addp(scratch, Immediate((argc + FCA::kArgsLength - 1) * kPointerSize));
    1579             :   // FunctionCallbackInfo::values_.
    1580             :   __ movp(StackSpaceOperand(1), scratch);
    1581             :   // FunctionCallbackInfo::length_.
    1582        6960 :   __ Set(StackSpaceOperand(2), argc);
    1583             : 
    1584             : #if defined(__MINGW64__) || defined(_WIN64)
    1585             :   Register arguments_arg = rcx;
    1586             :   Register callback_arg = rdx;
    1587             : #else
    1588             :   Register arguments_arg = rdi;
    1589        3480 :   Register callback_arg = rsi;
    1590             : #endif
    1591             : 
    1592             :   // It's okay if api_function_address == callback_arg
    1593             :   // but not arguments_arg
    1594             :   DCHECK(api_function_address != arguments_arg);
    1595             : 
    1596             :   // v8::InvocationCallback's argument.
    1597             :   __ leap(arguments_arg, StackSpaceOperand(0));
    1598             : 
    1599             :   ExternalReference thunk_ref =
    1600        3480 :       ExternalReference::invoke_function_callback(masm->isolate());
    1601             : 
    1602             :   // Accessor for FunctionCallbackInfo and first js arg.
    1603             :   StackArgumentsAccessor args_from_rbp(rbp, FCA::kArgsLength + 1,
    1604             :                                        ARGUMENTS_DONT_CONTAIN_RECEIVER);
    1605             :   Operand context_restore_operand = args_from_rbp.GetArgumentOperand(
    1606        3480 :       FCA::kArgsLength - FCA::kContextSaveIndex);
    1607             :   Operand return_value_operand = args_from_rbp.GetArgumentOperand(
    1608        3480 :       this->is_store() ? 0 : FCA::kArgsLength - FCA::kReturnValueOffset);
    1609        3480 :   const int stack_space = argc + FCA::kArgsLength + 2;
    1610             :   Operand* stack_space_operand = nullptr;
    1611             :   CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, callback_arg,
    1612             :                            stack_space, stack_space_operand,
    1613        3480 :                            return_value_operand, &context_restore_operand);
    1614        3480 : }
    1615             : 
    1616             : 
    1617          31 : void CallApiGetterStub::Generate(MacroAssembler* masm) {
    1618             : #if defined(__MINGW64__) || defined(_WIN64)
    1619             :   Register getter_arg = r8;
    1620             :   Register accessor_info_arg = rdx;
    1621             :   Register name_arg = rcx;
    1622             : #else
    1623          31 :   Register getter_arg = rdx;
    1624             :   Register accessor_info_arg = rsi;
    1625             :   Register name_arg = rdi;
    1626             : #endif
    1627             :   Register api_function_address = r8;
    1628          31 :   Register receiver = ApiGetterDescriptor::ReceiverRegister();
    1629          31 :   Register holder = ApiGetterDescriptor::HolderRegister();
    1630          31 :   Register callback = ApiGetterDescriptor::CallbackRegister();
    1631             :   Register scratch = rax;
    1632             :   DCHECK(!AreAliased(receiver, holder, callback, scratch));
    1633             : 
    1634             :   // Build v8::PropertyCallbackInfo::args_ array on the stack and push property
    1635             :   // name below the exit frame to make GC aware of them.
    1636             :   STATIC_ASSERT(PropertyCallbackArguments::kShouldThrowOnErrorIndex == 0);
    1637             :   STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 1);
    1638             :   STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 2);
    1639             :   STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 3);
    1640             :   STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 4);
    1641             :   STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 5);
    1642             :   STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 6);
    1643             :   STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 7);
    1644             : 
    1645             :   // Insert additional parameters into the stack frame above return address.
    1646             :   __ PopReturnAddressTo(scratch);
    1647          31 :   __ Push(receiver);
    1648          31 :   __ Push(FieldOperand(callback, AccessorInfo::kDataOffset));
    1649          31 :   __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
    1650          31 :   __ Push(kScratchRegister);  // return value
    1651          31 :   __ Push(kScratchRegister);  // return value default
    1652          62 :   __ PushAddress(ExternalReference::isolate_address(isolate()));
    1653          31 :   __ Push(holder);
    1654          31 :   __ Push(Smi::kZero);  // should_throw_on_error -> false
    1655          31 :   __ Push(FieldOperand(callback, AccessorInfo::kNameOffset));
    1656             :   __ PushReturnAddressFrom(scratch);
    1657             : 
    1658             :   // v8::PropertyCallbackInfo::args_ array and name handle.
    1659             :   const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;
    1660             : 
    1661             :   // Allocate v8::PropertyCallbackInfo in non-GCed stack space.
    1662             :   const int kArgStackSpace = 1;
    1663             : 
    1664             :   // Load address of v8::PropertyAccessorInfo::args_ array.
    1665          62 :   __ leap(scratch, Operand(rsp, 2 * kPointerSize));
    1666             : 
    1667             :   PrepareCallApiFunction(masm, kArgStackSpace);
    1668             :   // Create v8::PropertyCallbackInfo object on the stack and initialize
    1669             :   // it's args_ field.
    1670          31 :   Operand info_object = StackSpaceOperand(0);
    1671             :   __ movp(info_object, scratch);
    1672             : 
    1673          62 :   __ leap(name_arg, Operand(scratch, -kPointerSize));
    1674             :   // The context register (rsi) has been saved in PrepareCallApiFunction and
    1675             :   // could be used to pass arguments.
    1676             :   __ leap(accessor_info_arg, info_object);
    1677             : 
    1678             :   ExternalReference thunk_ref =
    1679          31 :       ExternalReference::invoke_accessor_getter_callback(isolate());
    1680             : 
    1681             :   // It's okay if api_function_address == getter_arg
    1682             :   // but not accessor_info_arg or name_arg
    1683             :   DCHECK(api_function_address != accessor_info_arg);
    1684             :   DCHECK(api_function_address != name_arg);
    1685             :   __ movp(scratch, FieldOperand(callback, AccessorInfo::kJsGetterOffset));
    1686             :   __ movp(api_function_address,
    1687             :           FieldOperand(scratch, Foreign::kForeignAddressOffset));
    1688             : 
    1689             :   // +3 is to skip prolog, return address and name handle.
    1690             :   Operand return_value_operand(
    1691          31 :       rbp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize);
    1692             :   CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, getter_arg,
    1693             :                            kStackUnwindSpace, nullptr, return_value_operand,
    1694          31 :                            nullptr);
    1695          31 : }
    1696             : 
    1697             : #undef __
    1698             : 
    1699             : }  // namespace internal
    1700             : }  // namespace v8
    1701             : 
    1702             : #endif  // V8_TARGET_ARCH_X64

Generated by: LCOV version 1.10