LCOV - code coverage report
Current view: top level - src/x64 - macro-assembler-x64.cc (source / functions) Hit Total Coverage
Test: app.info Lines: 953 1278 74.6 %
Date: 2017-10-20 Functions: 175 214 81.8 %

          Line data    Source code
       1             : // Copyright 2012 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #if V8_TARGET_ARCH_X64
       6             : 
       7             : #include "src/base/bits.h"
       8             : #include "src/base/division-by-constant.h"
       9             : #include "src/base/utils/random-number-generator.h"
      10             : #include "src/bootstrapper.h"
      11             : #include "src/callable.h"
      12             : #include "src/code-stubs.h"
      13             : #include "src/counters.h"
      14             : #include "src/debug/debug.h"
      15             : #include "src/external-reference-table.h"
      16             : #include "src/frames-inl.h"
      17             : #include "src/heap/heap-inl.h"
      18             : #include "src/objects-inl.h"
      19             : #include "src/register-configuration.h"
      20             : #include "src/x64/assembler-x64.h"
      21             : 
      22             : #include "src/x64/macro-assembler-x64.h"  // Cannot be the first include.
      23             : 
      24             : namespace v8 {
      25             : namespace internal {
      26             : 
// Returns an Operand addressing argument[index] on the stack.
// argument[0] is the one farthest from the return address; higher indices
// move toward it (each argument occupies one kPointerSize slot).
Operand StackArgumentsAccessor::GetArgumentOperand(int index) {
  DCHECK_GE(index, 0);
  // Account for one extra slot when the receiver is part of the arguments.
  int receiver = (receiver_mode_ == ARGUMENTS_CONTAIN_RECEIVER) ? 1 : 0;
  // Skip the return address; when addressing relative to a register other
  // than rsp (i.e. a frame pointer), also skip the saved frame pointer.
  int displacement_to_last_argument =
      base_reg_ == rsp ? kPCOnStackSize : kFPOnStackSize + kPCOnStackSize;
  displacement_to_last_argument += extra_displacement_to_last_argument_;
  if (argument_count_reg_ == no_reg) {
    // Immediate argument count.
    // argument[0] is at base_reg_ + displacement_to_last_argument +
    // (argument_count_immediate_ + receiver - 1) * kPointerSize.
    DCHECK_GT(argument_count_immediate_ + receiver, 0);
    return Operand(
        base_reg_,
        displacement_to_last_argument +
            (argument_count_immediate_ + receiver - 1 - index) * kPointerSize);
  } else {
    // Argument count held in a register: use scaled-index addressing.
    // argument[0] is at base_reg_ + displacement_to_last_argument +
    // argument_count_reg_ * times_pointer_size + (receiver - 1) * kPointerSize.
    return Operand(
        base_reg_, argument_count_reg_, times_pointer_size,
        displacement_to_last_argument + (receiver - 1 - index) * kPointerSize);
  }
}
      49             : 
// Builds an accessor from a ParameterCount that holds either a register or
// an immediate; exactly one of argument_count_reg_ /
// argument_count_immediate_ carries the count (the other is no_reg / 0).
StackArgumentsAccessor::StackArgumentsAccessor(
    Register base_reg, const ParameterCount& parameter_count,
    StackArgumentsAccessorReceiverMode receiver_mode,
    int extra_displacement_to_last_argument)
    : base_reg_(base_reg),
      argument_count_reg_(parameter_count.is_reg() ? parameter_count.reg()
                                                   : no_reg),
      argument_count_immediate_(
          parameter_count.is_immediate() ? parameter_count.immediate() : 0),
      receiver_mode_(receiver_mode),
      extra_displacement_to_last_argument_(
          extra_displacement_to_last_argument) {}
      62             : 
// MacroAssembler adds no construction-time state of its own; it simply
// forwards to the TurboAssembler constructor.
MacroAssembler::MacroAssembler(Isolate* isolate, void* buffer, int size,
                               CodeObjectRequired create_code_object)
    : TurboAssembler(isolate, buffer, size, create_code_object) {}
      66             : 
TurboAssembler::TurboAssembler(Isolate* isolate, void* buffer, int buffer_size,
                               CodeObjectRequired create_code_object)
    : Assembler(isolate, buffer, buffer_size), isolate_(isolate) {
  if (create_code_object == CodeObjectRequired::kYes) {
    // Seed code_object_ with the undefined value.
    // NOTE(review): presumably replaced with the real Code object once it
    // exists — confirm against the users of code_object_.
    code_object_ =
        Handle<HeapObject>::New(isolate->heap()->undefined_value(), isolate);
  }
}
      75             : 
// Sentinel returned by RootRegisterDelta when root-relative addressing
// must not (or cannot) be used.
static const int64_t kInvalidRootRegisterDelta = -1;
      77             : 
// Computes the displacement of {other} from the value held in kRootRegister
// (the roots array start biased by kRootRegisterBias).  Returns
// kInvalidRootRegisterDelta when predictable code size is requested and the
// target lies outside the Isolate object, so the caller must not use a
// root-relative operand.
int64_t TurboAssembler::RootRegisterDelta(ExternalReference other) {
  if (predictable_code_size() &&
      (other.address() < reinterpret_cast<Address>(isolate()) ||
       other.address() >= reinterpret_cast<Address>(isolate() + 1))) {
    return kInvalidRootRegisterDelta;
  }
  // This is the value kRootRegister holds at runtime.
  Address roots_register_value =
      kRootRegisterBias +
      reinterpret_cast<Address>(isolate()->heap()->roots_array_start());

  int64_t delta = kInvalidRootRegisterDelta;  // Bogus initialization.
  if (kPointerSize == kInt64Size) {
    delta = other.address() - roots_register_value;
  } else {
    // For x32, zero extend the address to 64-bit and calculate the delta.
    uint64_t o = static_cast<uint32_t>(
        reinterpret_cast<intptr_t>(other.address()));
    uint64_t r = static_cast<uint32_t>(
        reinterpret_cast<intptr_t>(roots_register_value));
    delta = o - r;
  }
  return delta;
}
     101             : 
     102             : 
     103      104702 : Operand MacroAssembler::ExternalOperand(ExternalReference target,
     104             :                                         Register scratch) {
     105      104702 :   if (root_array_available_ && !serializer_enabled()) {
     106             :     int64_t delta = RootRegisterDelta(target);
     107          48 :     if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
     108          24 :       return Operand(kRootRegister, static_cast<int32_t>(delta));
     109             :     }
     110             :   }
     111             :   Move(scratch, target);
     112      104678 :   return Operand(scratch, 0);
     113             : }
     114             : 
     115             : 
     116         155 : void MacroAssembler::Load(Register destination, ExternalReference source) {
     117         155 :   if (root_array_available_ && !serializer_enabled()) {
     118             :     int64_t delta = RootRegisterDelta(source);
     119           0 :     if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
     120           0 :       movp(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
     121         155 :       return;
     122             :     }
     123             :   }
     124             :   // Safe code.
     125         155 :   if (destination == rax) {
     126          62 :     load_rax(source);
     127             :   } else {
     128             :     Move(kScratchRegister, source);
     129         186 :     movp(destination, Operand(kScratchRegister, 0));
     130             :   }
     131             : }
     132             : 
     133             : 
     134       90636 : void MacroAssembler::Store(ExternalReference destination, Register source) {
     135       90636 :   if (root_array_available_ && !serializer_enabled()) {
     136             :     int64_t delta = RootRegisterDelta(destination);
     137       76858 :     if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
     138       76858 :       movp(Operand(kRootRegister, static_cast<int32_t>(delta)), source);
     139      129065 :       return;
     140             :     }
     141             :   }
     142             :   // Safe code.
     143       52207 :   if (source == rax) {
     144         124 :     store_rax(destination);
     145             :   } else {
     146             :     Move(kScratchRegister, destination);
     147      104166 :     movp(Operand(kScratchRegister, 0), source);
     148             :   }
     149             : }
     150             : 
     151     1061658 : void TurboAssembler::LoadAddress(Register destination,
     152             :                                  ExternalReference source) {
     153     1061658 :   if (root_array_available_ && !serializer_enabled()) {
     154             :     int64_t delta = RootRegisterDelta(source);
     155      997668 :     if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
     156      997476 :       leap(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
     157     1560396 :       return;
     158             :     }
     159             :   }
     160             :   // Safe code.
     161             :   Move(destination, source);
     162             : }
     163             : 
// Returns the number of bytes LoadAddress would emit for {source}, without
// emitting anything.  Used by callers that need to know code size up front.
int TurboAssembler::LoadAddressSize(ExternalReference source) {
  if (root_array_available_ && !serializer_enabled()) {
    // This calculation depends on the internals of LoadAddress.
    // Its correctness is ensured by the asserts in the Call
    // instruction below.
    int64_t delta = RootRegisterDelta(source);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      // Operand is leap(scratch, Operand(kRootRegister, delta));
      // Opcodes : REX.W 8D ModRM Disp8/Disp32  - 4 or 7.
      int size = 4;
      if (!is_int8(static_cast<int32_t>(delta))) {
        size += 3;  // Need full four-byte displacement in lea.
      }
      return size;
    }
  }
  // Size of movp(destination, src);
  return Assembler::kMoveAddressIntoScratchRegisterInstructionLength;
}
     183             : 
     184             : 
     185          31 : void MacroAssembler::PushAddress(ExternalReference source) {
     186          31 :   int64_t address = reinterpret_cast<int64_t>(source.address());
     187          31 :   if (is_int32(address) && !serializer_enabled()) {
     188           0 :     if (emit_debug_code()) {
     189             :       Move(kScratchRegister, kZapValue, Assembler::RelocInfoNone());
     190             :     }
     191           0 :     Push(Immediate(static_cast<int32_t>(address)));
     192          31 :     return;
     193             :   }
     194          31 :   LoadAddress(kScratchRegister, source);
     195             :   Push(kScratchRegister);
     196             : }
     197             : 
     198     1746776 : void TurboAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
     199             :   DCHECK(root_array_available_);
     200             :   movp(destination, Operand(kRootRegister,
     201     3493557 :                             (index << kPointerSizeLog2) - kRootRegisterBias));
     202     1746781 : }
     203             : 
     204       10905 : void MacroAssembler::PushRoot(Heap::RootListIndex index) {
     205             :   DCHECK(root_array_available_);
     206       21810 :   Push(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias));
     207       10905 : }
     208             : 
     209      680544 : void TurboAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
     210             :   DCHECK(root_array_available_);
     211             :   cmpp(with, Operand(kRootRegister,
     212     1361121 :                      (index << kPointerSizeLog2) - kRootRegisterBias));
     213      680577 : }
     214             : 
// Compares the memory operand {with} against the root-list entry at {index}.
// Clobbers kScratchRegister, so {with} must not use it for addressing.
void TurboAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  DCHECK(root_array_available_);
  DCHECK(!with.AddressUsesRegister(kScratchRegister));
  LoadRoot(kScratchRegister, index);
  cmpp(with, kScratchRegister);
}
     222             : 
// Records {addr} in the store buffer and returns (via ret) to the caller's
// caller; when the buffer fills up, tail-calls the store-buffer-overflow
// stub first.  Both paths end in ret(0), so this must be emitted as the tail
// of the surrounding code.  Clobbers kScratchRegister and {scratch}.
void MacroAssembler::RememberedSetHelper(Register object,  // For debug tests.
                                         Register addr, Register scratch,
                                         SaveFPRegsMode save_fp) {
  if (emit_debug_code()) {
    // {object} must be in old space; trap otherwise.
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
    int3();
    bind(&ok);
  }
  // Load store buffer top.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  DCHECK_NE(scratch, kScratchRegister);
  Move(kScratchRegister, store_buffer);
  movp(scratch, Operand(kScratchRegister, 0));
  // Store pointer to buffer.
  movp(Operand(scratch, 0), addr);
  // Increment buffer top.
  addp(scratch, Immediate(kPointerSize));
  // Write back new top of buffer.
  movp(Operand(kScratchRegister, 0), scratch);
  // Call stub on end of buffer.
  Label done;
  // Check for end of buffer.
  testp(scratch, Immediate(StoreBuffer::kStoreBufferMask));
  Label buffer_overflowed;
  j(equal, &buffer_overflowed, Label::kNear);
  ret(0);
  bind(&buffer_overflowed);
  StoreBufferOverflowStub store_buffer_overflow(isolate(), save_fp);
  CallStub(&store_buffer_overflow);
  ret(0);
}
     256             : 
     257             : 
// Jumps to {branch} based on whether {object} lies in new space: the jump is
// taken when the kIsInNewSpaceMask page-flag test satisfies condition {cc}.
// {scratch} is clobbered by the page-flag check.
void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch,
                                Label::Distance distance) {
  CheckPageFlag(object, scratch, MemoryChunk::kIsInNewSpaceMask, cc, branch,
                distance);
}
     266             : 
// Emits a write barrier for the store of {value} into the field at {offset}
// within {object}.  {dst} is clobbered: it receives the field's address
// before being handed to RecordWrite.
void MacroAssembler::RecordWriteField(Register object, int offset,
                                      Register value, Register dst,
                                      SaveFPRegsMode save_fp,
                                      RememberedSetAction remembered_set_action,
                                      SmiCheck smi_check) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  DCHECK(IsAligned(offset, kPointerSize));

  leap(dst, FieldOperand(object, offset));
  if (emit_debug_code()) {
    // Verify that the computed field address is pointer-aligned.
    Label ok;
    testb(dst, Immediate(kPointerSize - 1));
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // The smi check (if requested) was already done above, so tell RecordWrite
  // to omit it.
  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    Move(value, kZapValue, Assembler::RelocInfoNone());
    Move(dst, kZapValue, Assembler::RelocInfoNone());
  }
}
     306             : 
     307      321192 : void TurboAssembler::SaveRegisters(RegList registers) {
     308             :   DCHECK_GT(NumRegs(registers), 0);
     309     5460264 :   for (int i = 0; i < Register::kNumRegisters; ++i) {
     310     5139072 :     if ((registers >> i) & 1u) {
     311     1605960 :       pushq(Register::from_code(i));
     312             :     }
     313             :   }
     314      321192 : }
     315             : 
     316      321192 : void TurboAssembler::RestoreRegisters(RegList registers) {
     317             :   DCHECK_GT(NumRegs(registers), 0);
     318     5460264 :   for (int i = Register::kNumRegisters - 1; i >= 0; --i) {
     319     5139072 :     if ((registers >> i) & 1u) {
     320     1605960 :       popq(Register::from_code(i));
     321             :     }
     322             :   }
     323      321192 : }
     324             : 
// Calls the RecordWrite builtin.  The builtin takes its arguments in fixed
// registers dictated by its call descriptor; the push/pop sequence below
// routes {object} and {address} into those registers through the stack, which
// stays correct even when the incoming and parameter register sets overlap.
void TurboAssembler::CallRecordWriteStub(
    Register object, Register address,
    RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode) {
  Callable const callable =
      Builtins::CallableFor(isolate(), Builtins::kRecordWrite);
  RegList registers = callable.descriptor().allocatable_registers();

  // Preserve the descriptor's allocatable registers around the call.
  SaveRegisters(registers);

  Register object_parameter(callable.descriptor().GetRegisterParameter(
      RecordWriteDescriptor::kObject));
  Register slot_parameter(
      callable.descriptor().GetRegisterParameter(RecordWriteDescriptor::kSlot));
  Register isolate_parameter(callable.descriptor().GetRegisterParameter(
      RecordWriteDescriptor::kIsolate));
  Register remembered_set_parameter(callable.descriptor().GetRegisterParameter(
      RecordWriteDescriptor::kRememberedSet));
  Register fp_mode_parameter(callable.descriptor().GetRegisterParameter(
      RecordWriteDescriptor::kFPMode));

  // Move {object}/{address} into the parameter registers via the stack so the
  // transfer is safe under any register aliasing.
  pushq(object);
  pushq(address);

  popq(slot_parameter);
  popq(object_parameter);

  LoadAddress(isolate_parameter, ExternalReference::isolate_address(isolate()));
  Move(remembered_set_parameter, Smi::FromEnum(remembered_set_action));
  Move(fp_mode_parameter, Smi::FromEnum(fp_mode));
  Call(callable.code(), RelocInfo::CODE_TARGET);

  RestoreRegisters(registers);
}
     358             : 
// Emits the full write barrier for the store of {value} at {address} inside
// {object}.  Skips the barrier when the value is a smi (optional inline
// check) or when the relevant page flags show no barrier is needed, then
// calls the record-write stub.  {value} is used as a scratch register by the
// page-flag checks; all three registers must be distinct.
void MacroAssembler::RecordWrite(Register object, Register address,
                                 Register value, SaveFPRegsMode fp_mode,
                                 RememberedSetAction remembered_set_action,
                                 SmiCheck smi_check) {
  DCHECK(object != value);
  DCHECK(object != address);
  DCHECK(value != address);
  AssertNotSmi(object);

  // Without incremental marking the remembered set is the only reason to
  // emit the barrier; if the caller omits it too, there is nothing to do.
  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    // Verify that {address} really points at a slot containing {value}.
    Label ok;
    cmpp(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    // Skip barrier if writing a smi.
    JumpIfSmi(value, &done);
  }

  CheckPageFlag(value,
                value,  // Used as scratch.
                MemoryChunk::kPointersToHereAreInterestingMask, zero, &done,
                Label::kNear);

  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

#ifdef V8_CSA_WRITE_BARRIER
  CallRecordWriteStub(object, address, remembered_set_action, fp_mode);
#else
  RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
                       fp_mode);
  CallStub(&stub);
#endif

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    Move(address, kZapValue, Assembler::RelocInfoNone());
    Move(value, kZapValue, Assembler::RelocInfoNone());
  }
}
     423             : 
     424         536 : void TurboAssembler::Assert(Condition cc, BailoutReason reason) {
     425         536 :   if (emit_debug_code()) Check(cc, reason);
     426         536 : }
     427             : 
     428      168026 : void TurboAssembler::AssertUnreachable(BailoutReason reason) {
     429      168026 :   if (emit_debug_code()) Abort(reason);
     430      168026 : }
     431             : 
     432          43 : void TurboAssembler::Check(Condition cc, BailoutReason reason) {
     433             :   Label L;
     434          43 :   j(cc, &L, Label::kNear);
     435          43 :   Abort(reason);
     436             :   // Control will not return here.
     437          43 :   bind(&L);
     438          43 : }
     439             : 
     440          62 : void TurboAssembler::CheckStackAlignment() {
     441          62 :   int frame_alignment = base::OS::ActivationFrameAlignment();
     442          62 :   int frame_alignment_mask = frame_alignment - 1;
     443          62 :   if (frame_alignment > kPointerSize) {
     444             :     DCHECK(base::bits::IsPowerOfTwo(frame_alignment));
     445             :     Label alignment_as_expected;
     446          62 :     testp(rsp, Immediate(frame_alignment_mask));
     447          62 :     j(zero, &alignment_as_expected, Label::kNear);
     448             :     // Abort if stack is not aligned.
     449          62 :     int3();
     450          62 :     bind(&alignment_as_expected);
     451             :   }
     452          62 : }
     453             : 
// Emits a call to the Abort builtin with {reason} in rdx, then traps.  In
// DEBUG builds the reason is also recorded as an assembler comment, and
// --trap-on-abort emits a bare int3 instead.  Fakes a frame when none exists
// so the builtin call is valid.
void TurboAssembler::Abort(BailoutReason reason) {
#ifdef DEBUG
  const char* msg = GetBailoutReason(reason);
  if (msg != nullptr) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }

  if (FLAG_trap_on_abort) {
    int3();
    return;
  }
#endif

  // Pass the reason to the Abort builtin as a smi in rdx.
  Move(rdx, Smi::FromInt(static_cast<int>(reason)));

  if (!has_frame()) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    Call(BUILTIN_CODE(isolate(), Abort), RelocInfo::CODE_TARGET);
  } else {
    Call(BUILTIN_CODE(isolate(), Abort), RelocInfo::CODE_TARGET);
  }
  // Control will not return here.
  int3();
}
     481             : 
// Calls {stub} without requiring its code object yet; the call target is
// resolved later (hence "delayed").
void TurboAssembler::CallStubDelayed(CodeStub* stub) {
  DCHECK(AllowThisStubCall(stub));  // Calls are not allowed in some stubs
  call(stub);
}
     486             : 
     487        2379 : void MacroAssembler::CallStub(CodeStub* stub) {
     488             :   DCHECK(AllowThisStubCall(stub));  // Calls are not allowed in some stubs
     489        2379 :   Call(stub->GetCode(), RelocInfo::CODE_TARGET);
     490        2379 : }
     491             : 
     492             : 
     493        4561 : void MacroAssembler::TailCallStub(CodeStub* stub) {
     494        4561 :   Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
     495        4561 : }
     496             : 
     497             : bool TurboAssembler::AllowThisStubCall(CodeStub* stub) {
     498             :   return has_frame() || !stub->SometimesSetsUpAFrame();
     499             : }
     500             : 
// Like MacroAssembler::CallRuntime, but allocates the CEntryStub in {zone}
// and uses the delayed call path; note the stub is constructed with a null
// isolate here.
void TurboAssembler::CallRuntimeDelayed(Zone* zone, Runtime::FunctionId fid,
                                        SaveFPRegsMode save_doubles) {
  const Runtime::Function* f = Runtime::FunctionForId(fid);
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, f->nargs);
  LoadAddress(rbx, ExternalReference(f, isolate()));
  CallStubDelayed(new (zone) CEntryStub(nullptr, f->result_size, save_doubles));
}
     512             : 
     513        1550 : void MacroAssembler::CallRuntime(const Runtime::Function* f,
     514             :                                  int num_arguments,
     515             :                                  SaveFPRegsMode save_doubles) {
     516             :   // If the expected number of arguments of the runtime function is
     517             :   // constant, we check that the actual number of arguments match the
     518             :   // expectation.
     519        1550 :   CHECK(f->nargs < 0 || f->nargs == num_arguments);
     520             : 
     521             :   // TODO(1236192): Most runtime routines don't need the number of
     522             :   // arguments passed in because it is constant. At some point we
     523             :   // should remove this need and make the runtime routine entry code
     524             :   // smarter.
     525        3100 :   Set(rax, num_arguments);
     526        1550 :   LoadAddress(rbx, ExternalReference(f, isolate()));
     527        1550 :   CEntryStub ces(isolate(), f->result_size, save_doubles);
     528        1550 :   CallStub(&ces);
     529        1550 : }
     530             : 
     531      104758 : void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
     532             :   // ----------- S t a t e -------------
     533             :   //  -- rsp[0]                 : return address
     534             :   //  -- rsp[8]                 : argument num_arguments - 1
     535             :   //  ...
     536             :   //  -- rsp[8 * num_arguments] : argument 0 (receiver)
     537             :   //
     538             :   //  For runtime functions with variable arguments:
     539             :   //  -- rax                    : number of  arguments
     540             :   // -----------------------------------
     541             : 
     542      104758 :   const Runtime::Function* function = Runtime::FunctionForId(fid);
     543             :   DCHECK_EQ(1, function->result_size);
     544      104758 :   if (function->nargs >= 0) {
     545      104727 :     Set(rax, function->nargs);
     546             :   }
     547      104758 :   JumpToExternalReference(ExternalReference(fid, isolate()));
     548      104758 : }
     549             : 
     550      104789 : void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
     551             :                                              bool builtin_exit_frame) {
     552             :   // Set the entry point and jump to the C entry runtime stub.
     553      104789 :   LoadAddress(rbx, ext);
     554             :   CEntryStub ces(isolate(), 1, kDontSaveFPRegs, kArgvOnStack,
     555             :                  builtin_exit_frame);
     556      104789 :   jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
     557      104789 : }
     558             : 
     559             : static constexpr Register saved_regs[] = {rax, rcx, rdx, rbx, rbp, rsi,
     560             :                                           rdi, r8,  r9,  r10, r11};
     561             : 
     562             : static constexpr int kNumberOfSavedRegs = sizeof(saved_regs) / sizeof(Register);
     563             : 
     564         378 : int TurboAssembler::RequiredStackSizeForCallerSaved(SaveFPRegsMode fp_mode,
     565             :                                                     Register exclusion1,
     566             :                                                     Register exclusion2,
     567             :                                                     Register exclusion3) const {
     568             :   int bytes = 0;
     569        4536 :   for (int i = 0; i < kNumberOfSavedRegs; i++) {
     570        4158 :     Register reg = saved_regs[i];
     571        4158 :     if (reg != exclusion1 && reg != exclusion2 && reg != exclusion3) {
     572        3780 :       bytes += kPointerSize;
     573             :     }
     574             :   }
     575             : 
     576             :   // R12 to r15 are callee save on all platforms.
     577         378 :   if (fp_mode == kSaveFPRegs) {
     578         192 :     bytes += kDoubleSize * XMMRegister::kNumRegisters;
     579             :   }
     580             : 
     581         378 :   return bytes;
     582             : }
     583             : 
     584         440 : int TurboAssembler::PushCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
     585             :                                     Register exclusion2, Register exclusion3) {
     586             :   // We don't allow a GC during a store buffer overflow so there is no need to
     587             :   // store the registers in any particular way, but we do have to store and
     588             :   // restore them.
     589             :   int bytes = 0;
     590        5280 :   for (int i = 0; i < kNumberOfSavedRegs; i++) {
     591        4840 :     Register reg = saved_regs[i];
     592        4840 :     if (reg != exclusion1 && reg != exclusion2 && reg != exclusion3) {
     593        4462 :       pushq(reg);
     594        4462 :       bytes += kPointerSize;
     595             :     }
     596             :   }
     597             : 
     598             :   // R12 to r15 are callee save on all platforms.
     599         440 :   if (fp_mode == kSaveFPRegs) {
     600             :     int delta = kDoubleSize * XMMRegister::kNumRegisters;
     601         223 :     subp(rsp, Immediate(delta));
     602        3791 :     for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
     603        3568 :       XMMRegister reg = XMMRegister::from_code(i);
     604        3568 :       Movsd(Operand(rsp, i * kDoubleSize), reg);
     605             :     }
     606         223 :     bytes += delta;
     607             :   }
     608             : 
     609         440 :   return bytes;
     610             : }
     611             : 
     612         440 : int TurboAssembler::PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
     613             :                                    Register exclusion2, Register exclusion3) {
     614             :   int bytes = 0;
     615         440 :   if (fp_mode == kSaveFPRegs) {
     616        3568 :     for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
     617        3568 :       XMMRegister reg = XMMRegister::from_code(i);
     618        3568 :       Movsd(reg, Operand(rsp, i * kDoubleSize));
     619             :     }
     620             :     int delta = kDoubleSize * XMMRegister::kNumRegisters;
     621         223 :     addp(rsp, Immediate(kDoubleSize * XMMRegister::kNumRegisters));
     622             :     bytes += delta;
     623             :   }
     624             : 
     625        4840 :   for (int i = kNumberOfSavedRegs - 1; i >= 0; i--) {
     626        4840 :     Register reg = saved_regs[i];
     627        4840 :     if (reg != exclusion1 && reg != exclusion2 && reg != exclusion3) {
     628        4462 :       popq(reg);
     629        4462 :       bytes += kPointerSize;
     630             :     }
     631             :   }
     632             : 
     633         440 :   return bytes;
     634             : }
     635             : 
     636       11073 : void TurboAssembler::Cvtss2sd(XMMRegister dst, XMMRegister src) {
     637       11073 :   if (CpuFeatures::IsSupported(AVX)) {
     638             :     CpuFeatureScope scope(this, AVX);
     639       11053 :     vcvtss2sd(dst, src, src);
     640             :   } else {
     641          20 :     cvtss2sd(dst, src);
     642             :   }
     643       11075 : }
     644             : 
     645       13434 : void TurboAssembler::Cvtss2sd(XMMRegister dst, const Operand& src) {
     646       13434 :   if (CpuFeatures::IsSupported(AVX)) {
     647             :     CpuFeatureScope scope(this, AVX);
     648       13434 :     vcvtss2sd(dst, dst, src);
     649             :   } else {
     650           0 :     cvtss2sd(dst, src);
     651             :   }
     652       13434 : }
     653             : 
     654        8077 : void TurboAssembler::Cvtsd2ss(XMMRegister dst, XMMRegister src) {
     655        8077 :   if (CpuFeatures::IsSupported(AVX)) {
     656             :     CpuFeatureScope scope(this, AVX);
     657        8073 :     vcvtsd2ss(dst, src, src);
     658             :   } else {
     659           4 :     cvtsd2ss(dst, src);
     660             :   }
     661        8078 : }
     662             : 
     663       12557 : void TurboAssembler::Cvtsd2ss(XMMRegister dst, const Operand& src) {
     664       12557 :   if (CpuFeatures::IsSupported(AVX)) {
     665             :     CpuFeatureScope scope(this, AVX);
     666       12557 :     vcvtsd2ss(dst, dst, src);
     667             :   } else {
     668           0 :     cvtsd2ss(dst, src);
     669             :   }
     670       12557 : }
     671             : 
     672      295405 : void TurboAssembler::Cvtlsi2sd(XMMRegister dst, Register src) {
     673      295405 :   if (CpuFeatures::IsSupported(AVX)) {
     674             :     CpuFeatureScope scope(this, AVX);
     675      294556 :     vxorpd(dst, dst, dst);
     676             :     vcvtlsi2sd(dst, dst, src);
     677             :   } else {
     678         849 :     xorpd(dst, dst);
     679         849 :     cvtlsi2sd(dst, src);
     680             :   }
     681      295408 : }
     682             : 
     683        4062 : void TurboAssembler::Cvtlsi2sd(XMMRegister dst, const Operand& src) {
     684        4062 :   if (CpuFeatures::IsSupported(AVX)) {
     685             :     CpuFeatureScope scope(this, AVX);
     686        4062 :     vxorpd(dst, dst, dst);
     687             :     vcvtlsi2sd(dst, dst, src);
     688             :   } else {
     689           0 :     xorpd(dst, dst);
     690           0 :     cvtlsi2sd(dst, src);
     691             :   }
     692        4062 : }
     693             : 
     694         555 : void TurboAssembler::Cvtlsi2ss(XMMRegister dst, Register src) {
     695         555 :   if (CpuFeatures::IsSupported(AVX)) {
     696             :     CpuFeatureScope scope(this, AVX);
     697         553 :     vxorps(dst, dst, dst);
     698             :     vcvtlsi2ss(dst, dst, src);
     699             :   } else {
     700           2 :     xorps(dst, dst);
     701           2 :     cvtlsi2ss(dst, src);
     702             :   }
     703         555 : }
     704             : 
     705          10 : void TurboAssembler::Cvtlsi2ss(XMMRegister dst, const Operand& src) {
     706          10 :   if (CpuFeatures::IsSupported(AVX)) {
     707             :     CpuFeatureScope scope(this, AVX);
     708          10 :     vxorps(dst, dst, dst);
     709             :     vcvtlsi2ss(dst, dst, src);
     710             :   } else {
     711           0 :     xorps(dst, dst);
     712           0 :     cvtlsi2ss(dst, src);
     713             :   }
     714          10 : }
     715             : 
     716         262 : void TurboAssembler::Cvtqsi2ss(XMMRegister dst, Register src) {
     717         262 :   if (CpuFeatures::IsSupported(AVX)) {
     718             :     CpuFeatureScope scope(this, AVX);
     719         262 :     vxorps(dst, dst, dst);
     720             :     vcvtqsi2ss(dst, dst, src);
     721             :   } else {
     722           0 :     xorps(dst, dst);
     723           0 :     cvtqsi2ss(dst, src);
     724             :   }
     725         263 : }
     726             : 
     727           0 : void TurboAssembler::Cvtqsi2ss(XMMRegister dst, const Operand& src) {
     728           0 :   if (CpuFeatures::IsSupported(AVX)) {
     729             :     CpuFeatureScope scope(this, AVX);
     730           0 :     vxorps(dst, dst, dst);
     731             :     vcvtqsi2ss(dst, dst, src);
     732             :   } else {
     733           0 :     xorps(dst, dst);
     734           0 :     cvtqsi2ss(dst, src);
     735             :   }
     736           0 : }
     737             : 
     738        3957 : void TurboAssembler::Cvtqsi2sd(XMMRegister dst, Register src) {
     739        3957 :   if (CpuFeatures::IsSupported(AVX)) {
     740             :     CpuFeatureScope scope(this, AVX);
     741        3875 :     vxorpd(dst, dst, dst);
     742             :     vcvtqsi2sd(dst, dst, src);
     743             :   } else {
     744          82 :     xorpd(dst, dst);
     745          82 :     cvtqsi2sd(dst, src);
     746             :   }
     747        3960 : }
     748             : 
     749           0 : void TurboAssembler::Cvtqsi2sd(XMMRegister dst, const Operand& src) {
     750           0 :   if (CpuFeatures::IsSupported(AVX)) {
     751             :     CpuFeatureScope scope(this, AVX);
     752           0 :     vxorpd(dst, dst, dst);
     753             :     vcvtqsi2sd(dst, dst, src);
     754             :   } else {
     755           0 :     xorpd(dst, dst);
     756           0 :     cvtqsi2sd(dst, src);
     757             :   }
     758           0 : }
     759             : 
     760          47 : void TurboAssembler::Cvtqui2ss(XMMRegister dst, Register src, Register tmp) {
     761             :   Label msb_set_src;
     762             :   Label jmp_return;
     763          47 :   testq(src, src);
     764          47 :   j(sign, &msb_set_src, Label::kNear);
     765          47 :   Cvtqsi2ss(dst, src);
     766          47 :   jmp(&jmp_return, Label::kNear);
     767          47 :   bind(&msb_set_src);
     768             :   movq(tmp, src);
     769             :   shrq(src, Immediate(1));
     770             :   // Recover the least significant bit to avoid rounding errors.
     771          47 :   andq(tmp, Immediate(1));
     772          47 :   orq(src, tmp);
     773          47 :   Cvtqsi2ss(dst, src);
     774          47 :   addss(dst, dst);
     775          47 :   bind(&jmp_return);
     776          47 : }
     777             : 
     778         174 : void TurboAssembler::Cvtqui2sd(XMMRegister dst, Register src, Register tmp) {
     779             :   Label msb_set_src;
     780             :   Label jmp_return;
     781         174 :   testq(src, src);
     782         175 :   j(sign, &msb_set_src, Label::kNear);
     783         175 :   Cvtqsi2sd(dst, src);
     784         176 :   jmp(&jmp_return, Label::kNear);
     785         175 :   bind(&msb_set_src);
     786             :   movq(tmp, src);
     787             :   shrq(src, Immediate(1));
     788         174 :   andq(tmp, Immediate(1));
     789         174 :   orq(src, tmp);
     790         177 :   Cvtqsi2sd(dst, src);
     791         177 :   addsd(dst, dst);
     792         176 :   bind(&jmp_return);
     793         177 : }
     794             : 
     795         271 : void TurboAssembler::Cvttss2si(Register dst, XMMRegister src) {
     796         271 :   if (CpuFeatures::IsSupported(AVX)) {
     797             :     CpuFeatureScope scope(this, AVX);
     798         271 :     vcvttss2si(dst, src);
     799             :   } else {
     800           0 :     cvttss2si(dst, src);
     801             :   }
     802         271 : }
     803             : 
     804           0 : void TurboAssembler::Cvttss2si(Register dst, const Operand& src) {
     805           0 :   if (CpuFeatures::IsSupported(AVX)) {
     806             :     CpuFeatureScope scope(this, AVX);
     807           0 :     vcvttss2si(dst, src);
     808             :   } else {
     809           0 :     cvttss2si(dst, src);
     810             :   }
     811           0 : }
     812             : 
     813       86273 : void TurboAssembler::Cvttsd2si(Register dst, XMMRegister src) {
     814       86273 :   if (CpuFeatures::IsSupported(AVX)) {
     815             :     CpuFeatureScope scope(this, AVX);
     816       86061 :     vcvttsd2si(dst, src);
     817             :   } else {
     818         212 :     cvttsd2si(dst, src);
     819             :   }
     820       86274 : }
     821             : 
     822       22436 : void TurboAssembler::Cvttsd2si(Register dst, const Operand& src) {
     823       22436 :   if (CpuFeatures::IsSupported(AVX)) {
     824             :     CpuFeatureScope scope(this, AVX);
     825       22436 :     vcvttsd2si(dst, src);
     826             :   } else {
     827           0 :     cvttsd2si(dst, src);
     828             :   }
     829       22436 : }
     830             : 
     831         289 : void TurboAssembler::Cvttss2siq(Register dst, XMMRegister src) {
     832         289 :   if (CpuFeatures::IsSupported(AVX)) {
     833             :     CpuFeatureScope scope(this, AVX);
     834         289 :     vcvttss2siq(dst, src);
     835             :   } else {
     836           0 :     cvttss2siq(dst, src);
     837             :   }
     838         288 : }
     839             : 
     840           0 : void TurboAssembler::Cvttss2siq(Register dst, const Operand& src) {
     841           0 :   if (CpuFeatures::IsSupported(AVX)) {
     842             :     CpuFeatureScope scope(this, AVX);
     843           0 :     vcvttss2siq(dst, src);
     844             :   } else {
     845           0 :     cvttss2siq(dst, src);
     846             :   }
     847           0 : }
     848             : 
     849       48968 : void TurboAssembler::Cvttsd2siq(Register dst, XMMRegister src) {
     850       48968 :   if (CpuFeatures::IsSupported(AVX)) {
     851             :     CpuFeatureScope scope(this, AVX);
     852       48733 :     vcvttsd2siq(dst, src);
     853             :   } else {
     854         235 :     cvttsd2siq(dst, src);
     855             :   }
     856       48975 : }
     857             : 
     858           0 : void TurboAssembler::Cvttsd2siq(Register dst, const Operand& src) {
     859           0 :   if (CpuFeatures::IsSupported(AVX)) {
     860             :     CpuFeatureScope scope(this, AVX);
     861           0 :     vcvttsd2siq(dst, src);
     862             :   } else {
     863           0 :     cvttsd2siq(dst, src);
     864             :   }
     865           0 : }
     866             : 
     867             : 
     868          54 : void MacroAssembler::Load(Register dst, const Operand& src, Representation r) {
     869             :   DCHECK(!r.IsDouble());
     870          54 :   if (r.IsInteger8()) {
     871           6 :     movsxbq(dst, src);
     872          48 :   } else if (r.IsUInteger8()) {
     873           6 :     movzxbl(dst, src);
     874          42 :   } else if (r.IsInteger16()) {
     875           6 :     movsxwq(dst, src);
     876          36 :   } else if (r.IsUInteger16()) {
     877           6 :     movzxwl(dst, src);
     878          30 :   } else if (r.IsInteger32()) {
     879           6 :     movl(dst, src);
     880             :   } else {
     881          24 :     movp(dst, src);
     882             :   }
     883          54 : }
     884             : 
     885             : 
     886          54 : void MacroAssembler::Store(const Operand& dst, Register src, Representation r) {
     887             :   DCHECK(!r.IsDouble());
     888          54 :   if (r.IsInteger8() || r.IsUInteger8()) {
     889          12 :     movb(dst, src);
     890          42 :   } else if (r.IsInteger16() || r.IsUInteger16()) {
     891          12 :     movw(dst, src);
     892          30 :   } else if (r.IsInteger32()) {
     893           6 :     movl(dst, src);
     894             :   } else {
     895          24 :     if (r.IsHeapObject()) {
     896           6 :       AssertNotSmi(src);
     897          18 :     } else if (r.IsSmi()) {
     898           6 :       AssertSmi(src);
     899             :     }
     900          24 :     movp(dst, src);
     901             :   }
     902          54 : }
     903             : 
     904     1498259 : void TurboAssembler::Set(Register dst, int64_t x) {
     905     1498259 :   if (x == 0) {
     906      236880 :     xorl(dst, dst);
     907     1261379 :   } else if (is_uint32(x)) {
     908      446812 :     movl(dst, Immediate(static_cast<uint32_t>(x)));
     909      814567 :   } else if (is_int32(x)) {
     910      194469 :     movq(dst, Immediate(static_cast<int32_t>(x)));
     911             :   } else {
     912      620098 :     movq(dst, x);
     913             :   }
     914     1498261 : }
     915             : 
     916        3480 : void TurboAssembler::Set(const Operand& dst, intptr_t x) {
     917             :   if (kPointerSize == kInt64Size) {
     918        3480 :     if (is_int32(x)) {
     919        3480 :       movp(dst, Immediate(static_cast<int32_t>(x)));
     920             :     } else {
     921           0 :       Set(kScratchRegister, x);
     922           0 :       movp(dst, kScratchRegister);
     923             :     }
     924             :   } else {
     925             :     movp(dst, Immediate(static_cast<int32_t>(x)));
     926             :   }
     927        3480 : }
     928             : 
     929             : 
     930             : // ----------------------------------------------------------------------------
     931             : // Smi tagging, untagging and tag detection.
     932             : 
     933         248 : Register TurboAssembler::GetSmiConstant(Smi* source) {
     934             :   STATIC_ASSERT(kSmiTag == 0);
     935             :   int value = source->value();
     936         248 :   if (value == 0) {
     937           0 :     xorl(kScratchRegister, kScratchRegister);
     938           0 :     return kScratchRegister;
     939             :   }
     940         248 :   Move(kScratchRegister, source);
     941         248 :   return kScratchRegister;
     942             : }
     943             : 
     944     3719495 : void TurboAssembler::Move(Register dst, Smi* source) {
     945             :   STATIC_ASSERT(kSmiTag == 0);
     946             :   int value = source->value();
     947     3719495 :   if (value == 0) {
     948      725277 :     xorl(dst, dst);
     949             :   } else {
     950             :     Move(dst, source, Assembler::RelocInfoNone());
     951             :   }
     952     3719498 : }
     953             : 
     954        1510 : void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
     955             :   STATIC_ASSERT(kSmiTag == 0);
     956        1510 :   if (dst != src) {
     957         278 :     movl(dst, src);
     958             :   }
     959        1510 :   shlp(dst, Immediate(kSmiShift));
     960        1510 : }
     961             : 
     962         899 : void TurboAssembler::SmiToInteger32(Register dst, Register src) {
     963             :   STATIC_ASSERT(kSmiTag == 0);
     964         899 :   if (dst != src) {
     965           0 :     movp(dst, src);
     966             :   }
     967             : 
     968             :   if (SmiValuesAre32Bits()) {
     969         899 :     shrp(dst, Immediate(kSmiShift));
     970             :   } else {
     971             :     DCHECK(SmiValuesAre31Bits());
     972             :     sarl(dst, Immediate(kSmiShift));
     973             :   }
     974         899 : }
     975             : 
     976        4684 : void TurboAssembler::SmiToInteger32(Register dst, const Operand& src) {
     977             :   if (SmiValuesAre32Bits()) {
     978        9368 :     movl(dst, Operand(src, kSmiShift / kBitsPerByte));
     979             :   } else {
     980             :     DCHECK(SmiValuesAre31Bits());
     981             :     movl(dst, src);
     982             :     sarl(dst, Immediate(kSmiShift));
     983             :   }
     984        4684 : }
     985             : 
     986             : 
     987         496 : void MacroAssembler::SmiToInteger64(Register dst, Register src) {
     988             :   STATIC_ASSERT(kSmiTag == 0);
     989         496 :   if (dst != src) {
     990           0 :     movp(dst, src);
     991             :   }
     992         496 :   sarp(dst, Immediate(kSmiShift));
     993             :   if (kPointerSize == kInt32Size) {
     994             :     // Sign extend to 64-bit.
     995             :     movsxlq(dst, dst);
     996             :   }
     997         496 : }
     998             : 
     999         198 : void MacroAssembler::SmiCompare(Register smi1, Register smi2) {
    1000         198 :   AssertSmi(smi1);
    1001         198 :   AssertSmi(smi2);
    1002         198 :   cmpp(smi1, smi2);
    1003         198 : }
    1004             : 
    1005             : 
    1006         372 : void MacroAssembler::SmiCompare(Register dst, Smi* src) {
    1007         372 :   AssertSmi(dst);
    1008         372 :   Cmp(dst, src);
    1009         372 : }
    1010             : 
    1011             : 
    1012         372 : void MacroAssembler::Cmp(Register dst, Smi* src) {
    1013             :   DCHECK_NE(dst, kScratchRegister);
    1014         372 :   if (src->value() == 0) {
    1015         124 :     testp(dst, dst);
    1016             :   } else {
    1017         248 :     Register constant_reg = GetSmiConstant(src);
    1018         248 :     cmpp(dst, constant_reg);
    1019             :   }
    1020         372 : }
    1021             : 
    1022             : 
    1023           0 : void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
    1024           0 :   AssertSmi(dst);
    1025           0 :   AssertSmi(src);
    1026           0 :   cmpp(dst, src);
    1027           0 : }
    1028             : 
    1029             : 
    1030           0 : void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
    1031           0 :   AssertSmi(dst);
    1032           0 :   AssertSmi(src);
    1033           0 :   cmpp(dst, src);
    1034           0 : }
    1035             : 
    1036             : 
    1037      245683 : void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
    1038      245683 :   AssertSmi(dst);
    1039             :   if (SmiValuesAre32Bits()) {
    1040      491366 :     cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
    1041             :   } else {
    1042             :     DCHECK(SmiValuesAre31Bits());
    1043             :     cmpl(dst, Immediate(src));
    1044             :   }
    1045      245683 : }
    1046             : 
    1047             : 
    1048           0 : void MacroAssembler::Cmp(const Operand& dst, Smi* src) {
    1049             :   // The Operand cannot use the smi register.
    1050           0 :   Register smi_reg = GetSmiConstant(src);
    1051             :   DCHECK(!dst.AddressUsesRegister(smi_reg));
    1052           0 :   cmpp(dst, smi_reg);
    1053           0 : }
    1054             : 
    1055             : 
    1056          48 : Condition TurboAssembler::CheckSmi(Register src) {
    1057             :   STATIC_ASSERT(kSmiTag == 0);
    1058      455805 :   testb(src, Immediate(kSmiTagMask));
    1059          48 :   return zero;
    1060             : }
    1061             : 
    1062           0 : Condition TurboAssembler::CheckSmi(const Operand& src) {
    1063             :   STATIC_ASSERT(kSmiTag == 0);
    1064          41 :   testb(src, Immediate(kSmiTagMask));
    1065           0 :   return zero;
    1066             : }
    1067             : 
    1068      455633 : void TurboAssembler::JumpIfSmi(Register src, Label* on_smi,
    1069             :                                Label::Distance near_jump) {
    1070             :   Condition smi = CheckSmi(src);
    1071      455633 :   j(smi, on_smi, near_jump);
    1072      455633 : }
    1073             : 
    1074             : 
    1075         124 : void MacroAssembler::JumpIfNotSmi(Register src,
    1076             :                                   Label* on_not_smi,
    1077             :                                   Label::Distance near_jump) {
    1078             :   Condition smi = CheckSmi(src);
    1079         124 :   j(NegateCondition(smi), on_not_smi, near_jump);
    1080         124 : }
    1081             : 
    1082          31 : void MacroAssembler::JumpIfNotSmi(Operand src, Label* on_not_smi,
    1083             :                                   Label::Distance near_jump) {
    1084             :   Condition smi = CheckSmi(src);
    1085          31 :   j(NegateCondition(smi), on_not_smi, near_jump);
    1086          31 : }
    1087             : 
    1088          31 : void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
    1089          31 :   if (constant->value() != 0) {
    1090             :     if (SmiValuesAre32Bits()) {
    1091             :       addl(Operand(dst, kSmiShift / kBitsPerByte),
    1092          62 :            Immediate(constant->value()));
    1093             :     } else {
    1094             :       DCHECK(SmiValuesAre31Bits());
    1095             :       addp(dst, Immediate(constant));
    1096             :     }
    1097             :   }
    1098          31 : }
    1099             : 
    1100         635 : SmiIndex MacroAssembler::SmiToIndex(Register dst,
    1101             :                                     Register src,
    1102             :                                     int shift) {
    1103             :   if (SmiValuesAre32Bits()) {
    1104             :     DCHECK(is_uint6(shift));
    1105             :     // There is a possible optimization if shift is in the range 60-63, but that
    1106             :     // will (and must) never happen.
    1107         635 :     if (dst != src) {
    1108         240 :       movp(dst, src);
    1109             :     }
    1110         635 :     if (shift < kSmiShift) {
    1111         635 :       sarp(dst, Immediate(kSmiShift - shift));
    1112             :     } else {
    1113           0 :       shlp(dst, Immediate(shift - kSmiShift));
    1114             :     }
    1115         635 :     return SmiIndex(dst, times_1);
    1116             :   } else {
    1117             :     DCHECK(SmiValuesAre31Bits());
    1118             :     DCHECK(shift >= times_1 && shift <= (static_cast<int>(times_8) + 1));
    1119             :     if (dst != src) {
    1120             :       movp(dst, src);
    1121             :     }
    1122             :     // We have to sign extend the index register to 64-bit as the SMI might
    1123             :     // be negative.
    1124             :     movsxlq(dst, dst);
    1125             :     if (shift == times_1) {
    1126             :       sarq(dst, Immediate(kSmiShift));
    1127             :       return SmiIndex(dst, times_1);
    1128             :     }
    1129             :     return SmiIndex(dst, static_cast<ScaleFactor>(shift - 1));
    1130             :   }
    1131             : }
    1132             : 
    1133      100658 : void TurboAssembler::Push(Smi* source) {
    1134      100658 :   intptr_t smi = reinterpret_cast<intptr_t>(source);
    1135      100658 :   if (is_int32(smi)) {
    1136         909 :     Push(Immediate(static_cast<int32_t>(smi)));
    1137         909 :     return;
    1138             :   }
    1139      199498 :   int first_byte_set = base::bits::CountTrailingZeros64(smi) / 8;
    1140       99749 :   int last_byte_set = (63 - base::bits::CountLeadingZeros64(smi)) / 8;
    1141       99749 :   if (first_byte_set == last_byte_set && kPointerSize == kInt64Size) {
    1142             :     // This sequence has only 7 bytes, compared to the 12 bytes below.
    1143             :     Push(Immediate(0));
    1144             :     movb(Operand(rsp, first_byte_set),
    1145      199498 :          Immediate(static_cast<int8_t>(smi >> (8 * first_byte_set))));
    1146       99749 :     return;
    1147             :   }
    1148           0 :   Register constant = GetSmiConstant(source);
    1149             :   Push(constant);
    1150             : }
    1151             : 
    1152             : // ----------------------------------------------------------------------------
    1153             : 
    1154       11232 : void TurboAssembler::Move(Register dst, Register src) {
    1155       11232 :   if (dst != src) {
    1156       11232 :     movp(dst, src);
    1157             :   }
    1158       11232 : }
    1159             : 
    1160     3113103 : void TurboAssembler::MoveNumber(Register dst, double value) {
    1161             :   int32_t smi;
    1162     3113103 :   if (DoubleToSmiInteger(value, &smi)) {
    1163     3070012 :     Move(dst, Smi::FromInt(smi));
    1164             :   } else {
    1165       43091 :     movp_heap_number(dst, value);
    1166             :   }
    1167     3113100 : }
    1168             : 
    1169      150518 : void TurboAssembler::Move(XMMRegister dst, uint32_t src) {
    1170      150518 :   if (src == 0) {
    1171        9406 :     Xorpd(dst, dst);
    1172             :   } else {
    1173             :     unsigned pop = base::bits::CountPopulation(src);
    1174             :     DCHECK_NE(0u, pop);
    1175      141112 :     if (pop == 32) {
    1176          30 :       Pcmpeqd(dst, dst);
    1177             :     } else {
    1178      141082 :       movl(kScratchRegister, Immediate(src));
    1179      141083 :       Movq(dst, kScratchRegister);
    1180             :     }
    1181             :   }
    1182      150521 : }
    1183             : 
    1184      431504 : void TurboAssembler::Move(XMMRegister dst, uint64_t src) {
    1185      431504 :   if (src == 0) {
    1186       97004 :     Xorpd(dst, dst);
    1187             :   } else {
    1188             :     unsigned nlz = base::bits::CountLeadingZeros64(src);
    1189             :     unsigned ntz = base::bits::CountTrailingZeros64(src);
    1190             :     unsigned pop = base::bits::CountPopulation(src);
    1191             :     DCHECK_NE(0u, pop);
    1192      334500 :     if (pop == 64) {
    1193          30 :       Pcmpeqd(dst, dst);
    1194      334470 :     } else if (pop + ntz == 64) {
    1195       29280 :       Pcmpeqd(dst, dst);
    1196       29280 :       Psllq(dst, ntz);
    1197      305190 :     } else if (pop + nlz == 64) {
    1198        6062 :       Pcmpeqd(dst, dst);
    1199        6062 :       Psrlq(dst, nlz);
    1200             :     } else {
    1201      299128 :       uint32_t lower = static_cast<uint32_t>(src);
    1202      299128 :       uint32_t upper = static_cast<uint32_t>(src >> 32);
    1203      299128 :       if (upper == 0) {
    1204         120 :         Move(dst, lower);
    1205             :       } else {
    1206      299008 :         movq(kScratchRegister, src);
    1207      299009 :         Movq(dst, kScratchRegister);
    1208             :       }
    1209             :     }
    1210             :   }
    1211      431507 : }
    1212             : 
    1213         502 : void TurboAssembler::Movaps(XMMRegister dst, XMMRegister src) {
    1214         502 :   if (CpuFeatures::IsSupported(AVX)) {
    1215             :     CpuFeatureScope scope(this, AVX);
    1216         502 :     vmovaps(dst, src);
    1217             :   } else {
    1218           0 :     movaps(dst, src);
    1219             :   }
    1220         503 : }
    1221             : 
    1222           0 : void TurboAssembler::Movups(XMMRegister dst, XMMRegister src) {
    1223           0 :   if (CpuFeatures::IsSupported(AVX)) {
    1224             :     CpuFeatureScope scope(this, AVX);
    1225           0 :     vmovups(dst, src);
    1226             :   } else {
    1227           0 :     movups(dst, src);
    1228             :   }
    1229           0 : }
    1230             : 
    1231          12 : void TurboAssembler::Movups(XMMRegister dst, const Operand& src) {
    1232          12 :   if (CpuFeatures::IsSupported(AVX)) {
    1233             :     CpuFeatureScope scope(this, AVX);
    1234          12 :     vmovups(dst, src);
    1235             :   } else {
    1236           0 :     movups(dst, src);
    1237             :   }
    1238          12 : }
    1239             : 
    1240          12 : void TurboAssembler::Movups(const Operand& dst, XMMRegister src) {
    1241          12 :   if (CpuFeatures::IsSupported(AVX)) {
    1242             :     CpuFeatureScope scope(this, AVX);
    1243          12 :     vmovups(dst, src);
    1244             :   } else {
    1245           0 :     movups(dst, src);
    1246             :   }
    1247          12 : }
    1248             : 
    1249      105344 : void TurboAssembler::Movapd(XMMRegister dst, XMMRegister src) {
    1250      105344 :   if (CpuFeatures::IsSupported(AVX)) {
    1251             :     CpuFeatureScope scope(this, AVX);
    1252      104924 :     vmovapd(dst, src);
    1253             :   } else {
    1254         420 :     movapd(dst, src);
    1255             :   }
    1256      105356 : }
    1257             : 
    1258        4239 : void TurboAssembler::Movsd(XMMRegister dst, XMMRegister src) {
    1259        4239 :   if (CpuFeatures::IsSupported(AVX)) {
    1260             :     CpuFeatureScope scope(this, AVX);
    1261        4225 :     vmovsd(dst, dst, src);
    1262             :   } else {
    1263          14 :     movsd(dst, src);
    1264             :   }
    1265        4239 : }
    1266             : 
    1267     1231569 : void TurboAssembler::Movsd(XMMRegister dst, const Operand& src) {
    1268     1231569 :   if (CpuFeatures::IsSupported(AVX)) {
    1269             :     CpuFeatureScope scope(this, AVX);
    1270     1229102 :     vmovsd(dst, src);
    1271             :   } else {
    1272        2467 :     movsd(dst, src);
    1273             :   }
    1274     1231578 : }
    1275             : 
    1276     1079654 : void TurboAssembler::Movsd(const Operand& dst, XMMRegister src) {
    1277     1079654 :   if (CpuFeatures::IsSupported(AVX)) {
    1278             :     CpuFeatureScope scope(this, AVX);
    1279     1078017 :     vmovsd(dst, src);
    1280             :   } else {
    1281        1637 :     movsd(dst, src);
    1282             :   }
    1283     1079660 : }
    1284             : 
    1285         122 : void TurboAssembler::Movss(XMMRegister dst, XMMRegister src) {
    1286         122 :   if (CpuFeatures::IsSupported(AVX)) {
    1287             :     CpuFeatureScope scope(this, AVX);
    1288         122 :     vmovss(dst, dst, src);
    1289             :   } else {
    1290           0 :     movss(dst, src);
    1291             :   }
    1292         122 : }
    1293             : 
    1294        7720 : void TurboAssembler::Movss(XMMRegister dst, const Operand& src) {
    1295        7720 :   if (CpuFeatures::IsSupported(AVX)) {
    1296             :     CpuFeatureScope scope(this, AVX);
    1297        7720 :     vmovss(dst, src);
    1298             :   } else {
    1299           0 :     movss(dst, src);
    1300             :   }
    1301        7720 : }
    1302             : 
    1303      581143 : void TurboAssembler::Movss(const Operand& dst, XMMRegister src) {
    1304      581143 :   if (CpuFeatures::IsSupported(AVX)) {
    1305             :     CpuFeatureScope scope(this, AVX);
    1306      581143 :     vmovss(dst, src);
    1307             :   } else {
    1308           0 :     movss(dst, src);
    1309             :   }
    1310      581143 : }
    1311             : 
    1312         218 : void TurboAssembler::Movd(XMMRegister dst, Register src) {
    1313         218 :   if (CpuFeatures::IsSupported(AVX)) {
    1314             :     CpuFeatureScope scope(this, AVX);
    1315         218 :     vmovd(dst, src);
    1316             :   } else {
    1317           0 :     movd(dst, src);
    1318             :   }
    1319         218 : }
    1320             : 
    1321           0 : void TurboAssembler::Movd(XMMRegister dst, const Operand& src) {
    1322           0 :   if (CpuFeatures::IsSupported(AVX)) {
    1323             :     CpuFeatureScope scope(this, AVX);
    1324           0 :     vmovd(dst, src);
    1325             :   } else {
    1326           0 :     movd(dst, src);
    1327             :   }
    1328           0 : }
    1329             : 
    1330       63011 : void TurboAssembler::Movd(Register dst, XMMRegister src) {
    1331       63011 :   if (CpuFeatures::IsSupported(AVX)) {
    1332             :     CpuFeatureScope scope(this, AVX);
    1333       63011 :     vmovd(dst, src);
    1334             :   } else {
    1335           0 :     movd(dst, src);
    1336             :   }
    1337       63011 : }
    1338             : 
    1339      440360 : void TurboAssembler::Movq(XMMRegister dst, Register src) {
    1340      440360 :   if (CpuFeatures::IsSupported(AVX)) {
    1341             :     CpuFeatureScope scope(this, AVX);
    1342      439738 :     vmovq(dst, src);
    1343             :   } else {
    1344         622 :     movq(dst, src);
    1345             :   }
    1346      440365 : }
    1347             : 
    1348       62390 : void TurboAssembler::Movq(Register dst, XMMRegister src) {
    1349       62390 :   if (CpuFeatures::IsSupported(AVX)) {
    1350             :     CpuFeatureScope scope(this, AVX);
    1351       62390 :     vmovq(dst, src);
    1352             :   } else {
    1353           0 :     movq(dst, src);
    1354             :   }
    1355       62390 : }
    1356             : 
    1357         122 : void TurboAssembler::Movmskps(Register dst, XMMRegister src) {
    1358         122 :   if (CpuFeatures::IsSupported(AVX)) {
    1359             :     CpuFeatureScope scope(this, AVX);
    1360         122 :     vmovmskps(dst, src);
    1361             :   } else {
    1362           0 :     movmskps(dst, src);
    1363             :   }
    1364         122 : }
    1365             : 
    1366         442 : void TurboAssembler::Movmskpd(Register dst, XMMRegister src) {
    1367         442 :   if (CpuFeatures::IsSupported(AVX)) {
    1368             :     CpuFeatureScope scope(this, AVX);
    1369         438 :     vmovmskpd(dst, src);
    1370             :   } else {
    1371           4 :     movmskpd(dst, src);
    1372             :   }
    1373         442 : }
    1374             : 
    1375        7734 : void TurboAssembler::Xorps(XMMRegister dst, XMMRegister src) {
    1376        7734 :   if (CpuFeatures::IsSupported(AVX)) {
    1377             :     CpuFeatureScope scope(this, AVX);
    1378        7734 :     vxorps(dst, dst, src);
    1379             :   } else {
    1380           0 :     xorps(dst, src);
    1381             :   }
    1382        7734 : }
    1383             : 
    1384          12 : void TurboAssembler::Xorps(XMMRegister dst, const Operand& src) {
    1385          12 :   if (CpuFeatures::IsSupported(AVX)) {
    1386             :     CpuFeatureScope scope(this, AVX);
    1387          12 :     vxorps(dst, dst, src);
    1388             :   } else {
    1389           0 :     xorps(dst, src);
    1390             :   }
    1391          12 : }
    1392             : 
    1393         512 : void TurboAssembler::Roundss(XMMRegister dst, XMMRegister src,
    1394             :                              RoundingMode mode) {
    1395         512 :   if (CpuFeatures::IsSupported(AVX)) {
    1396             :     CpuFeatureScope scope(this, AVX);
    1397         512 :     vroundss(dst, dst, src, mode);
    1398             :   } else {
    1399           0 :     roundss(dst, src, mode);
    1400             :   }
    1401         512 : }
    1402             : 
    1403       18795 : void TurboAssembler::Roundsd(XMMRegister dst, XMMRegister src,
    1404             :                              RoundingMode mode) {
    1405       18795 :   if (CpuFeatures::IsSupported(AVX)) {
    1406             :     CpuFeatureScope scope(this, AVX);
    1407       18795 :     vroundsd(dst, dst, src, mode);
    1408             :   } else {
    1409           0 :     roundsd(dst, src, mode);
    1410             :   }
    1411       18796 : }
    1412             : 
    1413         421 : void TurboAssembler::Sqrtsd(XMMRegister dst, XMMRegister src) {
    1414         421 :   if (CpuFeatures::IsSupported(AVX)) {
    1415             :     CpuFeatureScope scope(this, AVX);
    1416         419 :     vsqrtsd(dst, dst, src);
    1417             :   } else {
    1418           2 :     sqrtsd(dst, src);
    1419             :   }
    1420         421 : }
    1421             : 
    1422           0 : void TurboAssembler::Sqrtsd(XMMRegister dst, const Operand& src) {
    1423           0 :   if (CpuFeatures::IsSupported(AVX)) {
    1424             :     CpuFeatureScope scope(this, AVX);
    1425           0 :     vsqrtsd(dst, dst, src);
    1426             :   } else {
    1427           0 :     sqrtsd(dst, src);
    1428             :   }
    1429           0 : }
    1430             : 
    1431         191 : void TurboAssembler::Ucomiss(XMMRegister src1, XMMRegister src2) {
    1432         191 :   if (CpuFeatures::IsSupported(AVX)) {
    1433             :     CpuFeatureScope scope(this, AVX);
    1434         191 :     vucomiss(src1, src2);
    1435             :   } else {
    1436           0 :     ucomiss(src1, src2);
    1437             :   }
    1438         191 : }
    1439             : 
    1440          48 : void TurboAssembler::Ucomiss(XMMRegister src1, const Operand& src2) {
    1441          48 :   if (CpuFeatures::IsSupported(AVX)) {
    1442             :     CpuFeatureScope scope(this, AVX);
    1443          48 :     vucomiss(src1, src2);
    1444             :   } else {
    1445           0 :     ucomiss(src1, src2);
    1446             :   }
    1447          48 : }
    1448             : 
    1449        1975 : void TurboAssembler::Ucomisd(XMMRegister src1, XMMRegister src2) {
    1450        1975 :   if (CpuFeatures::IsSupported(AVX)) {
    1451             :     CpuFeatureScope scope(this, AVX);
    1452         611 :     vucomisd(src1, src2);
    1453             :   } else {
    1454        1364 :     ucomisd(src1, src2);
    1455             :   }
    1456        1975 : }
    1457             : 
    1458          32 : void TurboAssembler::Ucomisd(XMMRegister src1, const Operand& src2) {
    1459          32 :   if (CpuFeatures::IsSupported(AVX)) {
    1460             :     CpuFeatureScope scope(this, AVX);
    1461          32 :     vucomisd(src1, src2);
    1462             :   } else {
    1463           0 :     ucomisd(src1, src2);
    1464             :   }
    1465          32 : }
    1466             : 
    1467             : // ----------------------------------------------------------------------------
    1468             : 
    1469           6 : void MacroAssembler::Absps(XMMRegister dst) {
    1470             :   Andps(dst,
    1471           6 :         ExternalOperand(ExternalReference::address_of_float_abs_constant()));
    1472           6 : }
    1473             : 
    1474           6 : void MacroAssembler::Negps(XMMRegister dst) {
    1475             :   Xorps(dst,
    1476           6 :         ExternalOperand(ExternalReference::address_of_float_neg_constant()));
    1477           6 : }
    1478             : 
    1479           6 : void MacroAssembler::Abspd(XMMRegister dst) {
    1480             :   Andps(dst,
    1481           6 :         ExternalOperand(ExternalReference::address_of_double_abs_constant()));
    1482           6 : }
    1483             : 
    1484           6 : void MacroAssembler::Negpd(XMMRegister dst) {
    1485             :   Xorps(dst,
    1486           6 :         ExternalOperand(ExternalReference::address_of_double_neg_constant()));
    1487           6 : }
    1488             : 
    1489        7192 : void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
    1490             :   AllowDeferredHandleDereference smi_check;
    1491        7192 :   if (source->IsSmi()) {
    1492           0 :     Cmp(dst, Smi::cast(*source));
    1493             :   } else {
    1494             :     Move(kScratchRegister, Handle<HeapObject>::cast(source));
    1495        7192 :     cmpp(dst, kScratchRegister);
    1496             :   }
    1497        7192 : }
    1498             : 
    1499             : 
    1500        3602 : void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
    1501             :   AllowDeferredHandleDereference smi_check;
    1502        3602 :   if (source->IsSmi()) {
    1503           0 :     Cmp(dst, Smi::cast(*source));
    1504             :   } else {
    1505             :     Move(kScratchRegister, Handle<HeapObject>::cast(source));
    1506        3602 :     cmpp(dst, kScratchRegister);
    1507             :   }
    1508        3602 : }
    1509             : 
// Push a heap-object constant: materialize it in the scratch register, then
// push that register.
void TurboAssembler::Push(Handle<HeapObject> source) {
  Move(kScratchRegister, source);
  Push(kScratchRegister);
}
    1514             : 
// Load the handle's object address into |result| as a relocatable pointer
// immediate, tagged with |rmode| so the GC/serializer can patch it.
void TurboAssembler::Move(Register result, Handle<HeapObject> object,
                          RelocInfo::Mode rmode) {
  movp(result, reinterpret_cast<void*>(object.address()), rmode);
}
    1519             : 
// Store a relocatable heap-object pointer to memory, staged through the
// scratch register (there is no memory-destination form with relocation).
void TurboAssembler::Move(const Operand& dst, Handle<HeapObject> object,
                          RelocInfo::Mode rmode) {
  Move(kScratchRegister, object, rmode);
  movp(dst, kScratchRegister);
}
    1525             : 
// Load the payload of a WeakCell into |value|: first the cell itself (as an
// embedded object), then its value field.
void MacroAssembler::GetWeakValue(Register value, Handle<WeakCell> cell) {
  Move(value, cell, RelocInfo::EMBEDDED_OBJECT);
  movp(value, FieldOperand(value, WeakCell::kValueOffset));
}
    1530             : 
    1531             : 
// Load a WeakCell's value and branch to |miss| if the cell has been cleared.
// JumpIfSmi is the clear-check: a cleared cell's value field holds a Smi.
void MacroAssembler::LoadWeakValue(Register value, Handle<WeakCell> cell,
                                   Label* miss) {
  GetWeakValue(value, cell);
  JumpIfSmi(value, miss);
}
    1537             : 
    1538             : 
    1539         233 : void MacroAssembler::Drop(int stack_elements) {
    1540         233 :   if (stack_elements > 0) {
    1541         466 :     addp(rsp, Immediate(stack_elements * kPointerSize));
    1542             :   }
    1543         233 : }
    1544             : 
    1545             : 
// Remove |stack_elements| slots that sit below the return address, keeping
// the return address on top of the stack afterwards.
void MacroAssembler::DropUnderReturnAddress(int stack_elements,
                                            Register scratch) {
  DCHECK_GT(stack_elements, 0);
  if (kPointerSize == kInt64Size && stack_elements == 1) {
    // Single-slot fast path: pop the return address directly onto the slot
    // being dropped, collapsing both operations into one popq.
    popq(MemOperand(rsp, 0));
    return;
  }

  // General path: lift the return address into |scratch|, drop the slots,
  // then push the return address back.
  PopReturnAddressTo(scratch);
  Drop(stack_elements);
  PushReturnAddressFrom(scratch);
}
    1558             : 
    1559     1276479 : void TurboAssembler::Push(Register src) {
    1560             :   if (kPointerSize == kInt64Size) {
    1561     2258923 :     pushq(src);
    1562             :   } else {
    1563             :     // x32 uses 64-bit push for rbp in the prologue.
    1564             :     DCHECK(src.code() != rbp.code());
    1565             :     leal(rsp, Operand(rsp, -4));
    1566             :     movp(Operand(rsp, 0), src);
    1567             :   }
    1568     1276479 : }
    1569             : 
    1570       49979 : void TurboAssembler::Push(const Operand& src) {
    1571             :   if (kPointerSize == kInt64Size) {
    1572       60946 :     pushq(src);
    1573             :   } else {
    1574             :     movp(kScratchRegister, src);
    1575             :     leal(rsp, Operand(rsp, -4));
    1576             :     movp(Operand(rsp, 0), kScratchRegister);
    1577             :   }
    1578       49979 : }
    1579             : 
    1580             : 
    1581      691776 : void MacroAssembler::PushQuad(const Operand& src) {
    1582             :   if (kPointerSize == kInt64Size) {
    1583      691776 :     pushq(src);
    1584             :   } else {
    1585             :     movp(kScratchRegister, src);
    1586             :     pushq(kScratchRegister);
    1587             :   }
    1588      691776 : }
    1589             : 
    1590      188338 : void TurboAssembler::Push(Immediate value) {
    1591             :   if (kPointerSize == kInt64Size) {
    1592      696576 :     pushq(value);
    1593             :   } else {
    1594             :     leal(rsp, Operand(rsp, -4));
    1595             :     movp(Operand(rsp, 0), value);
    1596             :   }
    1597      188338 : }
    1598             : 
    1599             : 
    1600           0 : void MacroAssembler::PushImm32(int32_t imm32) {
    1601             :   if (kPointerSize == kInt64Size) {
    1602           0 :     pushq_imm32(imm32);
    1603             :   } else {
    1604             :     leal(rsp, Operand(rsp, -4));
    1605             :     movp(Operand(rsp, 0), Immediate(imm32));
    1606             :   }
    1607           0 : }
    1608             : 
    1609             : 
    1610      634915 : void MacroAssembler::Pop(Register dst) {
    1611             :   if (kPointerSize == kInt64Size) {
    1612      635783 :     popq(dst);
    1613             :   } else {
    1614             :     // x32 uses 64-bit pop for rbp in the epilogue.
    1615             :     DCHECK(dst.code() != rbp.code());
    1616             :     movp(dst, Operand(rsp, 0));
    1617             :     leal(rsp, Operand(rsp, 4));
    1618             :   }
    1619      634915 : }
    1620             : 
    1621             : 
    1622       38494 : void MacroAssembler::Pop(const Operand& dst) {
    1623             :   if (kPointerSize == kInt64Size) {
    1624       38556 :     popq(dst);
    1625             :   } else {
    1626             :     Register scratch = dst.AddressUsesRegister(kScratchRegister)
    1627             :         ? kRootRegister : kScratchRegister;
    1628             :     movp(scratch, Operand(rsp, 0));
    1629             :     movp(dst, scratch);
    1630             :     leal(rsp, Operand(rsp, 4));
    1631             :     if (scratch == kRootRegister) {
    1632             :       // Restore kRootRegister.
    1633             :       InitializeRootRegister();
    1634             :     }
    1635             :   }
    1636       38494 : }
    1637             : 
    1638             : 
    1639      614912 : void MacroAssembler::PopQuad(const Operand& dst) {
    1640             :   if (kPointerSize == kInt64Size) {
    1641      614912 :     popq(dst);
    1642             :   } else {
    1643             :     popq(kScratchRegister);
    1644             :     movp(dst, kScratchRegister);
    1645             :   }
    1646      614912 : }
    1647             : 
    1648             : 
// Indirect jump to an external (C++) address: load it into the scratch
// register, then jump through it.
void MacroAssembler::Jump(ExternalReference ext) {
  LoadAddress(kScratchRegister, ext);
  jmp(kScratchRegister);
}
    1653             : 
    1654             : 
    1655           0 : void MacroAssembler::Jump(const Operand& op) {
    1656             :   if (kPointerSize == kInt64Size) {
    1657           0 :     jmp(op);
    1658             :   } else {
    1659             :     movp(kScratchRegister, op);
    1660             :     jmp(kScratchRegister);
    1661             :   }
    1662           0 : }
    1663             : 
    1664             : 
// Jump to an absolute address with relocation mode |rmode|, staged through
// the scratch register.
void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
  Move(kScratchRegister, destination, rmode);
  jmp(kScratchRegister);
}
    1669             : 
    1670             : 
// Jump to a code object via a relocated jmp.
void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
  // TODO(X64): Inline this
  jmp(code_object, rmode);
}
    1675             : 
// Byte length of the code Call(ExternalReference) emits: the address load
// plus the register-indirect call.
int TurboAssembler::CallSize(ExternalReference ext) {
  // Opcode for call kScratchRegister is: Rex.B FF D4 (three bytes).
  return LoadAddressSize(ext) +
         Assembler::kCallScratchRegisterInstructionLength;
}
    1681             : 
// Call an external (C++) address through the scratch register. Debug builds
// verify the emitted length matches CallSize(ext).
void TurboAssembler::Call(ExternalReference ext) {
#ifdef DEBUG
  int end_position = pc_offset() + CallSize(ext);
#endif
  LoadAddress(kScratchRegister, ext);
  call(kScratchRegister);
#ifdef DEBUG
  DCHECK_EQ(end_position, pc_offset());
#endif
}
    1692             : 
    1693           0 : void TurboAssembler::Call(const Operand& op) {
    1694           0 :   if (kPointerSize == kInt64Size && !CpuFeatures::IsSupported(ATOM)) {
    1695           0 :     call(op);
    1696             :   } else {
    1697           0 :     movp(kScratchRegister, op);
    1698           0 :     call(kScratchRegister);
    1699             :   }
    1700           0 : }
    1701             : 
// Call an absolute address with relocation mode |rmode|, staged through the
// scratch register. Debug builds verify the emitted length.
void TurboAssembler::Call(Address destination, RelocInfo::Mode rmode) {
#ifdef DEBUG
  int end_position = pc_offset() + CallSize(destination);
#endif
  Move(kScratchRegister, destination, rmode);
  call(kScratchRegister);
#ifdef DEBUG
  DCHECK_EQ(pc_offset(), end_position);
#endif
}
    1712             : 
// Call a code object; |rmode| must be a code-target relocation mode. Debug
// builds verify the emitted length matches CallSize(code_object).
void TurboAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
#ifdef DEBUG
  int end_position = pc_offset() + CallSize(code_object);
#endif
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  call(code_object, rmode);
#ifdef DEBUG
  DCHECK_EQ(end_position, pc_offset());
#endif
}
    1723             : 
    1724       30603 : void TurboAssembler::Pextrd(Register dst, XMMRegister src, int8_t imm8) {
    1725       30603 :   if (imm8 == 0) {
    1726         222 :     Movd(dst, src);
    1727         222 :     return;
    1728             :   }
    1729       30381 :   if (CpuFeatures::IsSupported(SSE4_1)) {
    1730             :     CpuFeatureScope sse_scope(this, SSE4_1);
    1731       30247 :     pextrd(dst, src, imm8);
    1732             :     return;
    1733             :   }
    1734             :   DCHECK_EQ(1, imm8);
    1735         134 :   movq(dst, src);
    1736             :   shrq(dst, Immediate(32));
    1737             : }
    1738             : 
    1739          88 : void TurboAssembler::Pinsrd(XMMRegister dst, Register src, int8_t imm8) {
    1740          88 :   if (CpuFeatures::IsSupported(SSE4_1)) {
    1741             :     CpuFeatureScope sse_scope(this, SSE4_1);
    1742          88 :     pinsrd(dst, src, imm8);
    1743          88 :     return;
    1744             :   }
    1745           0 :   Movd(kScratchDoubleReg, src);
    1746           0 :   if (imm8 == 1) {
    1747           0 :     punpckldq(dst, kScratchDoubleReg);
    1748             :   } else {
    1749             :     DCHECK_EQ(0, imm8);
    1750           0 :     Movss(dst, kScratchDoubleReg);
    1751             :   }
    1752             : }
    1753             : 
    1754           0 : void TurboAssembler::Pinsrd(XMMRegister dst, const Operand& src, int8_t imm8) {
    1755             :   DCHECK(imm8 == 0 || imm8 == 1);
    1756           0 :   if (CpuFeatures::IsSupported(SSE4_1)) {
    1757             :     CpuFeatureScope sse_scope(this, SSE4_1);
    1758           0 :     pinsrd(dst, src, imm8);
    1759           0 :     return;
    1760             :   }
    1761           0 :   Movd(kScratchDoubleReg, src);
    1762           0 :   if (imm8 == 1) {
    1763           0 :     punpckldq(dst, kScratchDoubleReg);
    1764             :   } else {
    1765             :     DCHECK_EQ(0, imm8);
    1766           0 :     Movss(dst, kScratchDoubleReg);
    1767             :   }
    1768             : }
    1769             : 
    1770        1695 : void TurboAssembler::Lzcntl(Register dst, Register src) {
    1771        1695 :   if (CpuFeatures::IsSupported(LZCNT)) {
    1772             :     CpuFeatureScope scope(this, LZCNT);
    1773        1693 :     lzcntl(dst, src);
    1774        1695 :     return;
    1775             :   }
    1776             :   Label not_zero_src;
    1777           2 :   bsrl(dst, src);
    1778           2 :   j(not_zero, &not_zero_src, Label::kNear);
    1779           2 :   Set(dst, 63);  // 63^31 == 32
    1780           2 :   bind(&not_zero_src);
    1781           2 :   xorl(dst, Immediate(31));  // for x in [0..31], 31^x == 31 - x
    1782             : }
    1783             : 
    1784         160 : void TurboAssembler::Lzcntl(Register dst, const Operand& src) {
    1785         160 :   if (CpuFeatures::IsSupported(LZCNT)) {
    1786             :     CpuFeatureScope scope(this, LZCNT);
    1787         160 :     lzcntl(dst, src);
    1788         160 :     return;
    1789             :   }
    1790             :   Label not_zero_src;
    1791           0 :   bsrl(dst, src);
    1792           0 :   j(not_zero, &not_zero_src, Label::kNear);
    1793           0 :   Set(dst, 63);  // 63^31 == 32
    1794           0 :   bind(&not_zero_src);
    1795           0 :   xorl(dst, Immediate(31));  // for x in [0..31], 31^x == 31 - x
    1796             : }
    1797             : 
    1798          37 : void TurboAssembler::Lzcntq(Register dst, Register src) {
    1799          37 :   if (CpuFeatures::IsSupported(LZCNT)) {
    1800             :     CpuFeatureScope scope(this, LZCNT);
    1801          37 :     lzcntq(dst, src);
    1802          37 :     return;
    1803             :   }
    1804             :   Label not_zero_src;
    1805           0 :   bsrq(dst, src);
    1806           0 :   j(not_zero, &not_zero_src, Label::kNear);
    1807           0 :   Set(dst, 127);  // 127^63 == 64
    1808           0 :   bind(&not_zero_src);
    1809           0 :   xorl(dst, Immediate(63));  // for x in [0..63], 63^x == 63 - x
    1810             : }
    1811             : 
    1812           0 : void TurboAssembler::Lzcntq(Register dst, const Operand& src) {
    1813           0 :   if (CpuFeatures::IsSupported(LZCNT)) {
    1814             :     CpuFeatureScope scope(this, LZCNT);
    1815           0 :     lzcntq(dst, src);
    1816           0 :     return;
    1817             :   }
    1818             :   Label not_zero_src;
    1819           0 :   bsrq(dst, src);
    1820           0 :   j(not_zero, &not_zero_src, Label::kNear);
    1821           0 :   Set(dst, 127);  // 127^63 == 64
    1822           0 :   bind(&not_zero_src);
    1823           0 :   xorl(dst, Immediate(63));  // for x in [0..63], 63^x == 63 - x
    1824             : }
    1825             : 
    1826          27 : void TurboAssembler::Tzcntq(Register dst, Register src) {
    1827          27 :   if (CpuFeatures::IsSupported(BMI1)) {
    1828             :     CpuFeatureScope scope(this, BMI1);
    1829          27 :     tzcntq(dst, src);
    1830          27 :     return;
    1831             :   }
    1832             :   Label not_zero_src;
    1833           0 :   bsfq(dst, src);
    1834           0 :   j(not_zero, &not_zero_src, Label::kNear);
    1835             :   // Define the result of tzcnt(0) separately, because bsf(0) is undefined.
    1836           0 :   Set(dst, 64);
    1837           0 :   bind(&not_zero_src);
    1838             : }
    1839             : 
    1840           0 : void TurboAssembler::Tzcntq(Register dst, const Operand& src) {
    1841           0 :   if (CpuFeatures::IsSupported(BMI1)) {
    1842             :     CpuFeatureScope scope(this, BMI1);
    1843           0 :     tzcntq(dst, src);
    1844           0 :     return;
    1845             :   }
    1846             :   Label not_zero_src;
    1847           0 :   bsfq(dst, src);
    1848           0 :   j(not_zero, &not_zero_src, Label::kNear);
    1849             :   // Define the result of tzcnt(0) separately, because bsf(0) is undefined.
    1850           0 :   Set(dst, 64);
    1851           0 :   bind(&not_zero_src);
    1852             : }
    1853             : 
    1854         857 : void TurboAssembler::Tzcntl(Register dst, Register src) {
    1855         857 :   if (CpuFeatures::IsSupported(BMI1)) {
    1856             :     CpuFeatureScope scope(this, BMI1);
    1857         857 :     tzcntl(dst, src);
    1858         857 :     return;
    1859             :   }
    1860             :   Label not_zero_src;
    1861           0 :   bsfl(dst, src);
    1862           0 :   j(not_zero, &not_zero_src, Label::kNear);
    1863           0 :   Set(dst, 32);  // The result of tzcnt is 32 if src = 0.
    1864           0 :   bind(&not_zero_src);
    1865             : }
    1866             : 
    1867           0 : void TurboAssembler::Tzcntl(Register dst, const Operand& src) {
    1868           0 :   if (CpuFeatures::IsSupported(BMI1)) {
    1869             :     CpuFeatureScope scope(this, BMI1);
    1870           0 :     tzcntl(dst, src);
    1871           0 :     return;
    1872             :   }
    1873             :   Label not_zero_src;
    1874           0 :   bsfl(dst, src);
    1875           0 :   j(not_zero, &not_zero_src, Label::kNear);
    1876           0 :   Set(dst, 32);  // The result of tzcnt is 32 if src = 0.
    1877           0 :   bind(&not_zero_src);
    1878             : }
    1879             : 
    1880         157 : void TurboAssembler::Popcntl(Register dst, Register src) {
    1881         157 :   if (CpuFeatures::IsSupported(POPCNT)) {
    1882             :     CpuFeatureScope scope(this, POPCNT);
    1883         157 :     popcntl(dst, src);
    1884         157 :     return;
    1885             :   }
    1886           0 :   UNREACHABLE();
    1887             : }
    1888             : 
    1889           0 : void TurboAssembler::Popcntl(Register dst, const Operand& src) {
    1890           0 :   if (CpuFeatures::IsSupported(POPCNT)) {
    1891             :     CpuFeatureScope scope(this, POPCNT);
    1892           0 :     popcntl(dst, src);
    1893           0 :     return;
    1894             :   }
    1895           0 :   UNREACHABLE();
    1896             : }
    1897             : 
    1898          49 : void TurboAssembler::Popcntq(Register dst, Register src) {
    1899          49 :   if (CpuFeatures::IsSupported(POPCNT)) {
    1900             :     CpuFeatureScope scope(this, POPCNT);
    1901          49 :     popcntq(dst, src);
    1902          49 :     return;
    1903             :   }
    1904           0 :   UNREACHABLE();
    1905             : }
    1906             : 
    1907           0 : void TurboAssembler::Popcntq(Register dst, const Operand& src) {
    1908           0 :   if (CpuFeatures::IsSupported(POPCNT)) {
    1909             :     CpuFeatureScope scope(this, POPCNT);
    1910           0 :     popcntq(dst, src);
    1911           0 :     return;
    1912             :   }
    1913           0 :   UNREACHABLE();
    1914             : }
    1915             : 
    1916             : 
// Pushes the twelve general purpose registers saved at safepoints and then
// reserves stack space so the whole area spans kNumSafepointRegisters slots.
// rsp, rbp, r10 (kScratchRegister) and r13 (kRootRegister) are skipped.
// Popad is the exact inverse; the push order here must match the indices in
// kSafepointPushRegisterIndices below.
void MacroAssembler::Pushad() {
  Push(rax);
  Push(rcx);
  Push(rdx);
  Push(rbx);
  // Not pushing rsp or rbp.
  Push(rsi);
  Push(rdi);
  Push(r8);
  Push(r9);
  // r10 is kScratchRegister.
  Push(r11);
  Push(r12);
  // r13 is kRootRegister.
  Push(r14);
  Push(r15);
  STATIC_ASSERT(12 == kNumSafepointSavedRegisters);
  // Use lea for symmetry with Popad.
  int sp_delta =
      (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
  leap(rsp, Operand(rsp, -sp_delta));
}
    1939             : 
    1940             : 
// Restores the registers saved by Pushad, popping them in reverse order
// after dropping the unused portion of the safepoint register area.
void MacroAssembler::Popad() {
  // Popad must not change the flags, so use lea instead of addq.
  int sp_delta =
      (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
  leap(rsp, Operand(rsp, sp_delta));
  Pop(r15);
  Pop(r14);
  Pop(r12);
  Pop(r11);
  Pop(r9);
  Pop(r8);
  Pop(rdi);
  Pop(rsi);
  Pop(rbx);
  Pop(rdx);
  Pop(rcx);
  Pop(rax);
}
    1959             : 
    1960             : 
// Order general registers are pushed by Pushad:
// rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r12, r14, r15.
// Maps each register code to its slot index in the area pushed by Pushad,
// or -1 for registers Pushad does not save (rsp, rbp, r10/kScratchRegister,
// r13/kRootRegister).
const int
MacroAssembler::kSafepointPushRegisterIndices[Register::kNumRegisters] = {
    0,   // rax
    1,   // rcx
    2,   // rdx
    3,   // rbx
    -1,  // rsp
    -1,  // rbp
    4,   // rsi
    5,   // rdi
    6,   // r8
    7,   // r9
    -1,  // r10 (kScratchRegister)
    8,   // r11
    9,   // r12
    -1,  // r13 (kRootRegister)
    10,  // r14
    11   // r15
};
    1982             : 
// Pushes a new stack handler (a single word holding the next-handler link)
// and installs it as the isolate's current handler.
void MacroAssembler::PushStackHandler() {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);

  // Link the current handler as the next handler.
  ExternalReference handler_address(IsolateAddressId::kHandlerAddress,
                                    isolate());
  Push(ExternalOperand(handler_address));

  // Set this new handler as the current one.
  movp(ExternalOperand(handler_address), rsp);
}
    1996             : 
    1997             : 
// Unlinks the topmost stack handler: pops the next-handler link back into
// the isolate's handler slot and drops the remainder of the handler frame.
void MacroAssembler::PopStackHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address(IsolateAddressId::kHandlerAddress,
                                    isolate());
  Pop(ExternalOperand(handler_address));
  addp(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}
    2005             : 
    2006         124 : void TurboAssembler::Ret() { ret(0); }
    2007             : 
    2008     1233627 : void TurboAssembler::Ret(int bytes_dropped, Register scratch) {
    2009     1233627 :   if (is_uint16(bytes_dropped)) {
    2010     1233623 :     ret(bytes_dropped);
    2011             :   } else {
    2012             :     PopReturnAddressTo(scratch);
    2013           4 :     addp(rsp, Immediate(bytes_dropped));
    2014             :     PushReturnAddressFrom(scratch);
    2015           4 :     ret(0);
    2016             :   }
    2017     1233812 : }
    2018             : 
// Compares the instance type of |heap_object| against |type|, leaving the
// result in the processor flags. Loads the object's map into |map| as a
// side effect.
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  movp(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}
    2025             : 
    2026             : 
// Compares the instance type byte stored in |map| against |type|; the
// result is left in the processor flags. The type is truncated to a byte
// to match the width of the field being compared.
void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       Immediate(static_cast<int8_t>(type)));
}
    2031             : 
// Emits a delayed call to a DoubleToIStub (allocated in |zone|) that leaves
// the truncated int32 in |result_reg|.
void TurboAssembler::SlowTruncateToIDelayed(Zone* zone, Register result_reg) {
  CallStubDelayed(new (zone) DoubleToIStub(nullptr, result_reg));
}
    2035             : 
// Converts the double in |input_reg| to an int32 in |result_reg|. Branches
// to |lost_precision| if the value does not convert exactly, to |is_nan| for
// NaN, and (when minus_zero_mode == FAIL_ON_MINUS_ZERO) to |minus_zero| for
// a negative-zero input. Clobbers kScratchDoubleReg; the |scratch| XMM
// register is not used by this implementation.
void MacroAssembler::DoubleToI(Register result_reg, XMMRegister input_reg,
                               XMMRegister scratch,
                               MinusZeroMode minus_zero_mode,
                               Label* lost_precision, Label* is_nan,
                               Label* minus_zero, Label::Distance dst) {
  // Truncate, convert back, and compare with the original value to detect
  // inputs that are not exactly representable as int32.
  Cvttsd2si(result_reg, input_reg);
  Cvtlsi2sd(kScratchDoubleReg, result_reg);
  Ucomisd(kScratchDoubleReg, input_reg);
  j(not_equal, lost_precision, dst);
  j(parity_even, is_nan, dst);  // NaN.
  if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
    Label done;
    // The integer converted back is equal to the original. We
    // only have to test if we got -0 as an input.
    testl(result_reg, result_reg);
    j(not_zero, &done, Label::kNear);
    Movmskpd(result_reg, input_reg);
    // Bit 0 contains the sign of the double in input_reg.
    // If input was positive, we are ok and return 0, otherwise
    // jump to minus_zero.
    andl(result_reg, Immediate(1));
    j(not_zero, minus_zero, dst);
    bind(&done);
  }
}
    2061             : 
    2062             : 
// Loads the descriptor array of |map| into |descriptors|.
void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  movp(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
}
    2067             : 
    2068      145103 : void MacroAssembler::LoadAccessor(Register dst, Register holder,
    2069             :                                   int accessor_index,
    2070             :                                   AccessorComponent accessor) {
    2071      145103 :   movp(dst, FieldOperand(holder, HeapObject::kMapOffset));
    2072      145103 :   LoadInstanceDescriptors(dst, dst);
    2073             :   movp(dst, FieldOperand(dst, DescriptorArray::GetValueOffset(accessor_index)));
    2074             :   int offset = accessor == ACCESSOR_GETTER ? AccessorPair::kGetterOffset
    2075      145103 :                                            : AccessorPair::kSetterOffset;
    2076             :   movp(dst, FieldOperand(dst, offset));
    2077      145103 : }
    2078             : 
    2079             : 
    2080         316 : void MacroAssembler::AssertNotSmi(Register object) {
    2081         316 :   if (emit_debug_code()) {
    2082             :     Condition is_smi = CheckSmi(object);
    2083           0 :     Check(NegateCondition(is_smi), kOperandIsASmi);
    2084             :   }
    2085         316 : }
    2086             : 
    2087             : 
    2088         805 : void MacroAssembler::AssertSmi(Register object) {
    2089         805 :   if (emit_debug_code()) {
    2090             :     Condition is_smi = CheckSmi(object);
    2091           0 :     Check(is_smi, kOperandIsNotASmi);
    2092             :   }
    2093         805 : }
    2094             : 
    2095             : 
    2096      245683 : void MacroAssembler::AssertSmi(const Operand& object) {
    2097      245683 :   if (emit_debug_code()) {
    2098             :     Condition is_smi = CheckSmi(object);
    2099          10 :     Check(is_smi, kOperandIsNotASmi);
    2100             :   }
    2101      245683 : }
    2102             : 
    2103          62 : void MacroAssembler::AssertFixedArray(Register object) {
    2104          62 :   if (emit_debug_code()) {
    2105           0 :     testb(object, Immediate(kSmiTagMask));
    2106           0 :     Check(not_equal, kOperandIsASmiAndNotAFixedArray);
    2107             :     Push(object);
    2108           0 :     CmpObjectType(object, FIXED_ARRAY_TYPE, object);
    2109             :     Pop(object);
    2110           0 :     Check(equal, kOperandIsNotAFixedArray);
    2111             :   }
    2112          62 : }
    2113             : 
    2114      797259 : void TurboAssembler::AssertZeroExtended(Register int32_register) {
    2115      797259 :   if (emit_debug_code()) {
    2116             :     DCHECK_NE(int32_register, kScratchRegister);
    2117          25 :     movq(kScratchRegister, V8_INT64_C(0x0000000100000000));
    2118          25 :     cmpq(kScratchRegister, int32_register);
    2119          25 :     Check(above_equal, k32BitValueInRegisterIsNotZeroExtended);
    2120             :   }
    2121      797259 : }
    2122             : 
    2123             : 
    2124         248 : void MacroAssembler::AssertFunction(Register object) {
    2125         248 :   if (emit_debug_code()) {
    2126           0 :     testb(object, Immediate(kSmiTagMask));
    2127           0 :     Check(not_equal, kOperandIsASmiAndNotAFunction);
    2128             :     Push(object);
    2129           0 :     CmpObjectType(object, JS_FUNCTION_TYPE, object);
    2130             :     Pop(object);
    2131           0 :     Check(equal, kOperandIsNotAFunction);
    2132             :   }
    2133         248 : }
    2134             : 
    2135             : 
    2136          62 : void MacroAssembler::AssertBoundFunction(Register object) {
    2137          62 :   if (emit_debug_code()) {
    2138           0 :     testb(object, Immediate(kSmiTagMask));
    2139           0 :     Check(not_equal, kOperandIsASmiAndNotABoundFunction);
    2140             :     Push(object);
    2141           0 :     CmpObjectType(object, JS_BOUND_FUNCTION_TYPE, object);
    2142             :     Pop(object);
    2143           0 :     Check(equal, kOperandIsNotABoundFunction);
    2144             :   }
    2145          62 : }
    2146             : 
// Debug-mode check that |object| is a JSGeneratorObject or a
// JSAsyncGeneratorObject; aborts otherwise. Preserves |object| by saving it
// around the map-clobbering type checks.
void MacroAssembler::AssertGeneratorObject(Register object) {
  if (!emit_debug_code()) return;
  testb(object, Immediate(kSmiTagMask));
  Check(not_equal, kOperandIsASmiAndNotAGeneratorObject);

  // Load map
  Register map = object;
  Push(object);
  movp(map, FieldOperand(object, HeapObject::kMapOffset));

  Label do_check;
  // Check if JSGeneratorObject
  CmpInstanceType(map, JS_GENERATOR_OBJECT_TYPE);
  j(equal, &do_check);

  // Check if JSAsyncGeneratorObject
  CmpInstanceType(map, JS_ASYNC_GENERATOR_OBJECT_TYPE);

  bind(&do_check);
  // Restore generator object to register and perform assertion
  Pop(object);
  Check(equal, kOperandIsNotAGeneratorObject);
}
    2170             : 
    2171          62 : void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
    2172          62 :   if (emit_debug_code()) {
    2173             :     Label done_checking;
    2174           0 :     AssertNotSmi(object);
    2175           0 :     Cmp(object, isolate()->factory()->undefined_value());
    2176           0 :     j(equal, &done_checking);
    2177           0 :     Cmp(FieldOperand(object, 0), isolate()->factory()->allocation_site_map());
    2178           0 :     Assert(equal, kExpectedUndefinedOrCell);
    2179           0 :     bind(&done_checking);
    2180             :   }
    2181          62 : }
    2182             : 
// Loads the constructor of |map| into |result|. The constructor-or-back-
// pointer field can point at intermediate maps, so the chain is followed
// until a value that is not a map (a smi or another heap object) is found.
// |temp| is clobbered by the type check.
void MacroAssembler::GetMapConstructor(Register result, Register map,
                                       Register temp) {
  Label done, loop;
  movp(result, FieldOperand(map, Map::kConstructorOrBackPointerOffset));
  bind(&loop);
  JumpIfSmi(result, &done, Label::kNear);
  CmpObjectType(result, MAP_TYPE, temp);
  j(not_equal, &done, Label::kNear);
  movp(result, FieldOperand(result, Map::kConstructorOrBackPointerOffset));
  jmp(&loop);
  bind(&done);
}
    2195             : 
    2196        2061 : void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
    2197             :   DCHECK_GT(value, 0);
    2198        2061 :   if (FLAG_native_code_counters && counter->Enabled()) {
    2199           0 :     Operand counter_operand = ExternalOperand(ExternalReference(counter));
    2200           0 :     if (value == 1) {
    2201           0 :       incl(counter_operand);
    2202             :     } else {
    2203           0 :       addl(counter_operand, Immediate(value));
    2204             :     }
    2205             :   }
    2206        2061 : }
    2207             : 
    2208             : 
    2209         829 : void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
    2210             :   DCHECK_GT(value, 0);
    2211         829 :   if (FLAG_native_code_counters && counter->Enabled()) {
    2212           0 :     Operand counter_operand = ExternalOperand(ExternalReference(counter));
    2213           0 :     if (value == 1) {
    2214           0 :       decl(counter_operand);
    2215             :     } else {
    2216           0 :       subl(counter_operand, Immediate(value));
    2217             :     }
    2218             :   }
    2219         829 : }
    2220             : 
// Jumps to the FrameDropperTrampoline builtin when the debugger has
// requested a frame restart (non-zero restart fp). Clobbers rbx.
void MacroAssembler::MaybeDropFrames() {
  // Check whether we need to drop frames to restart a function on the stack.
  ExternalReference restart_fp =
      ExternalReference::debug_restart_fp_address(isolate());
  Load(rbx, restart_fp);
  testp(rbx, rbx);
  j(not_zero, BUILTIN_CODE(isolate(), FrameDropperTrampoline),
    RelocInfo::CODE_TARGET);
}
    2230             : 
// Prepares a tail call by removing the current frame: copies the callee's
// arguments (plus receiver and return address) down over the caller's
// argument area, restores the caller's rbp, and points rsp at the new frame
// top. |caller_args_count_reg|, |scratch0| and |scratch1| are clobbered.
void TurboAssembler::PrepareForTailCall(const ParameterCount& callee_args_count,
                                        Register caller_args_count_reg,
                                        Register scratch0, Register scratch1) {
#if DEBUG
  if (callee_args_count.is_reg()) {
    DCHECK(!AreAliased(callee_args_count.reg(), caller_args_count_reg, scratch0,
                       scratch1));
  } else {
    DCHECK(!AreAliased(caller_args_count_reg, scratch0, scratch1));
  }
#endif

  // Calculate the destination address where we will put the return address
  // after we drop current frame.
  Register new_sp_reg = scratch0;
  if (callee_args_count.is_reg()) {
    subp(caller_args_count_reg, callee_args_count.reg());
    leap(new_sp_reg, Operand(rbp, caller_args_count_reg, times_pointer_size,
                             StandardFrameConstants::kCallerPCOffset));
  } else {
    leap(new_sp_reg, Operand(rbp, caller_args_count_reg, times_pointer_size,
                             StandardFrameConstants::kCallerPCOffset -
                                 callee_args_count.immediate() * kPointerSize));
  }

  if (FLAG_debug_code) {
    cmpp(rsp, new_sp_reg);
    Check(below, kStackAccessBelowStackPointer);
  }

  // Copy return address from caller's frame to current frame's return address
  // to avoid its trashing and let the following loop copy it to the right
  // place.
  Register tmp_reg = scratch1;
  movp(tmp_reg, Operand(rbp, StandardFrameConstants::kCallerPCOffset));
  movp(Operand(rsp, 0), tmp_reg);

  // Restore caller's frame pointer now as it could be overwritten by
  // the copying loop.
  movp(rbp, Operand(rbp, StandardFrameConstants::kCallerFPOffset));

  // +2 here is to copy both receiver and return address.
  Register count_reg = caller_args_count_reg;
  if (callee_args_count.is_reg()) {
    leap(count_reg, Operand(callee_args_count.reg(), 2));
  } else {
    movp(count_reg, Immediate(callee_args_count.immediate() + 2));
    // TODO(ishell): Unroll copying loop for small immediate values.
  }

  // Now copy callee arguments to the caller frame going backwards to avoid
  // callee arguments corruption (source and destination areas could overlap).
  Label loop, entry;
  jmp(&entry, Label::kNear);
  bind(&loop);
  decp(count_reg);
  movp(tmp_reg, Operand(rsp, count_reg, times_pointer_size, 0));
  movp(Operand(new_sp_reg, count_reg, times_pointer_size, 0), tmp_reg);
  bind(&entry);
  cmpp(count_reg, Immediate(0));
  j(not_equal, &loop, Label::kNear);

  // Leave current frame.
  movp(rsp, new_sp_reg);
}
    2296             : 
// Invokes |function|, reading the expected argument count out of the
// function's SharedFunctionInfo. Clobbers rbx (used to hold the expected
// count) and delegates to the five-argument overload.
void MacroAssembler::InvokeFunction(Register function, Register new_target,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  movp(rbx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
  movsxlq(rbx,
          FieldOperand(rbx, SharedFunctionInfo::kFormalParameterCountOffset));

  ParameterCount expected(rbx);
  InvokeFunction(function, new_target, expected, actual, flag);
}
    2307             : 
// Invokes |function| (which must be rdi), first loading the function's
// context into rsi, then dispatching to InvokeFunctionCode.
void MacroAssembler::InvokeFunction(Register function, Register new_target,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  DCHECK(function == rdi);
  movp(rsi, FieldOperand(function, JSFunction::kContextOffset));
  InvokeFunctionCode(rdi, new_target, expected, actual, flag);
}
    2316             : 
// Calls or jumps to the code of |function| (must be rdi). Checks the debug
// hook, defaults |new_target| (rdx) to undefined when it is invalid, and
// matches |expected| against |actual| via InvokePrologue (which may route
// through the arguments adaptor). Clobbers rcx with the code entry address.
void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
                                        const ParameterCount& expected,
                                        const ParameterCount& actual,
                                        InvokeFlag flag) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());
  DCHECK(function == rdi);
  DCHECK_IMPLIES(new_target.is_valid(), new_target == rdx);

  // On function call, call into the debugger if necessary.
  CheckDebugHook(function, new_target, expected, actual);

  // Clear the new.target register if not given.
  if (!new_target.is_valid()) {
    LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
  }

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, &done, &definitely_mismatches, flag,
                 Label::kNear);
  if (!definitely_mismatches) {
    // We call indirectly through the code field in the function to
    // allow recompilation to take effect without changing any of the
    // call sites.
    movp(rcx, FieldOperand(function, JSFunction::kCodeOffset));
    addp(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    if (flag == CALL_FUNCTION) {
      call(rcx);
    } else {
      DCHECK(flag == JUMP_FUNCTION);
      jmp(rcx);
    }
    bind(&done);
  }
}
    2353             : 
// Emits the argument-count matching code that precedes a function invocation.
// Places the actual count in rax (and the expected count in rbx when needed).
// When the counts can mismatch at runtime, emits a call/jump to the
// ArgumentsAdaptorTrampoline; on a definite compile-time mismatch, sets
// *definitely_mismatches and control never reaches |done|.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual, Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    Label::Distance near_jump) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label invoke;
  if (expected.is_immediate()) {
    DCHECK(actual.is_immediate());
    Set(rax, actual.immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      if (expected.immediate() ==
              SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
        // Don't worry about adapting arguments for built-ins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        Set(rbx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      Set(rax, actual.immediate());
      cmpp(expected.reg(), Immediate(actual.immediate()));
      j(equal, &invoke, Label::kNear);
      DCHECK(expected.reg() == rbx);
    } else if (expected.reg() != actual.reg()) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmpp(expected.reg(), actual.reg());
      j(equal, &invoke, Label::kNear);
      DCHECK(actual.reg() == rax);
      DCHECK(expected.reg() == rbx);
    } else {
      definitely_matches = true;
      Move(rax, actual.reg());
    }
  }

  // Counts may differ at runtime: go through the arguments adaptor, which
  // expects the actual count in rax and the expected count in rbx.
  if (!definitely_matches) {
    Handle<Code> adaptor = BUILTIN_CODE(isolate(), ArgumentsAdaptorTrampoline);
    if (flag == CALL_FUNCTION) {
      Call(adaptor, RelocInfo::CODE_TARGET);
      if (!*definitely_mismatches) {
        jmp(done, near_jump);
      }
    } else {
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
    2415             : 
// When the isolate's debug hook is active, calls Runtime::kDebugOnFunctionCall
// for |fun|. Register-based counts are converted to smis and, together with
// |new_target| and |fun|, saved and restored around the runtime call; the
// push and pop order below must stay mirrored.
void MacroAssembler::CheckDebugHook(Register fun, Register new_target,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual) {
  Label skip_hook;
  ExternalReference debug_hook_active =
      ExternalReference::debug_hook_on_function_call_address(isolate());
  Operand debug_hook_active_operand = ExternalOperand(debug_hook_active);
  cmpb(debug_hook_active_operand, Immediate(0));
  j(equal, &skip_hook);
  {
    FrameScope frame(this,
                     has_frame() ? StackFrame::NONE : StackFrame::INTERNAL);
    if (expected.is_reg()) {
      Integer32ToSmi(expected.reg(), expected.reg());
      Push(expected.reg());
    }
    if (actual.is_reg()) {
      Integer32ToSmi(actual.reg(), actual.reg());
      Push(actual.reg());
    }
    if (new_target.is_valid()) {
      Push(new_target);
    }
    // |fun| is pushed twice: once as the runtime call argument and once to
    // preserve it across the call.
    Push(fun);
    Push(fun);
    CallRuntime(Runtime::kDebugOnFunctionCall);
    Pop(fun);
    if (new_target.is_valid()) {
      Pop(new_target);
    }
    if (actual.is_reg()) {
      Pop(actual.reg());
      SmiToInteger64(actual.reg(), actual.reg());
    }
    if (expected.is_reg()) {
      Pop(expected.reg());
      SmiToInteger64(expected.reg(), expected.reg());
    }
  }
  bind(&skip_hook);
}
    2457             : 
// Sets up a stub frame: saved caller rbp, new rbp, and the frame-type marker.
void TurboAssembler::StubPrologue(StackFrame::Type type) {
  pushq(rbp);  // Caller's frame pointer.
  movp(rbp, rsp);
  Push(Immediate(StackFrame::TypeToMarker(type)));
}
    2463             : 
// Sets up a standard JS frame: saved caller rbp, new rbp, the callee's
// context (rsi) and the callee's JS function (rdi).
void TurboAssembler::Prologue() {
  pushq(rbp);  // Caller's frame pointer.
  movp(rbp, rsp);
  Push(rsi);  // Callee's context.
  Push(rdi);  // Callee's JS function.
}
    2470             : 
// Enters a frame of the given |type|: saved rbp, new rbp and the type
// marker. INTERNAL frames additionally push the code object; debug builds
// verify that slot was patched away from the undefined placeholder.
void TurboAssembler::EnterFrame(StackFrame::Type type) {
  pushq(rbp);
  movp(rbp, rsp);
  Push(Immediate(StackFrame::TypeToMarker(type)));
  if (type == StackFrame::INTERNAL) {
    Move(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
    Push(kScratchRegister);
  }
  if (emit_debug_code()) {
    Move(kScratchRegister,
         isolate()->factory()->undefined_value(),
         RelocInfo::EMBEDDED_OBJECT);
    cmpp(Operand(rsp, 0), kScratchRegister);
    Check(not_equal, kCodeObjectNotProperlyPatched);
  }
}
    2487             : 
// Tears down a frame built by EnterFrame/StubPrologue by restoring rsp from
// rbp and popping the caller's frame pointer.
void TurboAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    // Debug check: the frame being left must carry the expected type marker.
    cmpp(Operand(rbp, CommonFrameConstants::kContextOrFrameTypeOffset),
         Immediate(StackFrame::TypeToMarker(type)));
    Check(equal, kStackFrameTypesMustMatch);
  }
  movp(rsp, rbp);
  popq(rbp);
}
    2497             : 
// Builds a builtin frame: caller's rbp, then context, target and argument
// count. Must be unwound with LeaveBuiltinFrame using the same registers.
void MacroAssembler::EnterBuiltinFrame(Register context, Register target,
                                       Register argc) {
  Push(rbp);
  Move(rbp, rsp);
  Push(context);
  Push(target);
  Push(argc);
}
    2506             : 
// Unwinds a frame built by EnterBuiltinFrame: pops argc, target and context
// in reverse push order, then `leave` restores rsp/rbp.
void MacroAssembler::LeaveBuiltinFrame(Register context, Register target,
                                       Register argc) {
  Pop(argc);
  Pop(target);
  Pop(context);
  leave();
}
    2514             : 
// Lays out the fixed part of an exit frame (the frame used when calling from
// JS into C++): frame pointer, type marker, a slot for the entry sp (patched
// later by EnterExitFrameEpilogue) and the code object. Also records rbp,
// rsi and rbx in the isolate's top-frame external references.
void MacroAssembler::EnterExitFramePrologue(bool save_rax,
                                            StackFrame::Type frame_type) {
  DCHECK(frame_type == StackFrame::EXIT ||
         frame_type == StackFrame::BUILTIN_EXIT);

  // Set up the frame structure on the stack.
  // All constants are relative to the frame pointer of the exit frame.
  DCHECK_EQ(kFPOnStackSize + kPCOnStackSize,
            ExitFrameConstants::kCallerSPDisplacement);
  DCHECK_EQ(kFPOnStackSize, ExitFrameConstants::kCallerPCOffset);
  DCHECK_EQ(0 * kPointerSize, ExitFrameConstants::kCallerFPOffset);
  pushq(rbp);
  movp(rbp, rsp);

  // Reserve room for entry stack pointer and push the code object.
  Push(Immediate(StackFrame::TypeToMarker(frame_type)));
  DCHECK_EQ(-2 * kPointerSize, ExitFrameConstants::kSPOffset);
  Push(Immediate(0));  // Saved entry sp, patched before call.
  Move(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  Push(kScratchRegister);  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  if (save_rax) {
    movp(r14, rax);  // Backup rax in callee-save register.
  }

  Store(ExternalReference(IsolateAddressId::kCEntryFPAddress, isolate()), rbp);
  Store(ExternalReference(IsolateAddressId::kContextAddress, isolate()), rsi);
  Store(ExternalReference(IsolateAddressId::kCFunctionAddress, isolate()), rbx);
}
    2545             : 
    2546             : 
// Finishes exit-frame setup: optionally spills all allocatable XMM registers,
// reserves |arg_stack_space| slots for outgoing C arguments (plus the Win64
// shadow space), aligns rsp to the OS frame alignment and patches the entry
// sp slot reserved by EnterExitFramePrologue.
void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
                                            bool save_doubles) {
#ifdef _WIN64
  const int kShadowSpace = 4;
  arg_stack_space += kShadowSpace;
#endif
  // Optionally save all XMM registers.
  if (save_doubles) {
    int space = XMMRegister::kNumRegisters * kDoubleSize +
                arg_stack_space * kRegisterSize;
    subp(rsp, Immediate(space));
    int offset = -ExitFrameConstants::kFixedFrameSizeFromFp;
    const RegisterConfiguration* config = RegisterConfiguration::Default();
    for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
      DoubleRegister reg =
          DoubleRegister::from_code(config->GetAllocatableDoubleCode(i));
      // Spill slots live below the fixed frame, one kDoubleSize slot each.
      Movsd(Operand(rbp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else if (arg_stack_space > 0) {
    subp(rsp, Immediate(arg_stack_space * kRegisterSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = base::OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    DCHECK(base::bits::IsPowerOfTwo(kFrameAlignment));
    DCHECK(is_int8(kFrameAlignment));
    // Round rsp down to a multiple of the alignment.
    andp(rsp, Immediate(-kFrameAlignment));
  }

  // Patch the saved entry sp.
  movp(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
}
    2580             : 
// Enters a full exit frame. rax is expected to hold the argument count (it is
// backed up into r14 by the prologue) and r15 ends up pointing at the last
// caller-pushed argument so LeaveExitFrame can drop the arguments.
void MacroAssembler::EnterExitFrame(int arg_stack_space, bool save_doubles,
                                    StackFrame::Type frame_type) {
  EnterExitFramePrologue(true, frame_type);

  // Set up argv in callee-saved register r15. It is reused in LeaveExitFrame,
  // so it must be retained across the C-call.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  leap(r15, Operand(rbp, r14, times_pointer_size, offset));

  EnterExitFrameEpilogue(arg_stack_space, save_doubles);
}
    2592             : 
    2593             : 
// Enters an exit frame for an API callback: no rax backup, no XMM spilling,
// frame type is always EXIT.
void MacroAssembler::EnterApiExitFrame(int arg_stack_space) {
  EnterExitFramePrologue(false, StackFrame::EXIT);
  EnterExitFrameEpilogue(arg_stack_space, false);
}
    2598             : 
    2599             : 
// Leaves an exit frame set up by EnterExitFrame. Restores spilled XMM
// registers if |save_doubles|, and if |pop_arguments| additionally drops the
// caller-pushed arguments and receiver using the argv pointer kept in r15.
void MacroAssembler::LeaveExitFrame(bool save_doubles, bool pop_arguments) {
  // Registers:
  // r15 : argv
  if (save_doubles) {
    int offset = -ExitFrameConstants::kFixedFrameSizeFromFp;
    const RegisterConfiguration* config = RegisterConfiguration::Default();
    for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
      DoubleRegister reg =
          DoubleRegister::from_code(config->GetAllocatableDoubleCode(i));
      // Reload from the spill slots written by EnterExitFrameEpilogue.
      Movsd(reg, Operand(rbp, offset - ((i + 1) * kDoubleSize)));
    }
  }

  if (pop_arguments) {
    // Get the return address from the stack and restore the frame pointer.
    movp(rcx, Operand(rbp, kFPOnStackSize));
    movp(rbp, Operand(rbp, 0 * kPointerSize));

    // Drop everything up to and including the arguments and the receiver
    // from the caller stack.
    leap(rsp, Operand(r15, 1 * kPointerSize));

    PushReturnAddressFrom(rcx);
  } else {
    // Otherwise just leave the exit frame.
    leave();
  }

  LeaveExitFrameEpilogue(true);
}
    2630             : 
    2631             : 
// Leaves an exit frame set up by EnterApiExitFrame: unwinds rsp/rbp and
// clears the isolate's top-frame bookkeeping, optionally restoring rsi from
// the saved context.
void MacroAssembler::LeaveApiExitFrame(bool restore_context) {
  movp(rsp, rbp);
  popq(rbp);

  LeaveExitFrameEpilogue(restore_context);
}
    2638             : 
    2639             : 
// Clears the isolate's top-frame state after leaving an exit frame:
// optionally reloads rsi from the saved context, zeroes the saved context in
// debug builds, and always zeroes the C entry frame pointer.
void MacroAssembler::LeaveExitFrameEpilogue(bool restore_context) {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(IsolateAddressId::kContextAddress,
                                    isolate());
  Operand context_operand = ExternalOperand(context_address);
  if (restore_context) {
    movp(rsi, context_operand);
  }
#ifdef DEBUG
  movp(context_operand, Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(IsolateAddressId::kCEntryFPAddress,
                                       isolate());
  Operand c_entry_fp_operand = ExternalOperand(c_entry_fp_address);
  movp(c_entry_fp_operand, Immediate(0));
}
    2658             : 
    2659             : 
#ifdef _WIN64
// Windows x64 calling convention: the first four arguments go in registers.
static const int kRegisterPassedArguments = 4;
#else
// System V AMD64 ABI (Linux/Mac): the first six integer arguments go in
// registers.
static const int kRegisterPassedArguments = 6;
#endif
    2665             : 
    2666             : 
// Loads slot |index| of the native context into |dst|. First loads the
// native context itself, then indexes into it.
void MacroAssembler::LoadNativeContextSlot(int index, Register dst) {
  movp(dst, NativeContextOperand());
  movp(dst, ContextOperand(dst, index));
}
    2671             : 
    2672             : 
    2673           0 : int TurboAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
    2674             :   // On Windows 64 stack slots are reserved by the caller for all arguments
    2675             :   // including the ones passed in registers, and space is always allocated for
    2676             :   // the four register arguments even if the function takes fewer than four
    2677             :   // arguments.
    2678             :   // On AMD64 ABI (Linux/Mac) the first six arguments are passed in registers
    2679             :   // and the caller does not reserve stack slots for them.
    2680             :   DCHECK_GE(num_arguments, 0);
    2681             : #ifdef _WIN64
    2682             :   const int kMinimumStackSlots = kRegisterPassedArguments;
    2683             :   if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
    2684             :   return num_arguments;
    2685             : #else
    2686      899896 :   if (num_arguments < kRegisterPassedArguments) return 0;
    2687       79624 :   return num_arguments - kRegisterPassedArguments;
    2688             : #endif
    2689             : }
    2690             : 
// Aligns rsp for a C call with |num_arguments| arguments and reserves the
// required argument stack slots. The previous rsp value is stored in the
// slot just above the argument area so CallCFunction can restore it.
// Clobbers kScratchRegister.
void TurboAssembler::PrepareCallCFunction(int num_arguments) {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  DCHECK_NE(frame_alignment, 0);
  DCHECK_GE(num_arguments, 0);

  // Make stack end at alignment and allocate space for arguments and old rsp.
  movp(kScratchRegister, rsp);
  DCHECK(base::bits::IsPowerOfTwo(frame_alignment));
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  // +1 slot for the saved rsp; then round down to the alignment.
  subp(rsp, Immediate((argument_slots_on_stack + 1) * kRegisterSize));
  andp(rsp, Immediate(-frame_alignment));
  movp(Operand(rsp, argument_slots_on_stack * kRegisterSize), kScratchRegister);
}
    2705             : 
// Calls the C function at |function| by materializing its address in rax
// and delegating to the register variant. Clobbers rax.
void TurboAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  LoadAddress(rax, function);
  CallCFunction(rax, num_arguments);
}
    2711             : 
// Calls the C function whose address is in |function|. PrepareCallCFunction
// with the same |num_arguments| must have been emitted first; after the call
// the saved rsp stored above the argument slots is reloaded, undoing the
// alignment frame.
void TurboAssembler::CallCFunction(Register function, int num_arguments) {
  DCHECK_LE(num_arguments, kMaxCParameters);
  DCHECK(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  DCHECK_NE(base::OS::ActivationFrameAlignment(), 0);
  DCHECK_GE(num_arguments, 0);
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  // Restore the rsp saved by PrepareCallCFunction.
  movp(rsp, Operand(rsp, argument_slots_on_stack * kRegisterSize));
}
    2727             : 
    2728             : 
    2729             : #ifdef DEBUG
    2730             : bool AreAliased(Register reg1,
    2731             :                 Register reg2,
    2732             :                 Register reg3,
    2733             :                 Register reg4,
    2734             :                 Register reg5,
    2735             :                 Register reg6,
    2736             :                 Register reg7,
    2737             :                 Register reg8) {
    2738             :   int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() +
    2739             :       reg3.is_valid() + reg4.is_valid() + reg5.is_valid() + reg6.is_valid() +
    2740             :       reg7.is_valid() + reg8.is_valid();
    2741             : 
    2742             :   RegList regs = 0;
    2743             :   if (reg1.is_valid()) regs |= reg1.bit();
    2744             :   if (reg2.is_valid()) regs |= reg2.bit();
    2745             :   if (reg3.is_valid()) regs |= reg3.bit();
    2746             :   if (reg4.is_valid()) regs |= reg4.bit();
    2747             :   if (reg5.is_valid()) regs |= reg5.bit();
    2748             :   if (reg6.is_valid()) regs |= reg6.bit();
    2749             :   if (reg7.is_valid()) regs |= reg7.bit();
    2750             :   if (reg8.is_valid()) regs |= reg8.bit();
    2751             :   int n_of_non_aliasing_regs = NumRegs(regs);
    2752             : 
    2753             :   return n_of_valid_regs != n_of_non_aliasing_regs;
    2754             : }
    2755             : #endif
    2756             : 
// Tests |mask| against the flags word in the MemoryChunk header of the page
// containing |object| and jumps to |condition_met| if the test matches |cc|
// (zero or not_zero). |scratch| may alias |object|; the object address is
// then clobbered.
void TurboAssembler::CheckPageFlag(Register object, Register scratch, int mask,
                                   Condition cc, Label* condition_met,
                                   Label::Distance condition_met_distance) {
  DCHECK(cc == zero || cc == not_zero);
  if (scratch == object) {
    // Mask off the low page bits in place to get the page start.
    andp(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    movp(scratch, Immediate(~Page::kPageAlignmentMask));
    andp(scratch, object);
  }
  // Use a byte-sized test when the mask fits in one byte.
  if (mask < (1 << kBitsPerByte)) {
    testb(Operand(scratch, MemoryChunk::kFlagsOffset),
          Immediate(static_cast<uint8_t>(mask)));
  } else {
    testl(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}
    2775             : 
    2776             : 
// Jumps to |on_black| if |object| is marked black ("11" mark-bit pattern).
// Uses GetMarkBits to compute the bitmap cell and two-bit mask; clobbers
// rcx as well as the two scratch registers.
void MacroAssembler::JumpIfBlack(Register object,
                                 Register bitmap_scratch,
                                 Register mask_scratch,
                                 Label* on_black,
                                 Label::Distance on_black_distance) {
  DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, rcx));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  DCHECK_EQ(strcmp(Marking::kBlackBitPattern, "11"), 0);
  // The mask_scratch register contains a 1 at the position of the first bit
  // and a 1 at a position of the second bit. All other positions are zero.
  movp(rcx, mask_scratch);
  // Object is black only if BOTH masked bits are set in the bitmap cell.
  andp(rcx, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  cmpp(mask_scratch, rcx);
  j(equal, on_black, on_black_distance);
}
    2794             : 
    2795             : 
// Computes the marking-bitmap location for the object at |addr_reg|:
// |bitmap_reg| receives the address of the bitmap cell and |mask_reg| a mask
// with two adjacent bits set at the object's position. Clobbers rcx (needed
// as the variable shift count for shlp_cl).
void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  DCHECK(!AreAliased(addr_reg, bitmap_reg, mask_reg, rcx));
  // Page start: clear the low page-offset bits of the address.
  movp(bitmap_reg, addr_reg);
  // Sign extended 32 bit immediate.
  andp(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
  // Byte offset of the bitmap cell within the page.
  movp(rcx, addr_reg);
  int shift =
      Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
  shrl(rcx, Immediate(shift));
  andp(rcx,
       Immediate((Page::kPageAlignmentMask >> shift) &
                 ~(Bitmap::kBytesPerCell - 1)));

  addp(bitmap_reg, rcx);
  // Bit index within the cell, used as shift count for the two-bit mask.
  movp(rcx, addr_reg);
  shrl(rcx, Immediate(kPointerSizeLog2));
  andp(rcx, Immediate((1 << Bitmap::kBitsPerCellLog2) - 1));
  movl(mask_reg, Immediate(3));  // Two adjacent mark bits.
  shlp_cl(mask_reg);
}
    2818             : 
    2819             : 
// Jumps to |value_is_white| if |value| has the white ("00") mark-bit
// pattern. Clobbers rcx via GetMarkBits plus the two scratch registers.
void MacroAssembler::JumpIfWhite(Register value, Register bitmap_scratch,
                                 Register mask_scratch, Label* value_is_white,
                                 Label::Distance distance) {
  DCHECK(!AreAliased(value, bitmap_scratch, mask_scratch, rcx));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  DCHECK_EQ(strcmp(Marking::kWhiteBitPattern, "00"), 0);
  DCHECK_EQ(strcmp(Marking::kBlackBitPattern, "11"), 0);
  DCHECK_EQ(strcmp(Marking::kGreyBitPattern, "10"), 0);
  DCHECK_EQ(strcmp(Marking::kImpossibleBitPattern, "01"), 0);

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  testp(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);
  j(zero, value_is_white, distance);
}
    2837             : 
    2838             : }  // namespace internal
    2839             : }  // namespace v8
    2840             : 
    2841             : #endif  // V8_TARGET_ARCH_X64

Generated by: LCOV version 1.10