LCOV - code coverage report
Current view: top level - src/x64 - macro-assembler-x64.cc (source / functions)
Test: app.info
Date: 2019-03-21
                  Hit     Total    Coverage
Lines:            919     1225     75.0 %
Functions:        167      208     80.3 %

          Line data    Source code
       1             : // Copyright 2012 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #if V8_TARGET_ARCH_X64
       6             : 
       7             : #include "src/base/bits.h"
       8             : #include "src/base/division-by-constant.h"
       9             : #include "src/base/utils/random-number-generator.h"
      10             : #include "src/bootstrapper.h"
      11             : #include "src/callable.h"
      12             : #include "src/code-factory.h"
      13             : #include "src/counters.h"
      14             : #include "src/debug/debug.h"
      15             : #include "src/external-reference-table.h"
      16             : #include "src/frames-inl.h"
      17             : #include "src/globals.h"
      18             : #include "src/heap/heap-inl.h"  // For MemoryChunk.
      19             : #include "src/macro-assembler.h"
      20             : #include "src/objects-inl.h"
      21             : #include "src/objects/smi.h"
      22             : #include "src/register-configuration.h"
      23             : #include "src/snapshot/embedded-data.h"
      24             : #include "src/snapshot/snapshot.h"
      25             : #include "src/string-constants.h"
      26             : #include "src/x64/assembler-x64.h"
      27             : 
      28             : // Satisfy cpplint check, but don't include platform-specific header. It is
      29             : // included recursively via macro-assembler.h.
      30             : #if 0
      31             : #include "src/x64/macro-assembler-x64.h"
      32             : #endif
      33             : 
      34             : namespace v8 {
      35             : namespace internal {
      36             : 
      37        1792 : Operand StackArgumentsAccessor::GetArgumentOperand(int index) {
      38             :   DCHECK_GE(index, 0);
      39        1792 :   int receiver = (receiver_mode_ == ARGUMENTS_CONTAIN_RECEIVER) ? 1 : 0;
      40             :   int displacement_to_last_argument =
      41        1792 :       base_reg_ == rsp ? kPCOnStackSize : kFPOnStackSize + kPCOnStackSize;
      42        1792 :   displacement_to_last_argument += extra_displacement_to_last_argument_;
      43        1792 :   if (argument_count_reg_ == no_reg) {
      44             :     // argument[0] is at base_reg_ + displacement_to_last_argument +
      45             :     // (argument_count_immediate_ + receiver - 1) * kSystemPointerSize.
      46             :     DCHECK_GT(argument_count_immediate_ + receiver, 0);
      47             :     return Operand(base_reg_,
      48             :                    displacement_to_last_argument +
      49           0 :                        (argument_count_immediate_ + receiver - 1 - index) *
      50           0 :                            kSystemPointerSize);
      51             :   } else {
      52             :     // argument[0] is at base_reg_ + displacement_to_last_argument +
      53             :     // argument_count_reg_ * times_system_pointer_size + (receiver - 1) *
      54             :     // kSystemPointerSize.
      55             :     return Operand(base_reg_, argument_count_reg_, times_system_pointer_size,
      56             :                    displacement_to_last_argument +
      57        1792 :                        (receiver - 1 - index) * kSystemPointerSize);
      58             :   }
      59             : }
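// [Editor's illustration, not part of the original source file.]
// Evaluating the immediate branch above with base_reg_ == rsp (so the
// displacement to the last argument is just kPCOnStackSize), receiver_mode_ ==
// ARGUMENTS_CONTAIN_RECEIVER (receiver == 1), argument_count_immediate_ == 2,
// and assuming 8-byte pointers:
//   index 0 -> Operand(rsp, 8 + (2 + 1 - 1 - 0) * 8) == Operand(rsp, 24)
//   index 1 -> Operand(rsp, 8 + (2 + 1 - 1 - 1) * 8) == Operand(rsp, 16)
// i.e. higher argument indices sit closer to the return address at rsp[0].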
      60             : 
      61           0 : StackArgumentsAccessor::StackArgumentsAccessor(
      62             :     Register base_reg, const ParameterCount& parameter_count,
      63             :     StackArgumentsAccessorReceiverMode receiver_mode,
      64             :     int extra_displacement_to_last_argument)
      65             :     : base_reg_(base_reg),
      66             :       argument_count_reg_(parameter_count.is_reg() ? parameter_count.reg()
      67             :                                                    : no_reg),
      68             :       argument_count_immediate_(
      69             :           parameter_count.is_immediate() ? parameter_count.immediate() : 0),
      70             :       receiver_mode_(receiver_mode),
      71             :       extra_displacement_to_last_argument_(
      72         672 :           extra_displacement_to_last_argument) {}
      73             : 
      74         392 : void MacroAssembler::Load(Register destination, ExternalReference source) {
      75         392 :   if (root_array_available_ && options().enable_root_array_delta_access) {
      76           0 :     intptr_t delta = RootRegisterOffsetForExternalReference(isolate(), source);
      77           0 :     if (is_int32(delta)) {
      78           0 :       movq(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
      79           0 :       return;
      80             :     }
      81             :   }
      82             :   // Safe code.
      83         392 :   if (destination == rax && !options().isolate_independent_code) {
      84           0 :     load_rax(source);
      85             :   } else {
      86         392 :     movq(destination, ExternalReferenceAsOperand(source));
      87             :   }
      88             : }
      89             : 
      90             : 
      91       46380 : void MacroAssembler::Store(ExternalReference destination, Register source) {
      92       46380 :   if (root_array_available_ && options().enable_root_array_delta_access) {
      93             :     intptr_t delta =
      94           0 :         RootRegisterOffsetForExternalReference(isolate(), destination);
      95           0 :     if (is_int32(delta)) {
      96           0 :       movq(Operand(kRootRegister, static_cast<int32_t>(delta)), source);
      97           0 :       return;
      98             :     }
      99             :   }
     100             :   // Safe code.
     101       46380 :   if (source == rax && !options().isolate_independent_code) {
     102           0 :     store_rax(destination);
     103             :   } else {
     104       46380 :     movq(ExternalReferenceAsOperand(destination), source);
     105             :   }
     106             : }
     107             : 
     108       39648 : void TurboAssembler::LoadFromConstantsTable(Register destination,
     109             :                                             int constant_index) {
     110             :   DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kBuiltinsConstantsTable));
     111       39648 :   LoadRoot(destination, RootIndex::kBuiltinsConstantsTable);
     112             :   LoadTaggedPointerField(
     113             :       destination,
     114             :       FieldOperand(destination, FixedArray::OffsetOfElementAt(constant_index)));
     115       39648 : }
     116             : 
     117       18816 : void TurboAssembler::LoadRootRegisterOffset(Register destination,
     118             :                                             intptr_t offset) {
     119             :   DCHECK(is_int32(offset));
     120       18816 :   if (offset == 0) {
     121             :     Move(destination, kRootRegister);
     122             :   } else {
     123       35840 :     leaq(destination, Operand(kRootRegister, static_cast<int32_t>(offset)));
     124             :   }
     125       18816 : }
     126             : 
     127      807912 : void TurboAssembler::LoadRootRelative(Register destination, int32_t offset) {
     128     1615824 :   movq(destination, Operand(kRootRegister, offset));
     129      807912 : }
     130             : 
     131      950006 : void TurboAssembler::LoadAddress(Register destination,
     132             :                                  ExternalReference source) {
     133      950006 :   if (root_array_available_ && options().enable_root_array_delta_access) {
     134        1205 :     intptr_t delta = RootRegisterOffsetForExternalReference(isolate(), source);
     135        1205 :     if (is_int32(delta)) {
     136           0 :       leaq(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
     137           0 :       return;
     138             :     }
     139             :   }
     140             :   // Safe code.
     141             :   if (FLAG_embedded_builtins) {
     142      950006 :     if (root_array_available_ && options().isolate_independent_code) {
     143       45192 :       IndirectLoadExternalReference(destination, source);
     144       45192 :       return;
     145             :     }
     146             :   }
     147      904814 :   Move(destination, source);
     148             : }
     149             : 
     150     1142047 : Operand TurboAssembler::ExternalReferenceAsOperand(ExternalReference reference,
     151             :                                                    Register scratch) {
     152     1142047 :   if (root_array_available_ && options().enable_root_array_delta_access) {
     153             :     int64_t delta =
     154        3695 :         RootRegisterOffsetForExternalReference(isolate(), reference);
     155        3695 :     if (is_int32(delta)) {
     156        3675 :       return Operand(kRootRegister, static_cast<int32_t>(delta));
     157             :     }
     158             :   }
     159     1138372 :   if (root_array_available_ && options().isolate_independent_code) {
     160       90832 :     if (IsAddressableThroughRootRegister(isolate(), reference)) {
     161             :       // Some external references can be efficiently loaded as an offset from
     162             :       // kRootRegister.
     163             :       intptr_t offset =
     164       90216 :           RootRegisterOffsetForExternalReference(isolate(), reference);
     165       90216 :       CHECK(is_int32(offset));
     166       90216 :       return Operand(kRootRegister, static_cast<int32_t>(offset));
     167             :     } else {
     168             :       // Otherwise, do a memory load from the external reference table.
     169        1232 :       movq(scratch, Operand(kRootRegister,
     170             :                             RootRegisterOffsetForExternalReferenceTableEntry(
     171             :                                 isolate(), reference)));
     172         616 :       return Operand(scratch, 0);
     173             :     }
     174             :   }
     175     1047540 :   Move(scratch, reference);
     176     1047539 :   return Operand(scratch, 0);
     177             : }
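// [Editor's note, not in the original source.] The function above tries three
// strategies in order of decreasing efficiency: (1) a direct kRootRegister
// offset when root-relative deltas are enabled and fit in 32 bits, (2) for
// isolate-independent code, either a root-relative offset or an indirect load
// of the external reference table entry into |scratch|, and (3) otherwise a
// Move of the full reference into |scratch|, returning Operand(scratch, 0).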
     178             : 
     179         112 : void MacroAssembler::PushAddress(ExternalReference source) {
     180         112 :   LoadAddress(kScratchRegister, source);
     181             :   Push(kScratchRegister);
     182         112 : }
     183             : 
     184     1962439 : void TurboAssembler::LoadRoot(Register destination, RootIndex index) {
     185             :   DCHECK(root_array_available_);
     186     3924888 :   movq(destination,
     187             :        Operand(kRootRegister, RootRegisterOffsetForRootIndex(index)));
     188     1962449 : }
     189             : 
     190         672 : void MacroAssembler::PushRoot(RootIndex index) {
     191             :   DCHECK(root_array_available_);
     192        1344 :   Push(Operand(kRootRegister, RootRegisterOffsetForRootIndex(index)));
     193         672 : }
     194             : 
     195      563174 : void TurboAssembler::CompareRoot(Register with, RootIndex index) {
     196             :   DCHECK(root_array_available_);
     197      563174 :   if (IsInRange(index, RootIndex::kFirstStrongOrReadOnlyRoot,
     198             :                 RootIndex::kLastStrongOrReadOnlyRoot)) {
     199        2016 :     cmp_tagged(with,
     200        1008 :                Operand(kRootRegister, RootRegisterOffsetForRootIndex(index)));
     201             :   } else {
     202             :     // Some smi roots contain system pointer size values like stack limits.
     203      562166 :     cmpq(with, Operand(kRootRegister, RootRegisterOffsetForRootIndex(index)));
     204             :   }
     205      563183 : }
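// [Editor's note, not in the original source.] With pointer compression
// enabled, cmp_tagged compares only the 32-bit tagged value, which is enough
// for strong/read-only roots; smi roots that hold pointer-sized values (such
// as stack limits) need the full-width cmpq used above.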
     206             : 
     207           0 : void TurboAssembler::CompareRoot(Operand with, RootIndex index) {
     208             :   DCHECK(root_array_available_);
     209             :   DCHECK(!with.AddressUsesRegister(kScratchRegister));
     210           0 :   LoadRoot(kScratchRegister, index);
     211           0 :   if (IsInRange(index, RootIndex::kFirstStrongOrReadOnlyRoot,
     212             :                 RootIndex::kLastStrongOrReadOnlyRoot)) {
     213           0 :     cmp_tagged(with, kScratchRegister);
     214             :   } else {
     215             :     // Some smi roots contain system pointer size values like stack limits.
     216           0 :     cmpq(with, kScratchRegister);
     217             :   }
     218           0 : }
     219             : 
     220      734455 : void TurboAssembler::LoadTaggedPointerField(Register destination,
     221             :                                             Operand field_operand) {
     222             : #ifdef V8_COMPRESS_POINTERS
     223      775951 :   DecompressTaggedPointer(destination, field_operand);
     224             : #else
     225             :   mov_tagged(destination, field_operand);
     226             : #endif
     227      734455 : }
     228             : 
     229         336 : void TurboAssembler::LoadAnyTaggedField(Register destination,
     230             :                                         Operand field_operand,
     231             :                                         Register scratch) {
     232             : #ifdef V8_COMPRESS_POINTERS
     233         336 :   DecompressAnyTagged(destination, field_operand, scratch);
     234             : #else
     235             :   mov_tagged(destination, field_operand);
     236             : #endif
     237         336 : }
     238             : 
     239         112 : void TurboAssembler::PushTaggedPointerField(Operand field_operand,
     240             :                                             Register scratch) {
     241             : #ifdef V8_COMPRESS_POINTERS
     242             :   DCHECK(!field_operand.AddressUsesRegister(scratch));
     243         112 :   DecompressTaggedPointer(scratch, field_operand);
     244             :   Push(scratch);
     245             : #else
     246             :   Push(field_operand);
     247             : #endif
     248         112 : }
     249             : 
     250         112 : void TurboAssembler::PushTaggedAnyField(Operand field_operand,
     251             :                                         Register scratch1, Register scratch2) {
     252             : #ifdef V8_COMPRESS_POINTERS
     253             :   DCHECK(!AreAliased(scratch1, scratch2));
     254             :   DCHECK(!field_operand.AddressUsesRegister(scratch1));
     255             :   DCHECK(!field_operand.AddressUsesRegister(scratch2));
     256         112 :   DecompressAnyTagged(scratch1, field_operand, scratch2);
     257             :   Push(scratch1);
     258             : #else
     259             :   Push(field_operand);
     260             : #endif
     261         112 : }
     262             : 
     263         280 : void TurboAssembler::SmiUntagField(Register dst, Operand src) {
     264         280 :   SmiUntag(dst, src);
     265         280 : }
     266             : 
     267      197938 : void TurboAssembler::StoreTaggedField(Operand dst_field_operand,
     268             :                                       Immediate value) {
     269             : #ifdef V8_COMPRESS_POINTERS
     270      197938 :   RecordComment("[ StoreTagged");
     271      197938 :   movl(dst_field_operand, value);
     272      197938 :   RecordComment("]");
     273             : #else
     274             :   movq(dst_field_operand, value);
     275             : #endif
     276      197938 : }
     277             : 
     278     2618236 : void TurboAssembler::StoreTaggedField(Operand dst_field_operand,
     279             :                                       Register value) {
     280             : #ifdef V8_COMPRESS_POINTERS
     281     2618236 :   RecordComment("[ StoreTagged");
     282     2618238 :   movl(dst_field_operand, value);
     283     2618239 :   RecordComment("]");
     284             : #else
     285             :   movq(dst_field_operand, value);
     286             : #endif
     287     2618242 : }
     288             : 
     289       87022 : void TurboAssembler::DecompressTaggedSigned(Register destination,
     290             :                                             Operand field_operand) {
     291       87022 :   RecordComment("[ DecompressTaggedSigned");
     292       87023 :   movsxlq(destination, field_operand);
     293       87023 :   RecordComment("]");
     294       87023 : }
     295             : 
     296     2017128 : void TurboAssembler::DecompressTaggedPointer(Register destination,
     297             :                                              Operand field_operand) {
     298     2017128 :   RecordComment("[ DecompressTaggedPointer");
     299     2017147 :   movsxlq(destination, field_operand);
     300             :   addq(destination, kRootRegister);
     301     2017143 :   RecordComment("]");
     302     2017142 : }
     303             : 
     304     2933096 : void TurboAssembler::DecompressAnyTagged(Register destination,
     305             :                                          Operand field_operand,
     306             :                                          Register scratch) {
     307             :   DCHECK(!AreAliased(destination, scratch));
     308     2933096 :   RecordComment("[ DecompressAnyTagged");
     309     2933099 :   movsxlq(destination, field_operand);
     310             :   if (kUseBranchlessPtrDecompression) {
     311             :     // Branchlessly compute |masked_root|:
     312             :     // masked_root = HAS_SMI_TAG(destination) ? 0 : kRootRegister;
     313             :     STATIC_ASSERT((kSmiTagSize == 1) && (kSmiTag < 32));
     314     2933100 :     Register masked_root = scratch;
     315             :     movl(masked_root, destination);
     316             :     andl(masked_root, Immediate(kSmiTagMask));
     317             :     negq(masked_root);
     318             :     andq(masked_root, kRootRegister);
     319             :     // Now this add operation will either leave the value unchanged if it is
     320             :     // a smi or add the isolate root if it is a heap object.
     321             :     addq(destination, masked_root);
     322             :   } else {
     323             :     Label done;
     324             :     JumpIfSmi(destination, &done);
     325             :     addq(destination, kRootRegister);
     326             :     bind(&done);
     327             :   }
     328     2933097 :   RecordComment("]");
     329     2933100 : }
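// [Editor's sketch, not part of the original source: the branchless path above
//  modeled in plain C++, assuming a one-bit smi tag where smis carry tag 0;
//  |compressed| (int32_t) and |root| (uint64_t) stand in for the field value
//  and kRootRegister.]
//   uint64_t value = static_cast<uint64_t>(int64_t{compressed});   // movsxlq
//   uint64_t masked_root = -(value & kSmiTagMask) & root;   // 0 for smis, root otherwise
//   uint64_t decompressed = value + masked_root;             // addq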
     330             : 
     331         112 : void MacroAssembler::RecordWriteField(Register object, int offset,
     332             :                                       Register value, Register dst,
     333             :                                       SaveFPRegsMode save_fp,
     334             :                                       RememberedSetAction remembered_set_action,
     335             :                                       SmiCheck smi_check) {
     336             :   // First, check if a write barrier is even needed. The tests below
     337             :   // catch stores of Smis.
     338         112 :   Label done;
     339             : 
     340             :   // Skip barrier if writing a smi.
     341         112 :   if (smi_check == INLINE_SMI_CHECK) {
     342          56 :     JumpIfSmi(value, &done);
     343             :   }
     344             : 
     345             :   // Although the object register is tagged, the offset is relative to the start
     346             :   // of the object, so the offset must be a multiple of kTaggedSize.
     347             :   DCHECK(IsAligned(offset, kTaggedSize));
     348             : 
     349         112 :   leaq(dst, FieldOperand(object, offset));
     350         112 :   if (emit_debug_code()) {
     351           0 :     Label ok;
     352           0 :     testb(dst, Immediate(kTaggedSize - 1));
     353           0 :     j(zero, &ok, Label::kNear);
     354           0 :     int3();
     355           0 :     bind(&ok);
     356             :   }
     357             : 
     358             :   RecordWrite(object, dst, value, save_fp, remembered_set_action,
     359         112 :               OMIT_SMI_CHECK);
     360             : 
     361         112 :   bind(&done);
     362             : 
     363             :   // Clobber clobbered input registers when running with the debug-code flag
     364             :   // turned on to provoke errors.
     365         112 :   if (emit_debug_code()) {
     366             :     Move(value, kZapValue, RelocInfo::NONE);
     367             :     Move(dst, kZapValue, RelocInfo::NONE);
     368             :   }
     369         112 : }
     370             : 
     371      329007 : void TurboAssembler::SaveRegisters(RegList registers) {
     372             :   DCHECK_GT(NumRegs(registers), 0);
     373    10857529 :   for (int i = 0; i < Register::kNumRegisters; ++i) {
     374     5264250 :     if ((registers >> i) & 1u) {
     375     1645086 :       pushq(Register::from_code(i));
     376             :     }
     377             :   }
     378      329018 : }
     379             : 
     380      329011 : void TurboAssembler::RestoreRegisters(RegList registers) {
     381             :   DCHECK_GT(NumRegs(registers), 0);
     382    10857541 :   for (int i = Register::kNumRegisters - 1; i >= 0; --i) {
     383     5264258 :     if ((registers >> i) & 1u) {
     384     1645087 :       popq(Register::from_code(i));
     385             :     }
     386             :   }
     387      329018 : }
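// [Editor's note, not in the original source.] RegList is a bitmask indexed by
// register code, so SaveRegisters pushes in ascending code order and
// RestoreRegisters pops in the mirrored descending order, keeping the stack
// balanced around the call made in CallRecordWriteStub below.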
     388             : 
     389      328706 : void TurboAssembler::CallRecordWriteStub(
     390             :     Register object, Register address,
     391             :     RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode) {
     392      328706 :   CallRecordWriteStub(
     393             :       object, address, remembered_set_action, fp_mode,
     394             :       isolate()->builtins()->builtin_handle(Builtins::kRecordWrite),
     395      328706 :       kNullAddress);
     396      328706 : }
     397             : 
     398         309 : void TurboAssembler::CallRecordWriteStub(
     399             :     Register object, Register address,
     400             :     RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode,
     401             :     Address wasm_target) {
     402             :   CallRecordWriteStub(object, address, remembered_set_action, fp_mode,
     403         309 :                       Handle<Code>::null(), wasm_target);
     404         312 : }
     405             : 
     406      329015 : void TurboAssembler::CallRecordWriteStub(
     407             :     Register object, Register address,
     408             :     RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode,
     409             :     Handle<Code> code_target, Address wasm_target) {
     410             :   DCHECK_NE(code_target.is_null(), wasm_target == kNullAddress);
     411             : 
     412             :   RecordWriteDescriptor descriptor;
     413             :   RegList registers = descriptor.allocatable_registers();
     414             : 
     415      329015 :   SaveRegisters(registers);
     416             : 
     417             :   Register object_parameter(
     418      329018 :       descriptor.GetRegisterParameter(RecordWriteDescriptor::kObject));
     419             :   Register slot_parameter(
     420      329018 :       descriptor.GetRegisterParameter(RecordWriteDescriptor::kSlot));
     421             :   Register remembered_set_parameter(
     422             :       descriptor.GetRegisterParameter(RecordWriteDescriptor::kRememberedSet));
     423             :   Register fp_mode_parameter(
     424             :       descriptor.GetRegisterParameter(RecordWriteDescriptor::kFPMode));
     425             : 
     426             :   // Prepare argument registers for calling RecordWrite
     427             :   // slot_parameter   <= address
     428             :   // object_parameter <= object
     429      329018 :   MovePair(slot_parameter, address, object_parameter, object);
     430             : 
     431             :   Smi smi_rsa = Smi::FromEnum(remembered_set_action);
     432             :   Smi smi_fm = Smi::FromEnum(fp_mode);
     433      329016 :   Move(remembered_set_parameter, smi_rsa);
     434      329016 :   if (smi_rsa != smi_fm) {
     435      285788 :     Move(fp_mode_parameter, smi_fm);
     436             :   } else {
     437       43228 :     movq(fp_mode_parameter, remembered_set_parameter);
     438             :   }
     439      329017 :   if (code_target.is_null()) {
     440             :     // Use {near_call} for direct Wasm call within a module.
     441         311 :     near_call(wasm_target, RelocInfo::WASM_STUB_CALL);
     442             :   } else {
     443      328706 :     Call(code_target, RelocInfo::CODE_TARGET);
     444             :   }
     445             : 
     446      329018 :   RestoreRegisters(registers);
     447      329018 : }
     448             : 
     449         112 : void MacroAssembler::RecordWrite(Register object, Register address,
     450             :                                  Register value, SaveFPRegsMode fp_mode,
     451             :                                  RememberedSetAction remembered_set_action,
     452             :                                  SmiCheck smi_check) {
     453             :   DCHECK(object != value);
     454             :   DCHECK(object != address);
     455             :   DCHECK(value != address);
     456         112 :   AssertNotSmi(object);
     457             : 
     458         168 :   if (remembered_set_action == OMIT_REMEMBERED_SET &&
     459          56 :       !FLAG_incremental_marking) {
     460           0 :     return;
     461             :   }
     462             : 
     463         112 :   if (emit_debug_code()) {
     464           0 :     Label ok;
     465           0 :     cmp_tagged(value, Operand(address, 0));
     466           0 :     j(equal, &ok, Label::kNear);
     467           0 :     int3();
     468           0 :     bind(&ok);
     469             :   }
     470             : 
     471             :   // First, check if a write barrier is even needed. The tests below
     472             :   // catch stores of smis and stores into the young generation.
     473         112 :   Label done;
     474             : 
     475         112 :   if (smi_check == INLINE_SMI_CHECK) {
     476             :     // Skip barrier if writing a smi.
     477           0 :     JumpIfSmi(value, &done);
     478             :   }
     479             : 
     480         112 :   CheckPageFlag(value,
     481             :                 value,  // Used as scratch.
     482             :                 MemoryChunk::kPointersToHereAreInterestingMask, zero, &done,
     483         112 :                 Label::kNear);
     484             : 
     485             :   CheckPageFlag(object,
     486             :                 value,  // Used as scratch.
     487             :                 MemoryChunk::kPointersFromHereAreInterestingMask,
     488             :                 zero,
     489             :                 &done,
     490         112 :                 Label::kNear);
     491             : 
     492         112 :   CallRecordWriteStub(object, address, remembered_set_action, fp_mode);
     493             : 
     494         112 :   bind(&done);
     495             : 
     496             :   // Count number of write barriers in generated code.
     497         112 :   isolate()->counters()->write_barriers_static()->Increment();
     498         112 :   IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);
     499             : 
     500             :   // Clobber clobbered registers when running with the debug-code flag
     501             :   // turned on to provoke errors.
     502         112 :   if (emit_debug_code()) {
     503             :     Move(address, kZapValue, RelocInfo::NONE);
     504             :     Move(value, kZapValue, RelocInfo::NONE);
     505             :   }
     506             : }
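// [Editor's note, not in the original source.] The barrier above bails out
// early for smi stores (when the inline check is requested), for values on
// pages without the "pointers to here are interesting" flag, and for objects
// on pages without the "pointers from here are interesting" flag; only the
// remaining stores reach CallRecordWriteStub.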
     507             : 
     508         122 : void TurboAssembler::Assert(Condition cc, AbortReason reason) {
     509         122 :   if (emit_debug_code()) Check(cc, reason);
     510         122 : }
     511             : 
     512      364297 : void TurboAssembler::AssertUnreachable(AbortReason reason) {
     513      364297 :   if (emit_debug_code()) Abort(reason);
     514      364297 : }
     515             : 
     516         202 : void TurboAssembler::Check(Condition cc, AbortReason reason) {
     517         202 :   Label L;
     518         202 :   j(cc, &L, Label::kNear);
     519         202 :   Abort(reason);
     520             :   // Control will not return here.
     521         202 :   bind(&L);
     522         202 : }
     523             : 
     524           0 : void TurboAssembler::CheckStackAlignment() {
     525           0 :   int frame_alignment = base::OS::ActivationFrameAlignment();
     526           0 :   int frame_alignment_mask = frame_alignment - 1;
     527           0 :   if (frame_alignment > kSystemPointerSize) {
     528             :     DCHECK(base::bits::IsPowerOfTwo(frame_alignment));
     529           0 :     Label alignment_as_expected;
     530           0 :     testq(rsp, Immediate(frame_alignment_mask));
     531           0 :     j(zero, &alignment_as_expected, Label::kNear);
     532             :     // Abort if stack is not aligned.
     533           0 :     int3();
     534           0 :     bind(&alignment_as_expected);
     535             :   }
     536           0 : }
     537             : 
     538        4957 : void TurboAssembler::Abort(AbortReason reason) {
     539             : #ifdef DEBUG
     540             :   const char* msg = GetAbortReason(reason);
     541             :   RecordComment("Abort message: ");
     542             :   RecordComment(msg);
     543             : #endif
     544             : 
     545             :   // Avoid emitting call to builtin if requested.
     546        4957 :   if (trap_on_abort()) {
     547           0 :     int3();
     548           0 :     return;
     549             :   }
     550             : 
     551        4957 :   if (should_abort_hard()) {
     552             :     // We don't care if we constructed a frame. Just pretend we did.
     553          10 :     FrameScope assume_frame(this, StackFrame::NONE);
     554          10 :     movl(arg_reg_1, Immediate(static_cast<int>(reason)));
     555          10 :     PrepareCallCFunction(1);
     556          10 :     LoadAddress(rax, ExternalReference::abort_with_reason());
     557          10 :     call(rax);
     558             :     return;
     559             :   }
     560             : 
     561        4947 :   Move(rdx, Smi::FromInt(static_cast<int>(reason)));
     562             : 
     563        4947 :   if (!has_frame()) {
     564             :     // We don't actually want to generate a pile of code for this, so just
     565             :     // claim there is a stack frame, without generating one.
     566          56 :     FrameScope scope(this, StackFrame::NONE);
     567          56 :     Call(BUILTIN_CODE(isolate(), Abort), RelocInfo::CODE_TARGET);
     568             :   } else {
     569        4891 :     Call(BUILTIN_CODE(isolate(), Abort), RelocInfo::CODE_TARGET);
     570             :   }
     571             :   // Control will not return here.
     572        4947 :   int3();
     573             : }
     574             : 
     575          76 : void TurboAssembler::CallRuntimeWithCEntry(Runtime::FunctionId fid,
     576             :                                            Register centry) {
     577          76 :   const Runtime::Function* f = Runtime::FunctionForId(fid);
     578             :   // TODO(1236192): Most runtime routines don't need the number of
     579             :   // arguments passed in because it is constant. At some point we
     580             :   // should remove this need and make the runtime routine entry code
     581             :   // smarter.
     582          76 :   Set(rax, f->nargs);
     583          76 :   LoadAddress(rbx, ExternalReference::Create(f));
     584             :   DCHECK(!AreAliased(centry, rax, rbx));
     585             :   DCHECK(centry == rcx);
     586          76 :   CallCodeObject(centry);
     587          75 : }
     588             : 
     589        1960 : void MacroAssembler::CallRuntime(const Runtime::Function* f,
     590             :                                  int num_arguments,
     591             :                                  SaveFPRegsMode save_doubles) {
     592             :   // If the expected number of arguments of the runtime function is
     593             :   // constant, we check that the actual number of arguments match the
     594             :   // expectation.
     595        1960 :   CHECK(f->nargs < 0 || f->nargs == num_arguments);
     596             : 
     597             :   // TODO(1236192): Most runtime routines don't need the number of
     598             :   // arguments passed in because it is constant. At some point we
     599             :   // should remove this need and make the runtime routine entry code
     600             :   // smarter.
     601        1960 :   Set(rax, num_arguments);
     602        1960 :   LoadAddress(rbx, ExternalReference::Create(f));
     603             :   Handle<Code> code =
     604        1960 :       CodeFactory::CEntry(isolate(), f->result_size, save_doubles);
     605        1960 :   Call(code, RelocInfo::CODE_TARGET);
     606        1960 : }
     607             : 
     608         840 : void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
     609             :   // ----------- S t a t e -------------
     610             :   //  -- rsp[0]                 : return address
     611             :   //  -- rsp[8]                 : argument num_arguments - 1
     612             :   //  ...
     613             :   //  -- rsp[8 * num_arguments] : argument 0 (receiver)
     614             :   //
     615             :   //  For runtime functions with variable arguments:
      616             :   //  -- rax                    : number of arguments
     617             :   // -----------------------------------
     618             : 
     619         840 :   const Runtime::Function* function = Runtime::FunctionForId(fid);
     620             :   DCHECK_EQ(1, function->result_size);
     621         840 :   if (function->nargs >= 0) {
     622         840 :     Set(rax, function->nargs);
     623             :   }
     624         840 :   JumpToExternalReference(ExternalReference::Create(fid));
     625         840 : }
     626             : 
     627         840 : void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
     628             :                                              bool builtin_exit_frame) {
     629             :   // Set the entry point and jump to the C entry runtime stub.
     630         840 :   LoadAddress(rbx, ext);
     631             :   Handle<Code> code = CodeFactory::CEntry(isolate(), 1, kDontSaveFPRegs,
     632         840 :                                           kArgvOnStack, builtin_exit_frame);
     633         840 :   Jump(code, RelocInfo::CODE_TARGET);
     634         840 : }
     635             : 
     636             : static constexpr Register saved_regs[] = {rax, rcx, rdx, rbx, rbp, rsi,
     637             :                                           rdi, r8,  r9,  r10, r11};
     638             : 
     639             : static constexpr int kNumberOfSavedRegs = sizeof(saved_regs) / sizeof(Register);
     640             : 
     641         676 : int TurboAssembler::RequiredStackSizeForCallerSaved(SaveFPRegsMode fp_mode,
     642             :                                                     Register exclusion1,
     643             :                                                     Register exclusion2,
     644             :                                                     Register exclusion3) const {
     645             :   int bytes = 0;
     646       15548 :   for (int i = 0; i < kNumberOfSavedRegs; i++) {
     647        7436 :     Register reg = saved_regs[i];
     648        7436 :     if (reg != exclusion1 && reg != exclusion2 && reg != exclusion3) {
     649        6760 :       bytes += kSystemPointerSize;
     650             :     }
     651             :   }
     652             : 
      653             :   // R12 to r15 are callee-saved on all platforms and thus not in saved_regs.
     654         676 :   if (fp_mode == kSaveFPRegs) {
     655         340 :     bytes += kDoubleSize * XMMRegister::kNumRegisters;
     656             :   }
     657             : 
     658         676 :   return bytes;
     659             : }
     660             : 
     661         676 : int TurboAssembler::PushCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
     662             :                                     Register exclusion2, Register exclusion3) {
     663             :   // We don't allow a GC during a store buffer overflow so there is no need to
     664             :   // store the registers in any particular way, but we do have to store and
     665             :   // restore them.
     666             :   int bytes = 0;
     667       15548 :   for (int i = 0; i < kNumberOfSavedRegs; i++) {
     668        7436 :     Register reg = saved_regs[i];
     669        7436 :     if (reg != exclusion1 && reg != exclusion2 && reg != exclusion3) {
     670        6760 :       pushq(reg);
     671        6760 :       bytes += kSystemPointerSize;
     672             :     }
     673             :   }
     674             : 
      675             :   // R12 to r15 are callee-saved on all platforms and thus not in saved_regs.
     676         676 :   if (fp_mode == kSaveFPRegs) {
     677             :     int delta = kDoubleSize * XMMRegister::kNumRegisters;
     678         340 :     subq(rsp, Immediate(delta));
     679       11220 :     for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
     680             :       XMMRegister reg = XMMRegister::from_code(i);
     681       10880 :       Movsd(Operand(rsp, i * kDoubleSize), reg);
     682             :     }
     683         340 :     bytes += delta;
     684             :   }
     685             : 
     686         676 :   return bytes;
     687             : }
     688             : 
     689         676 : int TurboAssembler::PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
     690             :                                    Register exclusion2, Register exclusion3) {
     691             :   int bytes = 0;
     692         676 :   if (fp_mode == kSaveFPRegs) {
     693       11220 :     for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
     694             :       XMMRegister reg = XMMRegister::from_code(i);
     695       10880 :       Movsd(reg, Operand(rsp, i * kDoubleSize));
     696             :     }
     697             :     int delta = kDoubleSize * XMMRegister::kNumRegisters;
     698         340 :     addq(rsp, Immediate(kDoubleSize * XMMRegister::kNumRegisters));
     699             :     bytes += delta;
     700             :   }
     701             : 
     702       15548 :   for (int i = kNumberOfSavedRegs - 1; i >= 0; i--) {
     703        7436 :     Register reg = saved_regs[i];
     704        7436 :     if (reg != exclusion1 && reg != exclusion2 && reg != exclusion3) {
     705        6760 :       popq(reg);
     706        6760 :       bytes += kSystemPointerSize;
     707             :     }
     708             :   }
     709             : 
     710         676 :   return bytes;
     711             : }
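// [Editor's note, not in the original source.] The three routines above report
// the same byte count: RequiredStackSizeForCallerSaved computes exactly what
// PushCallerSaved pushes and PopCallerSaved pops, so callers can size frames
// without emitting the pushes.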
     712             : 
     713        9195 : void TurboAssembler::Cvtss2sd(XMMRegister dst, XMMRegister src) {
     714        9195 :   if (CpuFeatures::IsSupported(AVX)) {
     715             :     CpuFeatureScope scope(this, AVX);
     716        9179 :     vcvtss2sd(dst, src, src);
     717             :   } else {
     718          16 :     cvtss2sd(dst, src);
     719             :   }
     720        9195 : }
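// [Editor's note, not in the original source.] The AVX scalar conversions used
// here and below take an extra source operand; passing src for it (e.g.
// vcvtss2sd(dst, src, src)) means the untouched upper lanes come from src
// rather than from dst, so the instruction carries no dependency on dst's
// previous contents.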
     721             : 
     722       11349 : void TurboAssembler::Cvtss2sd(XMMRegister dst, Operand src) {
     723       11349 :   if (CpuFeatures::IsSupported(AVX)) {
     724             :     CpuFeatureScope scope(this, AVX);
     725       11349 :     vcvtss2sd(dst, dst, src);
     726             :   } else {
     727           0 :     cvtss2sd(dst, src);
     728             :   }
     729       11349 : }
     730             : 
     731        6164 : void TurboAssembler::Cvtsd2ss(XMMRegister dst, XMMRegister src) {
     732        6164 :   if (CpuFeatures::IsSupported(AVX)) {
     733             :     CpuFeatureScope scope(this, AVX);
     734        6145 :     vcvtsd2ss(dst, src, src);
     735             :   } else {
     736          19 :     cvtsd2ss(dst, src);
     737             :   }
     738        6164 : }
     739             : 
     740       11848 : void TurboAssembler::Cvtsd2ss(XMMRegister dst, Operand src) {
     741       11848 :   if (CpuFeatures::IsSupported(AVX)) {
     742             :     CpuFeatureScope scope(this, AVX);
     743       11848 :     vcvtsd2ss(dst, dst, src);
     744             :   } else {
     745           0 :     cvtsd2ss(dst, src);
     746             :   }
     747       11848 : }
     748             : 
     749      375238 : void TurboAssembler::Cvtlsi2sd(XMMRegister dst, Register src) {
     750      375238 :   if (CpuFeatures::IsSupported(AVX)) {
     751             :     CpuFeatureScope scope(this, AVX);
     752      373260 :     vxorpd(dst, dst, dst);
     753             :     vcvtlsi2sd(dst, dst, src);
     754             :   } else {
     755        1978 :     xorpd(dst, dst);
     756        1978 :     cvtlsi2sd(dst, src);
     757             :   }
     758      375249 : }
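// [Editor's note, not in the original source.] cvtsi2sd and the related
// conversions only write the low lanes of dst, so the xorpd/vxorpd (and
// xorps/vxorps) zeroing above and in the following variants exists to break
// the false dependency on dst's previous contents.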
     759             : 
     760       57684 : void TurboAssembler::Cvtlsi2sd(XMMRegister dst, Operand src) {
     761       57684 :   if (CpuFeatures::IsSupported(AVX)) {
     762             :     CpuFeatureScope scope(this, AVX);
     763       57504 :     vxorpd(dst, dst, dst);
     764             :     vcvtlsi2sd(dst, dst, src);
     765             :   } else {
     766         180 :     xorpd(dst, dst);
     767         180 :     cvtlsi2sd(dst, src);
     768             :   }
     769       57691 : }
     770             : 
     771        1136 : void TurboAssembler::Cvtlsi2ss(XMMRegister dst, Register src) {
     772        1136 :   if (CpuFeatures::IsSupported(AVX)) {
     773             :     CpuFeatureScope scope(this, AVX);
     774        1122 :     vxorps(dst, dst, dst);
     775             :     vcvtlsi2ss(dst, dst, src);
     776             :   } else {
     777          14 :     xorps(dst, dst);
     778          14 :     cvtlsi2ss(dst, src);
     779             :   }
     780        1136 : }
     781             : 
     782           8 : void TurboAssembler::Cvtlsi2ss(XMMRegister dst, Operand src) {
     783           8 :   if (CpuFeatures::IsSupported(AVX)) {
     784             :     CpuFeatureScope scope(this, AVX);
     785           8 :     vxorps(dst, dst, dst);
     786             :     vcvtlsi2ss(dst, dst, src);
     787             :   } else {
     788           0 :     xorps(dst, dst);
     789           0 :     cvtlsi2ss(dst, src);
     790             :   }
     791           8 : }
     792             : 
     793         395 : void TurboAssembler::Cvtqsi2ss(XMMRegister dst, Register src) {
     794         395 :   if (CpuFeatures::IsSupported(AVX)) {
     795             :     CpuFeatureScope scope(this, AVX);
     796         395 :     vxorps(dst, dst, dst);
     797             :     vcvtqsi2ss(dst, dst, src);
     798             :   } else {
     799           0 :     xorps(dst, dst);
     800           0 :     cvtqsi2ss(dst, src);
     801             :   }
     802         395 : }
     803             : 
     804           0 : void TurboAssembler::Cvtqsi2ss(XMMRegister dst, Operand src) {
     805           0 :   if (CpuFeatures::IsSupported(AVX)) {
     806             :     CpuFeatureScope scope(this, AVX);
     807           0 :     vxorps(dst, dst, dst);
     808             :     vcvtqsi2ss(dst, dst, src);
     809             :   } else {
     810           0 :     xorps(dst, dst);
     811           0 :     cvtqsi2ss(dst, src);
     812             :   }
     813           0 : }
     814             : 
     815       20861 : void TurboAssembler::Cvtqsi2sd(XMMRegister dst, Register src) {
     816       20861 :   if (CpuFeatures::IsSupported(AVX)) {
     817             :     CpuFeatureScope scope(this, AVX);
     818       20545 :     vxorpd(dst, dst, dst);
     819             :     vcvtqsi2sd(dst, dst, src);
     820             :   } else {
     821         316 :     xorpd(dst, dst);
     822         316 :     cvtqsi2sd(dst, src);
     823             :   }
     824       20876 : }
     825             : 
     826        2015 : void TurboAssembler::Cvtqsi2sd(XMMRegister dst, Operand src) {
     827        2015 :   if (CpuFeatures::IsSupported(AVX)) {
     828             :     CpuFeatureScope scope(this, AVX);
     829        1981 :     vxorpd(dst, dst, dst);
     830             :     vcvtqsi2sd(dst, dst, src);
     831             :   } else {
     832          34 :     xorpd(dst, dst);
     833          34 :     cvtqsi2sd(dst, src);
     834             :   }
     835        2015 : }
     836             : 
     837          88 : void TurboAssembler::Cvtlui2ss(XMMRegister dst, Register src) {
     838             :   // Zero-extend the 32 bit value to 64 bit.
     839          88 :   movl(kScratchRegister, src);
     840          88 :   Cvtqsi2ss(dst, kScratchRegister);
     841          88 : }
     842             : 
     843           0 : void TurboAssembler::Cvtlui2ss(XMMRegister dst, Operand src) {
     844             :   // Zero-extend the 32 bit value to 64 bit.
     845           0 :   movl(kScratchRegister, src);
     846           0 :   Cvtqsi2ss(dst, kScratchRegister);
     847           0 : }
     848             : 
     849         395 : void TurboAssembler::Cvtlui2sd(XMMRegister dst, Register src) {
     850             :   // Zero-extend the 32 bit value to 64 bit.
     851         395 :   movl(kScratchRegister, src);
     852         395 :   Cvtqsi2sd(dst, kScratchRegister);
     853         395 : }
     854             : 
     855       10806 : void TurboAssembler::Cvtlui2sd(XMMRegister dst, Operand src) {
     856             :   // Zero-extend the 32 bit value to 64 bit.
     857       10806 :   movl(kScratchRegister, src);
     858       10807 :   Cvtqsi2sd(dst, kScratchRegister);
     859       10807 : }
     860             : 
     861          60 : void TurboAssembler::Cvtqui2ss(XMMRegister dst, Register src) {
     862          60 :   Label done;
     863          60 :   Cvtqsi2ss(dst, src);
     864          60 :   testq(src, src);
     865          60 :   j(positive, &done, Label::kNear);
     866             : 
     867             :   // Compute {src/2 | (src&1)} (retain the LSB to avoid rounding errors).
     868          59 :   if (src != kScratchRegister) movq(kScratchRegister, src);
     869             :   shrq(kScratchRegister, Immediate(1));
      870             :   // The LSB is shifted into CF. If it is set, set the LSB in kScratchRegister.
     871          59 :   Label msb_not_set;
     872          59 :   j(not_carry, &msb_not_set, Label::kNear);
     873             :   orq(kScratchRegister, Immediate(1));
     874          59 :   bind(&msb_not_set);
     875          60 :   Cvtqsi2ss(dst, kScratchRegister);
     876          59 :   addss(dst, dst);
     877          60 :   bind(&done);
     878          60 : }
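// [Editor's sketch, not part of the original source: the rounding-safe
//  unsigned conversion above modeled in plain C++ (Uint64ToFloat is a
//  hypothetical name).]
//   float Uint64ToFloat(uint64_t u) {
//     int64_t s = static_cast<int64_t>(u);
//     if (s >= 0) return static_cast<float>(s);    // plain signed conversion
//     uint64_t half = (u >> 1) | (u & 1);          // keep the LSB for rounding
//     float f = static_cast<float>(static_cast<int64_t>(half));
//     return f + f;                                // addss dst, dst
//   }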
     879             : 
     880           0 : void TurboAssembler::Cvtqui2ss(XMMRegister dst, Operand src) {
     881           0 :   movq(kScratchRegister, src);
     882           0 :   Cvtqui2ss(dst, kScratchRegister);
     883           0 : }
     884             : 
     885        3739 : void TurboAssembler::Cvtqui2sd(XMMRegister dst, Register src) {
     886        3739 :   Label done;
     887        3739 :   Cvtqsi2sd(dst, src);
     888        3740 :   testq(src, src);
     889        3739 :   j(positive, &done, Label::kNear);
     890             : 
     891             :   // Compute {src/2 | (src&1)} (retain the LSB to avoid rounding errors).
     892        3739 :   if (src != kScratchRegister) movq(kScratchRegister, src);
     893             :   shrq(kScratchRegister, Immediate(1));
      894             :   // The LSB is shifted into CF. If it is set, set the LSB in kScratchRegister.
     895        3739 :   Label msb_not_set;
     896        3739 :   j(not_carry, &msb_not_set, Label::kNear);
     897             :   orq(kScratchRegister, Immediate(1));
     898        3740 :   bind(&msb_not_set);
     899        3740 :   Cvtqsi2sd(dst, kScratchRegister);
     900        3740 :   addsd(dst, dst);
     901        3740 :   bind(&done);
     902        3740 : }
     903             : 
     904        1232 : void TurboAssembler::Cvtqui2sd(XMMRegister dst, Operand src) {
     905        1232 :   movq(kScratchRegister, src);
     906        1232 :   Cvtqui2sd(dst, kScratchRegister);
     907        1232 : }
     908             : 
     909         460 : void TurboAssembler::Cvttss2si(Register dst, XMMRegister src) {
     910         460 :   if (CpuFeatures::IsSupported(AVX)) {
     911             :     CpuFeatureScope scope(this, AVX);
     912         460 :     vcvttss2si(dst, src);
     913             :   } else {
     914           0 :     cvttss2si(dst, src);
     915             :   }
     916         460 : }
     917             : 
     918           0 : void TurboAssembler::Cvttss2si(Register dst, Operand src) {
     919           0 :   if (CpuFeatures::IsSupported(AVX)) {
     920             :     CpuFeatureScope scope(this, AVX);
     921           0 :     vcvttss2si(dst, src);
     922             :   } else {
     923           0 :     cvttss2si(dst, src);
     924             :   }
     925           0 : }
     926             : 
     927      108230 : void TurboAssembler::Cvttsd2si(Register dst, XMMRegister src) {
     928      108230 :   if (CpuFeatures::IsSupported(AVX)) {
     929             :     CpuFeatureScope scope(this, AVX);
     930      107716 :     vcvttsd2si(dst, src);
     931             :   } else {
     932         514 :     cvttsd2si(dst, src);
     933             :   }
     934      108230 : }
     935             : 
     936       20330 : void TurboAssembler::Cvttsd2si(Register dst, Operand src) {
     937       20330 :   if (CpuFeatures::IsSupported(AVX)) {
     938             :     CpuFeatureScope scope(this, AVX);
     939       20330 :     vcvttsd2si(dst, src);
     940             :   } else {
     941           0 :     cvttsd2si(dst, src);
     942             :   }
     943       20330 : }
     944             : 
     945         364 : void TurboAssembler::Cvttss2siq(Register dst, XMMRegister src) {
     946         364 :   if (CpuFeatures::IsSupported(AVX)) {
     947             :     CpuFeatureScope scope(this, AVX);
     948         364 :     vcvttss2siq(dst, src);
     949             :   } else {
     950           0 :     cvttss2siq(dst, src);
     951             :   }
     952         364 : }
     953             : 
     954           0 : void TurboAssembler::Cvttss2siq(Register dst, Operand src) {
     955           0 :   if (CpuFeatures::IsSupported(AVX)) {
     956             :     CpuFeatureScope scope(this, AVX);
     957           0 :     vcvttss2siq(dst, src);
     958             :   } else {
     959           0 :     cvttss2siq(dst, src);
     960             :   }
     961           0 : }
     962             : 
     963       63984 : void TurboAssembler::Cvttsd2siq(Register dst, XMMRegister src) {
     964       63984 :   if (CpuFeatures::IsSupported(AVX)) {
     965             :     CpuFeatureScope scope(this, AVX);
     966       63573 :     vcvttsd2siq(dst, src);
     967             :   } else {
     968         411 :     cvttsd2siq(dst, src);
     969             :   }
     970       63994 : }
     971             : 
     972           1 : void TurboAssembler::Cvttsd2siq(Register dst, Operand src) {
     973           1 :   if (CpuFeatures::IsSupported(AVX)) {
     974             :     CpuFeatureScope scope(this, AVX);
     975           1 :     vcvttsd2siq(dst, src);
     976             :   } else {
     977           0 :     cvttsd2siq(dst, src);
     978             :   }
     979           1 : }
     980             : 
     981             : namespace {
     982             : template <typename OperandOrXMMRegister, bool is_double>
     983        3104 : void ConvertFloatToUint64(TurboAssembler* tasm, Register dst,
     984             :                           OperandOrXMMRegister src, Label* fail) {
     985        3104 :   Label success;
     986             :   // There does not exist a native float-to-uint instruction, so we have to use
     987             :   // a float-to-int, and postprocess the result.
     988             :   if (is_double) {
     989        3016 :     tasm->Cvttsd2siq(dst, src);
     990             :   } else {
     991          88 :     tasm->Cvttss2siq(dst, src);
     992             :   }
     993             :   // If the result of the conversion is positive, we are already done.
     994        3104 :   tasm->testq(dst, dst);
     995        3104 :   tasm->j(positive, &success);
     996             :   // The result of the first conversion was negative, which means that the
     997             :   // input value was not within the positive int64 range. We subtract 2^63
     998             :   // and convert it again to see if it is within the uint64 range.
     999             :   if (is_double) {
    1000             :     tasm->Move(kScratchDoubleReg, -9223372036854775808.0);
    1001        3016 :     tasm->addsd(kScratchDoubleReg, src);
    1002        3016 :     tasm->Cvttsd2siq(dst, kScratchDoubleReg);
    1003             :   } else {
    1004             :     tasm->Move(kScratchDoubleReg, -9223372036854775808.0f);
    1005          88 :     tasm->addss(kScratchDoubleReg, src);
    1006          88 :     tasm->Cvttss2siq(dst, kScratchDoubleReg);
    1007             :   }
    1008             :   tasm->testq(dst, dst);
     1009             :   // The only possible negative value here is 0x8000000000000000, which is
    1010             :   // used on x64 to indicate an integer overflow.
    1011        3104 :   tasm->j(negative, fail ? fail : &success);
    1012             :   // The input value is within uint64 range and the second conversion worked
    1013             :   // successfully, but we still have to undo the subtraction we did
    1014             :   // earlier.
    1015        3104 :   tasm->Set(kScratchRegister, 0x8000000000000000);
    1016             :   tasm->orq(dst, kScratchRegister);
    1017        3104 :   tasm->bind(&success);
    1018        3104 : }
    1019             : }  // namespace
    1020             : 
    1021           0 : void TurboAssembler::Cvttsd2uiq(Register dst, Operand src, Label* success) {
    1022           0 :   ConvertFloatToUint64<Operand, true>(this, dst, src, success);
    1023           0 : }
    1024             : 
    1025        3016 : void TurboAssembler::Cvttsd2uiq(Register dst, XMMRegister src, Label* success) {
    1026        3016 :   ConvertFloatToUint64<XMMRegister, true>(this, dst, src, success);
    1027        3016 : }
    1028             : 
    1029           0 : void TurboAssembler::Cvttss2uiq(Register dst, Operand src, Label* success) {
    1030           0 :   ConvertFloatToUint64<Operand, false>(this, dst, src, success);
    1031           0 : }
    1032             : 
    1033          88 : void TurboAssembler::Cvttss2uiq(Register dst, XMMRegister src, Label* success) {
    1034          88 :   ConvertFloatToUint64<XMMRegister, false>(this, dst, src, success);
    1035          88 : }
    1036             : 
    1037     2182146 : void TurboAssembler::Set(Register dst, int64_t x) {
    1038     2182146 :   if (x == 0) {
    1039      849358 :     xorl(dst, dst);
    1040     1332788 :   } else if (is_uint32(x)) {
    1041      885023 :     movl(dst, Immediate(static_cast<uint32_t>(x)));
    1042      447765 :   } else if (is_int32(x)) {
    1043      196400 :     movq(dst, Immediate(static_cast<int32_t>(x)));
    1044             :   } else {
    1045      251365 :     movq(dst, x);
    1046             :   }
    1047     2182182 : }
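
A small sketch (local names, not V8 API) of the immediate-width selection Set()
performs: zero is cleared with xor, values that fit an unsigned 32-bit immediate
use the zero-extending movl, values that fit a signed 32-bit immediate use the
sign-extending movq form, and everything else needs the full 64-bit immediate.

    #include <cstdint>

    enum class ImmForm { kXorClear, kMovl32, kMovqImm32, kMovqImm64 };

    // Mirrors the classification in TurboAssembler::Set(Register, int64_t):
    // always pick the shortest encoding that reproduces the 64-bit value.
    constexpr ImmForm FormFor(int64_t x) {
      if (x == 0) return ImmForm::kXorClear;                // xorl dst, dst
      if (x >= 0 && x <= int64_t{UINT32_MAX}) return ImmForm::kMovl32;
      if (x >= INT32_MIN && x <= INT32_MAX) return ImmForm::kMovqImm32;
      return ImmForm::kMovqImm64;                           // 10-byte movabs
    }

    static_assert(FormFor(-1) == ImmForm::kMovqImm32, "fits sign-extended imm32");
    static_assert(FormFor(int64_t{1} << 40) == ImmForm::kMovqImm64, "needs 64 bits");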
    1048             : 
    1049       12727 : void TurboAssembler::Set(Operand dst, intptr_t x) {
    1050       12727 :   if (is_int32(x)) {
    1051       12727 :     movq(dst, Immediate(static_cast<int32_t>(x)));
    1052             :   } else {
    1053           0 :     Set(kScratchRegister, x);
    1054           0 :     movq(dst, kScratchRegister);
    1055             :   }
    1056       12727 : }
    1057             : 
    1058             : 
    1059             : // ----------------------------------------------------------------------------
    1060             : // Smi tagging, untagging and tag detection.
    1061             : 
    1062         168 : Register TurboAssembler::GetSmiConstant(Smi source) {
    1063             :   STATIC_ASSERT(kSmiTag == 0);
    1064             :   int value = source->value();
    1065         168 :   if (value == 0) {
    1066           0 :     xorl(kScratchRegister, kScratchRegister);
    1067           0 :     return kScratchRegister;
    1068             :   }
    1069         168 :   Move(kScratchRegister, source);
    1070         168 :   return kScratchRegister;
    1071             : }
    1072             : 
    1073     2450952 : void TurboAssembler::Move(Register dst, Smi source) {
    1074             :   STATIC_ASSERT(kSmiTag == 0);
    1075             :   int value = source->value();
    1076     2450952 :   if (value == 0) {
    1077      785228 :     xorl(dst, dst);
    1078             :   } else {
    1079             :     Move(dst, source.ptr(), RelocInfo::NONE);
    1080             :   }
    1081     2450947 : }
    1082             : 
    1083     6199577 : void TurboAssembler::Move(Register dst, ExternalReference ext) {
    1084             :   if (FLAG_embedded_builtins) {
    1085     6199577 :     if (root_array_available_ && options().isolate_independent_code) {
    1086      773360 :       IndirectLoadExternalReference(dst, ext);
    1087      773360 :       return;
    1088             :     }
    1089             :   }
    1090     5426217 :   movq(dst, Immediate64(ext.address(), RelocInfo::EXTERNAL_REFERENCE));
    1091             : }
    1092             : 
    1093        1246 : void MacroAssembler::SmiTag(Register dst, Register src) {
    1094             :   STATIC_ASSERT(kSmiTag == 0);
    1095        1246 :   if (dst != src) {
    1096         305 :     movq(dst, src);
    1097             :   }
    1098             :   DCHECK(SmiValuesAre32Bits() || SmiValuesAre31Bits());
    1099        1246 :   shlq(dst, Immediate(kSmiShift));
    1100        1246 : }
    1101             : 
    1102        1568 : void TurboAssembler::SmiUntag(Register dst, Register src) {
    1103             :   STATIC_ASSERT(kSmiTag == 0);
    1104        1568 :   if (dst != src) {
    1105           0 :     movq(dst, src);
    1106             :   }
    1107             :   DCHECK(SmiValuesAre32Bits() || SmiValuesAre31Bits());
    1108        1568 :   sarq(dst, Immediate(kSmiShift));
    1109        1568 : }
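
A scalar sketch of the tagging arithmetic these helpers emit, assuming
kSmiTag == 0 (asserted above) and taking the shift amount as a parameter, since
it differs between the 31-bit-smi and 32-bit-smi configurations; the function
names below are local to the sketch.

    #include <cstdint>

    // Tagging shifts the payload up so the low bit(s) become the zero smi tag;
    // untagging is an arithmetic right shift so negative payloads stay negative,
    // matching the shlq/sarq pair above.
    inline int64_t SmiTagValue(int64_t value, int smi_shift) {
      return static_cast<int64_t>(static_cast<uint64_t>(value) << smi_shift);
    }

    inline int64_t SmiUntagValue(int64_t tagged, int smi_shift) {
      return tagged >> smi_shift;  // arithmetic shift on two's-complement targets
    }

    // Round-trips for any payload in the smi value range, e.g.
    // SmiUntagValue(SmiTagValue(-42, 1), 1) == -42.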
    1110             : 
    1111        1680 : void TurboAssembler::SmiUntag(Register dst, Operand src) {
    1112             :   if (SmiValuesAre32Bits()) {
    1113             :     movl(dst, Operand(src, kSmiShift / kBitsPerByte));
    1114             :     // Sign extend to 64-bit.
    1115             :     movsxlq(dst, dst);
    1116             :   } else {
    1117             :     DCHECK(SmiValuesAre31Bits());
    1118             : #ifdef V8_COMPRESS_POINTERS
    1119        1680 :     movsxlq(dst, src);
    1120             : #else
    1121             :     movq(dst, src);
    1122             : #endif
    1123             :     sarq(dst, Immediate(kSmiShift));
    1124             :   }
    1125        1680 : }
    1126             : 
    1127         165 : void MacroAssembler::SmiCompare(Register smi1, Register smi2) {
    1128         165 :   AssertSmi(smi1);
    1129         165 :   AssertSmi(smi2);
    1130         165 :   cmp_tagged(smi1, smi2);
    1131         165 : }
    1132             : 
    1133         224 : void MacroAssembler::SmiCompare(Register dst, Smi src) {
    1134         224 :   AssertSmi(dst);
    1135         224 :   Cmp(dst, src);
    1136         224 : }
    1137             : 
    1138         224 : void MacroAssembler::Cmp(Register dst, Smi src) {
    1139             :   DCHECK_NE(dst, kScratchRegister);
    1140         224 :   if (src->value() == 0) {
    1141          56 :     test_tagged(dst, dst);
    1142             :   } else {
    1143         168 :     Register constant_reg = GetSmiConstant(src);
    1144         168 :     cmp_tagged(dst, constant_reg);
    1145             :   }
    1146         224 : }
    1147             : 
    1148           0 : void MacroAssembler::SmiCompare(Register dst, Operand src) {
    1149           0 :   AssertSmi(dst);
    1150           0 :   AssertSmi(src);
    1151           0 :   cmp_tagged(dst, src);
    1152           0 : }
    1153             : 
    1154           0 : void MacroAssembler::SmiCompare(Operand dst, Register src) {
    1155           0 :   AssertSmi(dst);
    1156           0 :   AssertSmi(src);
    1157           0 :   cmp_tagged(dst, src);
    1158           0 : }
    1159             : 
    1160           0 : void MacroAssembler::SmiCompare(Operand dst, Smi src) {
    1161           0 :   AssertSmi(dst);
    1162             :   if (SmiValuesAre32Bits()) {
    1163             :     cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
    1164             :   } else {
    1165             :     DCHECK(SmiValuesAre31Bits());
    1166           0 :     cmpl(dst, Immediate(src));
    1167             :   }
    1168           0 : }
    1169             : 
    1170           0 : void MacroAssembler::Cmp(Operand dst, Smi src) {
    1171             :   // The Operand must not use the scratch register returned by GetSmiConstant.
    1172           0 :   Register smi_reg = GetSmiConstant(src);
    1173             :   DCHECK(!dst.AddressUsesRegister(smi_reg));
    1174           0 :   cmp_tagged(dst, smi_reg);
    1175           0 : }
    1176             : 
    1177             : 
    1178          40 : Condition TurboAssembler::CheckSmi(Register src) {
    1179             :   STATIC_ASSERT(kSmiTag == 0);
    1180      304883 :   testb(src, Immediate(kSmiTagMask));
    1181          40 :   return zero;
    1182             : }
    1183             : 
    1184           0 : Condition TurboAssembler::CheckSmi(Operand src) {
    1185             :   STATIC_ASSERT(kSmiTag == 0);
    1186           0 :   testb(src, Immediate(kSmiTagMask));
    1187           0 :   return zero;
    1188             : }
    1189             : 
    1190      304787 : void TurboAssembler::JumpIfSmi(Register src, Label* on_smi,
    1191             :                                Label::Distance near_jump) {
    1192             :   Condition smi = CheckSmi(src);
    1193      304787 :   j(smi, on_smi, near_jump);
    1194      304787 : }
    1195             : 
    1196          56 : void MacroAssembler::JumpIfNotSmi(Register src,
    1197             :                                   Label* on_not_smi,
    1198             :                                   Label::Distance near_jump) {
    1199             :   Condition smi = CheckSmi(src);
    1200          56 :   j(NegateCondition(smi), on_not_smi, near_jump);
    1201          56 : }
    1202             : 
    1203           0 : void MacroAssembler::JumpIfNotSmi(Operand src, Label* on_not_smi,
    1204             :                                   Label::Distance near_jump) {
    1205             :   Condition smi = CheckSmi(src);
    1206           0 :   j(NegateCondition(smi), on_not_smi, near_jump);
    1207           0 : }
    1208             : 
    1209           0 : void MacroAssembler::SmiAddConstant(Operand dst, Smi constant) {
    1210           0 :   if (constant->value() != 0) {
    1211             :     if (SmiValuesAre32Bits()) {
    1212             :       addl(Operand(dst, kSmiShift / kBitsPerByte),
    1213             :            Immediate(constant->value()));
    1214             :     } else {
    1215             :       DCHECK(SmiValuesAre31Bits());
    1216             :       if (kTaggedSize == kInt64Size) {
    1217             :         // Sign-extend value after addition
    1218             :         movl(kScratchRegister, dst);
    1219             :         addl(kScratchRegister, Immediate(constant));
    1220             :         movsxlq(kScratchRegister, kScratchRegister);
    1221             :         movq(dst, kScratchRegister);
    1222             :       } else {
    1223             :         DCHECK_EQ(kTaggedSize, kInt32Size);
    1224           0 :         addl(dst, Immediate(constant));
    1225             :       }
    1226             :     }
    1227             :   }
    1228           0 : }
    1229             : 
    1230         568 : SmiIndex MacroAssembler::SmiToIndex(Register dst,
    1231             :                                     Register src,
    1232             :                                     int shift) {
    1233             :   if (SmiValuesAre32Bits()) {
    1234             :     DCHECK(is_uint6(shift));
    1235             :     // There is a possible optimization if shift is in the range 60-63, but that
    1236             :     // will (and must) never happen.
    1237             :     if (dst != src) {
    1238             :       movq(dst, src);
    1239             :     }
    1240             :     if (shift < kSmiShift) {
    1241             :       sarq(dst, Immediate(kSmiShift - shift));
    1242             :     } else {
    1243             :       shlq(dst, Immediate(shift - kSmiShift));
    1244             :     }
    1245             :     return SmiIndex(dst, times_1);
    1246             :   } else {
    1247             :     DCHECK(SmiValuesAre31Bits());
    1248         568 :     if (dst != src) {
    1249         200 :       mov_tagged(dst, src);
    1250             :     }
    1251             :     // We have to sign extend the index register to 64-bit as the SMI might
    1252             :     // be negative.
    1253         568 :     movsxlq(dst, dst);
    1254         568 :     if (shift < kSmiShift) {
    1255          50 :       sarq(dst, Immediate(kSmiShift - shift));
    1256         518 :     } else if (shift != kSmiShift) {
    1257         468 :       if (shift - kSmiShift <= static_cast<int>(times_8)) {
    1258         318 :         return SmiIndex(dst, static_cast<ScaleFactor>(shift - kSmiShift));
    1259             :       }
    1260         150 :       shlq(dst, Immediate(shift - kSmiShift));
    1261             :     }
    1262         250 :     return SmiIndex(dst, times_1);
    1263             :   }
    1264             : }
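
A sketch of the arithmetic SmiToIndex collapses into a single shift (or into the
operand's scale factor); smi_shift is the same assumed parameter as in the
earlier sketch and the helper name is ad hoc.

    #include <cstdint>

    // The wanted index is (untagged smi) * 2^shift. Since a valid smi has its
    // low smi_shift bits clear, (smi >> smi_shift) << shift folds into one shift
    // by |shift - smi_shift|; when that difference is 0..3 it can instead become
    // the times_1..times_8 scale factor of the addressing mode, with no shift
    // emitted at all.
    inline int64_t SmiScaledIndex(int64_t tagged_smi, int smi_shift, int shift) {
      if (shift >= smi_shift) {
        return static_cast<int64_t>(static_cast<uint64_t>(tagged_smi)
                                    << (shift - smi_shift));
      }
      return tagged_smi >> (smi_shift - shift);  // arithmetic shift, like sarq
    }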
    1265             : 
    1266          56 : void TurboAssembler::Push(Smi source) {
    1267          56 :   intptr_t smi = static_cast<intptr_t>(source.ptr());
    1268          56 :   if (is_int32(smi)) {
    1269          56 :     Push(Immediate(static_cast<int32_t>(smi)));
    1270          56 :     return;
    1271             :   }
    1272           0 :   int first_byte_set = base::bits::CountTrailingZeros64(smi) / 8;
    1273           0 :   int last_byte_set = (63 - base::bits::CountLeadingZeros64(smi)) / 8;
    1274           0 :   if (first_byte_set == last_byte_set) {
    1275             :     // This sequence has only 7 bytes, compared to the 12 bytes below.
    1276             :     Push(Immediate(0));
    1277           0 :     movb(Operand(rsp, first_byte_set),
    1278           0 :          Immediate(static_cast<int8_t>(smi >> (8 * first_byte_set))));
    1279           0 :     return;
    1280             :   }
    1281           0 :   Register constant = GetSmiConstant(source);
    1282             :   Push(constant);
    1283             : }
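
A sketch (ad-hoc names, GCC/Clang bit builtins standing in for base::bits) of
the size heuristic in Push(Smi): when all set bits of the 64-bit smi pattern
fall inside one byte, the value can be materialized by pushing zero and then
overwriting just that byte.

    #include <cstdint>

    // Returns the index (0..7) of the single byte containing all set bits of
    // |value|, or -1 if the set bits span more than one byte. Requires value != 0.
    inline int SingleSetByteIndex(uint64_t value) {
      int first = __builtin_ctzll(value) / 8;        // byte of the lowest set bit
      int last = (63 - __builtin_clzll(value)) / 8;  // byte of the highest set bit
      return first == last ? first : -1;
    }

    // Example: for 0x0000000000340000 every set bit is in byte 2, so the code
    // above emits push 0 followed by a one-byte store of 0x34 to [rsp + 2].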
    1284             : 
    1285             : // ----------------------------------------------------------------------------
    1286             : 
    1287         728 : void TurboAssembler::Move(Register dst, Register src) {
    1288     1065189 :   if (dst != src) {
    1289      647568 :     movq(dst, src);
    1290             :   }
    1291         728 : }
    1292             : 
    1293      329016 : void TurboAssembler::MovePair(Register dst0, Register src0, Register dst1,
    1294             :                               Register src1) {
    1295      329016 :   if (dst0 != src1) {
    1296             :     // Normal case: Writing to dst0 does not destroy src1.
    1297             :     Move(dst0, src0);
    1298             :     Move(dst1, src1);
    1299        1837 :   } else if (dst1 != src0) {
    1300             :     // Only dst0 and src1 are the same register,
    1301             :     // but writing to dst1 does not destroy src0.
    1302             :     Move(dst1, src1);
    1303             :     Move(dst0, src0);
    1304             :   } else {
    1305             :     // dst0 == src1, and dst1 == src0, a swap is required:
    1306             :     // dst0 \/ src0
    1307             :     // dst1 /\ src1
    1308          31 :     xchgq(dst0, dst1);
    1309             :   }
    1310      329016 : }
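
A worked illustration (plain C++) of why MovePair needs the three cases above:
when the register pair is exactly a swap, emitting the two moves in either order
would overwrite one source before it is read, so a single exchange is required.

    #include <cassert>
    #include <utility>

    int main() {
      int a = 1, b = 2;  // think of a as dst0 == src1 and b as dst1 == src0

      // A naive "Move(dst0, src0); Move(dst1, src1);" would be
      //   a = b;  // a becomes 2, the old value 1 is lost
      //   b = a;  // b stays 2 -- both registers end up holding 2
      // so MovePair emits one xchgq instead, i.e. a genuine swap:
      std::swap(a, b);
      assert(a == 2 && b == 1);
      return 0;
    }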
    1311             : 
    1312     1790694 : void TurboAssembler::MoveNumber(Register dst, double value) {
    1313             :   int32_t smi;
    1314     1790694 :   if (DoubleToSmiInteger(value, &smi)) {
    1315     1745470 :     Move(dst, Smi::FromInt(smi));
    1316             :   } else {
    1317       45224 :     movq_heap_number(dst, value);
    1318             :   }
    1319     1790688 : }
    1320             : 
    1321      131259 : void TurboAssembler::Move(XMMRegister dst, uint32_t src) {
    1322      131259 :   if (src == 0) {
    1323             :     Xorps(dst, dst);
    1324             :   } else {
    1325             :     unsigned nlz = base::bits::CountLeadingZeros(src);
    1326             :     unsigned ntz = base::bits::CountTrailingZeros(src);
    1327             :     unsigned pop = base::bits::CountPopulation(src);
    1328             :     DCHECK_NE(0u, pop);
    1329      121674 :     if (pop + ntz + nlz == 32) {
    1330             :       Pcmpeqd(dst, dst);
    1331       54202 :       if (ntz) Pslld(dst, static_cast<byte>(ntz + nlz));
    1332       54204 :       if (nlz) Psrld(dst, static_cast<byte>(nlz));
    1333             :     } else {
    1334       67471 :       movl(kScratchRegister, Immediate(src));
    1335             :       Movd(dst, kScratchRegister);
    1336             :     }
    1337             :   }
    1338      131260 : }
    1339             : 
    1340      427572 : void TurboAssembler::Move(XMMRegister dst, uint64_t src) {
    1341      427572 :   if (src == 0) {
    1342             :     Xorpd(dst, dst);
    1343             :   } else {
    1344             :     unsigned nlz = base::bits::CountLeadingZeros(src);
    1345             :     unsigned ntz = base::bits::CountTrailingZeros(src);
    1346             :     unsigned pop = base::bits::CountPopulation(src);
    1347             :     DCHECK_NE(0u, pop);
    1348      323584 :     if (pop + ntz + nlz == 64) {
    1349             :       Pcmpeqd(dst, dst);
    1350      215229 :       if (ntz) Psllq(dst, static_cast<byte>(ntz + nlz));
    1351      215234 :       if (nlz) Psrlq(dst, static_cast<byte>(nlz));
    1352             :     } else {
    1353      108356 :       uint32_t lower = static_cast<uint32_t>(src);
    1354      108356 :       uint32_t upper = static_cast<uint32_t>(src >> 32);
    1355      108356 :       if (upper == 0) {
    1356          31 :         Move(dst, lower);
    1357             :       } else {
    1358      108325 :         movq(kScratchRegister, src);
    1359             :         Movq(dst, kScratchRegister);
    1360             :       }
    1361             :     }
    1362             :   }
    1363      427586 : }
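
A sketch (plain C++, GCC/Clang builtins standing in for base::bits and for the
pcmpeqd/psllq/psrlq sequence) of the constant-materialization trick both
Move(XMMRegister, ...) overloads use: when the constant's set bits form one
contiguous run (population count + leading zeros + trailing zeros equals the bit
width), it can be produced from the all-ones pattern with a left shift followed
by a right shift, avoiding a round trip through a general-purpose register.

    #include <cstdint>

    // Builds |value| from ~0 with two shifts; only valid when the set bits form
    // one contiguous run. Requires value != 0 (the code above special-cases 0
    // with Xorpd).
    inline uint64_t BuildContiguousMask(uint64_t value) {
      unsigned nlz = static_cast<unsigned>(__builtin_clzll(value));
      unsigned ntz = static_cast<unsigned>(__builtin_ctzll(value));
      uint64_t all_ones = ~uint64_t{0};            // pcmpeqd dst, dst
      uint64_t shifted = all_ones << (ntz + nlz);  // psllq: leave the run of ones at the top
      return shifted >> nlz;                       // psrlq: move the run down into place
    }

    // Example: 0x0000FFFFFFFF0000 (a run of 32 ones) is reproduced exactly;
    // 0x00000000000000A5 is not contiguous, so the code above falls back to
    // movq through kScratchRegister.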
    1364             : 
    1365             : // ----------------------------------------------------------------------------
    1366             : 
    1367           5 : void MacroAssembler::Absps(XMMRegister dst) {
    1368           5 :   Andps(dst, ExternalReferenceAsOperand(
    1369             :                  ExternalReference::address_of_float_abs_constant()));
    1370           5 : }
    1371             : 
    1372           5 : void MacroAssembler::Negps(XMMRegister dst) {
    1373           5 :   Xorps(dst, ExternalReferenceAsOperand(
    1374             :                  ExternalReference::address_of_float_neg_constant()));
    1375           5 : }
    1376             : 
    1377           5 : void MacroAssembler::Abspd(XMMRegister dst) {
    1378           5 :   Andps(dst, ExternalReferenceAsOperand(
    1379             :                  ExternalReference::address_of_double_abs_constant()));
    1380           5 : }
    1381             : 
    1382           5 : void MacroAssembler::Negpd(XMMRegister dst) {
    1383           5 :   Xorps(dst, ExternalReferenceAsOperand(
    1384             :                  ExternalReference::address_of_double_neg_constant()));
    1385           5 : }
    1386             : 
    1387           0 : void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
    1388             :   AllowDeferredHandleDereference smi_check;
    1389           0 :   if (source->IsSmi()) {
    1390           0 :     Cmp(dst, Smi::cast(*source));
    1391             :   } else {
    1392           0 :     Move(kScratchRegister, Handle<HeapObject>::cast(source));
    1393           0 :     cmp_tagged(dst, kScratchRegister);
    1394             :   }
    1395           0 : }
    1396             : 
    1397         112 : void MacroAssembler::Cmp(Operand dst, Handle<Object> source) {
    1398             :   AllowDeferredHandleDereference smi_check;
    1399         112 :   if (source->IsSmi()) {
    1400             :     Cmp(dst, Smi::cast(*source));
    1401             :   } else {
    1402         112 :     Move(kScratchRegister, Handle<HeapObject>::cast(source));
    1403         112 :     cmp_tagged(dst, kScratchRegister);
    1404             :   }
    1405         112 : }
    1406             : 
    1407          56 : void MacroAssembler::JumpIfIsInRange(Register value, unsigned lower_limit,
    1408             :                                      unsigned higher_limit, Label* on_in_range,
    1409             :                                      Label::Distance near_jump) {
    1410          56 :   if (lower_limit != 0) {
    1411         112 :     leal(kScratchRegister, Operand(value, 0u - lower_limit));
    1412          56 :     cmpl(kScratchRegister, Immediate(higher_limit - lower_limit));
    1413             :   } else {
    1414           0 :     cmpl(value, Immediate(higher_limit));
    1415             :   }
    1416          56 :   j(below_equal, on_in_range, near_jump);
    1417          56 : }
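
A scalar sketch (local helper name) of the classic trick JumpIfIsInRange relies
on: subtracting lower_limit and doing one unsigned comparison replaces a pair of
bounds checks, because values below lower_limit wrap around to very large
unsigned numbers.

    #include <cstdint>

    // Equivalent to (lower <= value && value <= higher) for unsigned inputs,
    // with a single compare -- exactly what the leal/cmpl pair above computes.
    inline bool IsInRange(uint32_t value, uint32_t lower, uint32_t higher) {
      return (value - lower) <= (higher - lower);  // unsigned wrap-around does the work
    }

    // E.g. IsInRange(0x2F, 0x30, 0x39) is false: 0x2F - 0x30 wraps to 0xFFFFFFFF,
    // which is not <= 9.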
    1418             : 
    1419          21 : void TurboAssembler::Push(Handle<HeapObject> source) {
    1420          21 :   Move(kScratchRegister, source);
    1421             :   Push(kScratchRegister);
    1422          21 : }
    1423             : 
    1424     6856004 : void TurboAssembler::Move(Register result, Handle<HeapObject> object,
    1425             :                           RelocInfo::Mode rmode) {
    1426             :   if (FLAG_embedded_builtins) {
    1427     6856004 :     if (root_array_available_ && options().isolate_independent_code) {
    1428       47936 :       IndirectLoadConstant(result, object);
    1429       47936 :       return;
    1430             :     }
    1431             :   }
    1432     6808068 :   movq(result, Immediate64(object.address(), rmode));
    1433             : }
    1434             : 
    1435           0 : void TurboAssembler::Move(Operand dst, Handle<HeapObject> object,
    1436             :                           RelocInfo::Mode rmode) {
    1437           0 :   Move(kScratchRegister, object, rmode);
    1438           0 :   movq(dst, kScratchRegister);
    1439           0 : }
    1440             : 
    1441        2125 : void TurboAssembler::MoveStringConstant(Register result,
    1442             :                                         const StringConstantBase* string,
    1443             :                                         RelocInfo::Mode rmode) {
    1444        2125 :   movq_string(result, string);
    1445        2125 : }
    1446             : 
    1447         288 : void MacroAssembler::Drop(int stack_elements) {
    1448         288 :   if (stack_elements > 0) {
    1449         288 :     addq(rsp, Immediate(stack_elements * kSystemPointerSize));
    1450             :   }
    1451         288 : }
    1452             : 
    1453             : 
    1454          56 : void MacroAssembler::DropUnderReturnAddress(int stack_elements,
    1455             :                                             Register scratch) {
    1456             :   DCHECK_GT(stack_elements, 0);
    1457          56 :   if (stack_elements == 1) {
    1458          56 :     popq(MemOperand(rsp, 0));
    1459          56 :     return;
    1460             :   }
    1461             : 
    1462             :   PopReturnAddressTo(scratch);
    1463           0 :   Drop(stack_elements);
    1464             :   PushReturnAddressFrom(scratch);
    1465             : }
    1466             : 
    1467      672412 : void TurboAssembler::Push(Register src) { pushq(src); }
    1468             : 
    1469       59544 : void TurboAssembler::Push(Operand src) { pushq(src); }
    1470             : 
    1471      792491 : void MacroAssembler::PushQuad(Operand src) { pushq(src); }
    1472             : 
    1473     1750829 : void TurboAssembler::Push(Immediate value) { pushq(value); }
    1474             : 
    1475           0 : void MacroAssembler::PushImm32(int32_t imm32) { pushq_imm32(imm32); }
    1476             : 
    1477        3104 : void MacroAssembler::Pop(Register dst) { popq(dst); }
    1478             : 
    1479       44364 : void MacroAssembler::Pop(Operand dst) { popq(dst); }
    1480             : 
    1481      704445 : void MacroAssembler::PopQuad(Operand dst) { popq(dst); }
    1482             : 
    1483           0 : void TurboAssembler::Jump(ExternalReference ext) {
    1484           0 :   LoadAddress(kScratchRegister, ext);
    1485           0 :   jmp(kScratchRegister);
    1486           0 : }
    1487             : 
    1488           0 : void TurboAssembler::Jump(Operand op) { jmp(op); }
    1489             : 
    1490        1024 : void TurboAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
    1491             :   Move(kScratchRegister, destination, rmode);
    1492        1024 :   jmp(kScratchRegister);
    1493        1024 : }
    1494             : 
    1495      513702 : void TurboAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode,
    1496             :                           Condition cc) {
    1497             :   DCHECK_IMPLIES(options().isolate_independent_code,
    1498             :                  Builtins::IsIsolateIndependentBuiltin(*code_object));
    1499      513702 :   if (options().inline_offheap_trampolines) {
    1500      464031 :     int builtin_index = Builtins::kNoBuiltinId;
    1501      464031 :     if (isolate()->builtins()->IsBuiltinHandle(code_object, &builtin_index) &&
    1502             :         Builtins::IsIsolateIndependent(builtin_index)) {
    1503      463839 :       Label skip;
    1504      463839 :       if (cc != always) {
    1505      463834 :         if (cc == never) return;
    1506      463832 :         j(NegateCondition(cc), &skip, Label::kNear);
    1507             :       }
    1508             :       // Inline the trampoline.
    1509      463818 :       RecordCommentForOffHeapTrampoline(builtin_index);
    1510      463821 :       CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
    1511      463827 :       EmbeddedData d = EmbeddedData::FromBlob();
    1512      463827 :       Address entry = d.InstructionStartOfBuiltin(builtin_index);
    1513             :       Move(kScratchRegister, entry, RelocInfo::OFF_HEAP_TARGET);
    1514      463833 :       jmp(kScratchRegister);
    1515      463829 :       bind(&skip);
    1516      463833 :       return;
    1517             :     }
    1518             :   }
    1519       49879 :   j(cc, code_object, rmode);
    1520             : }
    1521             : 
    1522    36077743 : void MacroAssembler::JumpToInstructionStream(Address entry) {
    1523             :   Move(kOffHeapTrampolineRegister, entry, RelocInfo::OFF_HEAP_TARGET);
    1524    36077745 :   jmp(kOffHeapTrampolineRegister);
    1525    36077745 : }
    1526             : 
    1527           0 : void TurboAssembler::Call(ExternalReference ext) {
    1528           0 :   LoadAddress(kScratchRegister, ext);
    1529           0 :   call(kScratchRegister);
    1530           0 : }
    1531             : 
    1532        3584 : void TurboAssembler::Call(Operand op) {
    1533        3584 :   if (!CpuFeatures::IsSupported(ATOM)) {
    1534        3584 :     call(op);
    1535             :   } else {
    1536           0 :     movq(kScratchRegister, op);
    1537           0 :     call(kScratchRegister);
    1538             :   }
    1539        3584 : }
    1540             : 
    1541           0 : void TurboAssembler::Call(Address destination, RelocInfo::Mode rmode) {
    1542             :   Move(kScratchRegister, destination, rmode);
    1543           0 :   call(kScratchRegister);
    1544           0 : }
    1545             : 
    1546     5153213 : void TurboAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
    1547             :   DCHECK_IMPLIES(options().isolate_independent_code,
    1548             :                  Builtins::IsIsolateIndependentBuiltin(*code_object));
    1549     5153213 :   if (options().inline_offheap_trampolines) {
    1550     4294468 :     int builtin_index = Builtins::kNoBuiltinId;
    1551     4294468 :     if (isolate()->builtins()->IsBuiltinHandle(code_object, &builtin_index) &&
    1552             :         Builtins::IsIsolateIndependent(builtin_index)) {
    1553             :       // Inline the trampoline.
    1554     4278257 :       RecordCommentForOffHeapTrampoline(builtin_index);
    1555     4278257 :       CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
    1556     4278263 :       EmbeddedData d = EmbeddedData::FromBlob();
    1557     4278263 :       Address entry = d.InstructionStartOfBuiltin(builtin_index);
    1558             :       Move(kScratchRegister, entry, RelocInfo::OFF_HEAP_TARGET);
    1559     4278254 :       call(kScratchRegister);
    1560             :       return;
    1561             :     }
    1562             :   }
    1563             :   DCHECK(RelocInfo::IsCodeTarget(rmode));
    1564      874969 :   call(code_object, rmode);
    1565             : }
    1566             : 
    1567        3584 : void TurboAssembler::CallBuiltinPointer(Register builtin_pointer) {
    1568             : #if defined(V8_COMPRESS_POINTERS) || defined(V8_31BIT_SMIS_ON_64BIT_ARCH)
    1569             :   STATIC_ASSERT(kSmiShiftSize == 0);
    1570             :   STATIC_ASSERT(kSmiTagSize == 1);
    1571             :   STATIC_ASSERT(kSmiTag == 0);
    1572             : 
    1573             :   // The builtin_pointer register contains the builtin index as a Smi.
    1574             :   // Untagging is folded into the indexing operand below (we use times_4 instead
    1575             :   // of times_8 since smis are already shifted by one).
    1576        7168 :   Call(Operand(kRootRegister, builtin_pointer, times_4,
    1577        3584 :                IsolateData::builtin_entry_table_offset()));
    1578             : #else   // defined(V8_COMPRESS_POINTERS) || defined(V8_31BIT_SMIS_ON_64BIT_ARCH)
    1579             :   STATIC_ASSERT(kSmiShiftSize == 31);
    1580             :   STATIC_ASSERT(kSmiTagSize == 1);
    1581             :   STATIC_ASSERT(kSmiTag == 0);
    1582             : 
    1583             :   // The builtin_pointer register contains the builtin index as a Smi.
    1584             :   SmiUntag(builtin_pointer, builtin_pointer);
    1585             :   Call(Operand(kRootRegister, builtin_pointer, times_8,
    1586             :                IsolateData::builtin_entry_table_offset()));
    1587             : #endif  // defined(V8_COMPRESS_POINTERS) || defined(V8_31BIT_SMIS_ON_64BIT_ARCH)
    1588        3584 : }
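
A sketch of the offset arithmetic behind the times_4 scale in the
pointer-compression branch above: a builtin index tagged as a 31-bit smi is
already shifted left by one, so scaling the tagged value by 4 yields index * 8,
the stride of the pointer-sized entry table. The helper name and the explicit
table_base parameter are local to the sketch.

    #include <cstdint>

    // With 31-bit smis, the tagged builtin index is (index << 1). The entry
    // table holds one 8-byte pointer per builtin, so the byte offset is
    // index * 8, which the addressing mode reaches as tagged * 4 without
    // untagging first.
    inline uint64_t BuiltinEntryAddress(uint64_t table_base, int builtin_index) {
      uint64_t tagged = static_cast<uint64_t>(builtin_index) << 1;  // smi-tag
      return table_base + tagged * 4;  // == table_base + builtin_index * 8
    }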
    1589             : 
    1590      405539 : void TurboAssembler::LoadCodeObjectEntry(Register destination,
    1591             :                                          Register code_object) {
    1592             :   // Code objects are called differently depending on whether we are generating
    1593             :   // builtin code (which will later be embedded into the binary) or compiling
    1594             :   // user JS code at runtime.
    1595             :   // * Builtin code runs in --jitless mode and thus must not call into on-heap
    1596             :   //   Code targets. Instead, we dispatch through the builtins entry table.
    1597             :   // * Codegen at runtime does not have this restriction and we can use the
    1598             :   //   shorter, branchless instruction sequence. The assumption here is that
    1599             :   //   targets are usually generated code and not builtin Code objects.
    1600             : 
    1601      405539 :   if (options().isolate_independent_code) {
    1602             :     DCHECK(root_array_available());
    1603        9688 :     Label if_code_is_off_heap, out;
    1604             : 
    1605             :     // Check whether the Code object is an off-heap trampoline. If so, call its
    1606             :     // (off-heap) entry point directly without going through the (on-heap)
    1607             :     // trampoline.  Otherwise, just call the Code object as always.
    1608        9688 :     testl(FieldOperand(code_object, Code::kFlagsOffset),
    1609             :           Immediate(Code::IsOffHeapTrampoline::kMask));
    1610        9688 :     j(not_equal, &if_code_is_off_heap);
    1611             : 
    1612             :     // Not an off-heap trampoline, the entry point is at
    1613             :     // Code::raw_instruction_start().
    1614             :     Move(destination, code_object);
    1615             :     addq(destination, Immediate(Code::kHeaderSize - kHeapObjectTag));
    1616        9688 :     jmp(&out);
    1617             : 
    1618             :     // An off-heap trampoline, the entry point is loaded from the builtin entry
    1619             :     // table.
    1620        9688 :     bind(&if_code_is_off_heap);
    1621             :     movl(destination, FieldOperand(code_object, Code::kBuiltinIndexOffset));
    1622       19376 :     movq(destination,
    1623             :          Operand(kRootRegister, destination, times_system_pointer_size,
    1624             :                  IsolateData::builtin_entry_table_offset()));
    1625             : 
    1626        9688 :     bind(&out);
    1627             :   } else {
    1628             :     Move(destination, code_object);
    1629      395851 :     addq(destination, Immediate(Code::kHeaderSize - kHeapObjectTag));
    1630             :   }
    1631      405538 : }
    1632             : 
    1633       24072 : void TurboAssembler::CallCodeObject(Register code_object) {
    1634       24072 :   LoadCodeObjectEntry(code_object, code_object);
    1635       24071 :   call(code_object);
    1636       24071 : }
    1637             : 
    1638         728 : void TurboAssembler::JumpCodeObject(Register code_object) {
    1639         728 :   LoadCodeObjectEntry(code_object, code_object);
    1640         728 :   jmp(code_object);
    1641         728 : }
    1642             : 
    1643           0 : void TurboAssembler::RetpolineCall(Register reg) {
    1644           0 :   Label setup_return, setup_target, inner_indirect_branch, capture_spec;
    1645             : 
    1646           0 :   jmp(&setup_return);  // Jump past the entire retpoline below.
    1647             : 
    1648           0 :   bind(&inner_indirect_branch);
    1649           0 :   call(&setup_target);
    1650             : 
    1651           0 :   bind(&capture_spec);
    1652           0 :   pause();
    1653           0 :   jmp(&capture_spec);
    1654             : 
    1655           0 :   bind(&setup_target);
    1656           0 :   movq(Operand(rsp, 0), reg);
    1657           0 :   ret(0);
    1658             : 
    1659           0 :   bind(&setup_return);
    1660           0 :   call(&inner_indirect_branch);  // Callee will return after this instruction.
    1661           0 : }
    1662             : 
    1663           0 : void TurboAssembler::RetpolineCall(Address destination, RelocInfo::Mode rmode) {
    1664             :   Move(kScratchRegister, destination, rmode);
    1665           0 :   RetpolineCall(kScratchRegister);
    1666           0 : }
    1667             : 
    1668           0 : void TurboAssembler::RetpolineJump(Register reg) {
    1669           0 :   Label setup_target, capture_spec;
    1670             : 
    1671           0 :   call(&setup_target);
    1672             : 
    1673           0 :   bind(&capture_spec);
    1674           0 :   pause();
    1675           0 :   jmp(&capture_spec);
    1676             : 
    1677           0 :   bind(&setup_target);
    1678           0 :   movq(Operand(rsp, 0), reg);
    1679           0 :   ret(0);
    1680           0 : }
    1681             : 
    1682       36891 : void TurboAssembler::Pextrd(Register dst, XMMRegister src, int8_t imm8) {
    1683       36891 :   if (imm8 == 0) {
    1684             :     Movd(dst, src);
    1685             :     return;
    1686             :   }
    1687       36403 :   if (CpuFeatures::IsSupported(SSE4_1)) {
    1688             :     CpuFeatureScope sse_scope(this, SSE4_1);
    1689       36175 :     pextrd(dst, src, imm8);
    1690             :     return;
    1691             :   }
    1692             :   DCHECK_EQ(1, imm8);
    1693         228 :   movq(dst, src);
    1694             :   shrq(dst, Immediate(32));
    1695             : }
    1696             : 
    1697         464 : void TurboAssembler::Pinsrd(XMMRegister dst, Register src, int8_t imm8) {
    1698         464 :   if (CpuFeatures::IsSupported(SSE4_1)) {
    1699             :     CpuFeatureScope sse_scope(this, SSE4_1);
    1700         462 :     pinsrd(dst, src, imm8);
    1701             :     return;
    1702             :   }
    1703             :   Movd(kScratchDoubleReg, src);
    1704           2 :   if (imm8 == 1) {
    1705             :     punpckldq(dst, kScratchDoubleReg);
    1706             :   } else {
    1707             :     DCHECK_EQ(0, imm8);
    1708             :     Movss(dst, kScratchDoubleReg);
    1709             :   }
    1710             : }
    1711             : 
    1712        1440 : void TurboAssembler::Pinsrd(XMMRegister dst, Operand src, int8_t imm8) {
    1713        1440 :   if (CpuFeatures::IsSupported(SSE4_1)) {
    1714             :     CpuFeatureScope sse_scope(this, SSE4_1);
    1715        1440 :     pinsrd(dst, src, imm8);
    1716             :     return;
    1717             :   }
    1718             :   Movd(kScratchDoubleReg, src);
    1719           0 :   if (imm8 == 1) {
    1720             :     punpckldq(dst, kScratchDoubleReg);
    1721             :   } else {
    1722             :     DCHECK_EQ(0, imm8);
    1723             :     Movss(dst, kScratchDoubleReg);
    1724             :   }
    1725             : }
    1726             : 
    1727         446 : void TurboAssembler::Lzcntl(Register dst, Register src) {
    1728         446 :   if (CpuFeatures::IsSupported(LZCNT)) {
    1729             :     CpuFeatureScope scope(this, LZCNT);
    1730           0 :     lzcntl(dst, src);
    1731             :     return;
    1732             :   }
    1733         446 :   Label not_zero_src;
    1734         446 :   bsrl(dst, src);
    1735         446 :   j(not_zero, &not_zero_src, Label::kNear);
    1736         446 :   Set(dst, 63);  // 63^31 == 32
    1737         446 :   bind(&not_zero_src);
    1738             :   xorl(dst, Immediate(31));  // for x in [0..31], 31^x == 31 - x
    1739             : }
    1740             : 
    1741           0 : void TurboAssembler::Lzcntl(Register dst, Operand src) {
    1742           0 :   if (CpuFeatures::IsSupported(LZCNT)) {
    1743             :     CpuFeatureScope scope(this, LZCNT);
    1744           0 :     lzcntl(dst, src);
    1745             :     return;
    1746             :   }
    1747           0 :   Label not_zero_src;
    1748           0 :   bsrl(dst, src);
    1749           0 :   j(not_zero, &not_zero_src, Label::kNear);
    1750           0 :   Set(dst, 63);  // 63^31 == 32
    1751           0 :   bind(&not_zero_src);
    1752             :   xorl(dst, Immediate(31));  // for x in [0..31], 31^x == 31 - x
    1753             : }
    1754             : 
    1755          36 : void TurboAssembler::Lzcntq(Register dst, Register src) {
    1756          36 :   if (CpuFeatures::IsSupported(LZCNT)) {
    1757             :     CpuFeatureScope scope(this, LZCNT);
    1758           0 :     lzcntq(dst, src);
    1759             :     return;
    1760             :   }
    1761          36 :   Label not_zero_src;
    1762          36 :   bsrq(dst, src);
    1763          36 :   j(not_zero, &not_zero_src, Label::kNear);
    1764          36 :   Set(dst, 127);  // 127^63 == 64
    1765          36 :   bind(&not_zero_src);
    1766             :   xorl(dst, Immediate(63));  // for x in [0..63], 63^x == 63 - x
    1767             : }
    1768             : 
    1769           0 : void TurboAssembler::Lzcntq(Register dst, Operand src) {
    1770           0 :   if (CpuFeatures::IsSupported(LZCNT)) {
    1771             :     CpuFeatureScope scope(this, LZCNT);
    1772           0 :     lzcntq(dst, src);
    1773             :     return;
    1774             :   }
    1775           0 :   Label not_zero_src;
    1776           0 :   bsrq(dst, src);
    1777           0 :   j(not_zero, &not_zero_src, Label::kNear);
    1778           0 :   Set(dst, 127);  // 127^63 == 64
    1779           0 :   bind(&not_zero_src);
    1780             :   xorl(dst, Immediate(63));  // for x in [0..63], 63^x == 63 - x
    1781             : }
    1782             : 
    1783          44 : void TurboAssembler::Tzcntq(Register dst, Register src) {
    1784          44 :   if (CpuFeatures::IsSupported(BMI1)) {
    1785             :     CpuFeatureScope scope(this, BMI1);
    1786           0 :     tzcntq(dst, src);
    1787             :     return;
    1788             :   }
    1789          44 :   Label not_zero_src;
    1790          44 :   bsfq(dst, src);
    1791          44 :   j(not_zero, &not_zero_src, Label::kNear);
    1792             :   // Define the result of tzcnt(0) separately, because bsf(0) is undefined.
    1793          44 :   Set(dst, 64);
    1794          44 :   bind(&not_zero_src);
    1795             : }
    1796             : 
    1797           0 : void TurboAssembler::Tzcntq(Register dst, Operand src) {
    1798           0 :   if (CpuFeatures::IsSupported(BMI1)) {
    1799             :     CpuFeatureScope scope(this, BMI1);
    1800           0 :     tzcntq(dst, src);
    1801             :     return;
    1802             :   }
    1803           0 :   Label not_zero_src;
    1804           0 :   bsfq(dst, src);
    1805           0 :   j(not_zero, &not_zero_src, Label::kNear);
    1806             :   // Define the result of tzcnt(0) separately, because bsf(0) is undefined.
    1807           0 :   Set(dst, 64);
    1808           0 :   bind(&not_zero_src);
    1809             : }
    1810             : 
    1811         332 : void TurboAssembler::Tzcntl(Register dst, Register src) {
    1812         332 :   if (CpuFeatures::IsSupported(BMI1)) {
    1813             :     CpuFeatureScope scope(this, BMI1);
    1814           0 :     tzcntl(dst, src);
    1815             :     return;
    1816             :   }
    1817         332 :   Label not_zero_src;
    1818         332 :   bsfl(dst, src);
    1819         332 :   j(not_zero, &not_zero_src, Label::kNear);
    1820         332 :   Set(dst, 32);  // The result of tzcnt is 32 if src = 0.
    1821         332 :   bind(&not_zero_src);
    1822             : }
    1823             : 
    1824           0 : void TurboAssembler::Tzcntl(Register dst, Operand src) {
    1825           0 :   if (CpuFeatures::IsSupported(BMI1)) {
    1826             :     CpuFeatureScope scope(this, BMI1);
    1827           0 :     tzcntl(dst, src);
    1828             :     return;
    1829             :   }
    1830           0 :   Label not_zero_src;
    1831           0 :   bsfl(dst, src);
    1832           0 :   j(not_zero, &not_zero_src, Label::kNear);
    1833           0 :   Set(dst, 32);  // The result of tzcnt is 32 if src = 0.
    1834           0 :   bind(&not_zero_src);
    1835             : }
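
A sketch (plain C++, GCC/Clang builtins standing in for bsr/bsf) of the bit-scan
fallbacks above for CPUs without LZCNT/BMI1: bsr yields the index of the highest
set bit, so lzcnt(x) = 31 - bsr(x), and for bsr in [0, 31] that equals bsr ^ 31
(hence the xorl); bsf is undefined for a zero input, so tzcnt(0) is special-cased
to the operand width.

    #include <cstdint>

    // lzcnt via bsr: 31 - i == 31 ^ i for i in [0, 31], because 31 is 0b11111
    // and subtracting from an all-ones pattern is a bitwise complement.
    inline unsigned EmulatedLzcnt32(uint32_t x) {
      if (x == 0) return 32;  // mirrors Set(dst, 63) followed by xorl 31 above
      unsigned bsr = 31u - static_cast<unsigned>(__builtin_clz(x));  // index of top bit
      return bsr ^ 31u;       // == 31 - bsr
    }

    // tzcnt via bsf, defining the zero case explicitly.
    inline unsigned EmulatedTzcnt32(uint32_t x) {
      if (x == 0) return 32;
      return static_cast<unsigned>(__builtin_ctz(x));  // index of lowest set bit
    }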
    1836             : 
    1837          64 : void TurboAssembler::Popcntl(Register dst, Register src) {
    1838          64 :   if (CpuFeatures::IsSupported(POPCNT)) {
    1839             :     CpuFeatureScope scope(this, POPCNT);
    1840          64 :     popcntl(dst, src);
    1841          64 :     return;
    1842             :   }
    1843           0 :   UNREACHABLE();
    1844             : }
    1845             : 
    1846           0 : void TurboAssembler::Popcntl(Register dst, Operand src) {
    1847           0 :   if (CpuFeatures::IsSupported(POPCNT)) {
    1848             :     CpuFeatureScope scope(this, POPCNT);
    1849           0 :     popcntl(dst, src);
    1850           0 :     return;
    1851             :   }
    1852           0 :   UNREACHABLE();
    1853             : }
    1854             : 
    1855          44 : void TurboAssembler::Popcntq(Register dst, Register src) {
    1856          44 :   if (CpuFeatures::IsSupported(POPCNT)) {
    1857             :     CpuFeatureScope scope(this, POPCNT);
    1858          44 :     popcntq(dst, src);
    1859          44 :     return;
    1860             :   }
    1861           0 :   UNREACHABLE();
    1862             : }
    1863             : 
    1864           0 : void TurboAssembler::Popcntq(Register dst, Operand src) {
    1865           0 :   if (CpuFeatures::IsSupported(POPCNT)) {
    1866             :     CpuFeatureScope scope(this, POPCNT);
    1867           0 :     popcntq(dst, src);
    1868           0 :     return;
    1869             :   }
    1870           0 :   UNREACHABLE();
    1871             : }
    1872             : 
    1873             : 
    1874           0 : void MacroAssembler::Pushad() {
    1875             :   Push(rax);
    1876             :   Push(rcx);
    1877             :   Push(rdx);
    1878             :   Push(rbx);
    1879             :   // Not pushing rsp or rbp.
    1880             :   Push(rsi);
    1881             :   Push(rdi);
    1882             :   Push(r8);
    1883             :   Push(r9);
    1884             :   // r10 is kScratchRegister.
    1885             :   Push(r11);
    1886             :   Push(r12);
    1887             :   // r13 is kRootRegister.
    1888             :   Push(r14);
    1889             :   Push(r15);
    1890             :   STATIC_ASSERT(12 == kNumSafepointSavedRegisters);
    1891             :   // Use lea for symmetry with Popad.
    1892             :   int sp_delta = (kNumSafepointRegisters - kNumSafepointSavedRegisters) *
    1893             :                  kSystemPointerSize;
    1894           0 :   leaq(rsp, Operand(rsp, -sp_delta));
    1895           0 : }
    1896             : 
    1897             : 
    1898           0 : void MacroAssembler::Popad() {
    1899             :   // Popad must not change the flags, so use lea instead of addq.
    1900             :   int sp_delta = (kNumSafepointRegisters - kNumSafepointSavedRegisters) *
    1901             :                  kSystemPointerSize;
    1902           0 :   leaq(rsp, Operand(rsp, sp_delta));
    1903             :   Pop(r15);
    1904             :   Pop(r14);
    1905             :   Pop(r12);
    1906             :   Pop(r11);
    1907             :   Pop(r9);
    1908             :   Pop(r8);
    1909             :   Pop(rdi);
    1910             :   Pop(rsi);
    1911             :   Pop(rbx);
    1912             :   Pop(rdx);
    1913             :   Pop(rcx);
    1914             :   Pop(rax);
    1915           0 : }
    1916             : 
    1917             : 
    1918             : // Order in which general registers are pushed by Pushad:
    1919             : // rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r12, r14, r15.
    1920             : const int
    1921             : MacroAssembler::kSafepointPushRegisterIndices[Register::kNumRegisters] = {
    1922             :     0,
    1923             :     1,
    1924             :     2,
    1925             :     3,
    1926             :     -1,
    1927             :     -1,
    1928             :     4,
    1929             :     5,
    1930             :     6,
    1931             :     7,
    1932             :     -1,
    1933             :     8,
    1934             :     9,
    1935             :     -1,
    1936             :     10,
    1937             :     11
    1938             : };
    1939             : 
    1940         168 : void MacroAssembler::PushStackHandler() {
    1941             :   // Adjust this code if not the case.
    1942             :   STATIC_ASSERT(StackHandlerConstants::kSize == 2 * kSystemPointerSize);
    1943             :   STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
    1944             : 
    1945             :   Push(Immediate(0));  // Padding.
    1946             : 
    1947             :   // Link the current handler as the next handler.
    1948             :   ExternalReference handler_address =
    1949         168 :       ExternalReference::Create(IsolateAddressId::kHandlerAddress, isolate());
    1950         168 :   Push(ExternalReferenceAsOperand(handler_address));
    1951             : 
    1952             :   // Set this new handler as the current one.
    1953         168 :   movq(ExternalReferenceAsOperand(handler_address), rsp);
    1954         168 : }
    1955             : 
    1956             : 
    1957         168 : void MacroAssembler::PopStackHandler() {
    1958             :   STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
    1959             :   ExternalReference handler_address =
    1960         168 :       ExternalReference::Create(IsolateAddressId::kHandlerAddress, isolate());
    1961         168 :   Pop(ExternalReferenceAsOperand(handler_address));
    1962             :   addq(rsp, Immediate(StackHandlerConstants::kSize - kSystemPointerSize));
    1963         168 : }
    1964             : 
    1965        1392 : void TurboAssembler::Ret() { ret(0); }
    1966             : 
    1967     2619364 : void TurboAssembler::Ret(int bytes_dropped, Register scratch) {
    1968     2619364 :   if (is_uint16(bytes_dropped)) {
    1969     2619360 :     ret(bytes_dropped);
    1970             :   } else {
    1971             :     PopReturnAddressTo(scratch);
    1972             :     addq(rsp, Immediate(bytes_dropped));
    1973             :     PushReturnAddressFrom(scratch);
    1974           4 :     ret(0);
    1975             :   }
    1976     2619515 : }
    1977             : 
    1978         560 : void MacroAssembler::CmpObjectType(Register heap_object,
    1979             :                                    InstanceType type,
    1980             :                                    Register map) {
    1981         560 :   LoadTaggedPointerField(map,
    1982             :                          FieldOperand(heap_object, HeapObject::kMapOffset));
    1983         560 :   CmpInstanceType(map, type);
    1984         560 : }
    1985             : 
    1986             : 
    1987        1120 : void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
    1988        2240 :   cmpw(FieldOperand(map, Map::kInstanceTypeOffset), Immediate(type));
    1989        1120 : }
    1990             : 
    1991           0 : void MacroAssembler::DoubleToI(Register result_reg, XMMRegister input_reg,
    1992             :                                XMMRegister scratch, Label* lost_precision,
    1993             :                                Label* is_nan, Label::Distance dst) {
    1994           0 :   Cvttsd2si(result_reg, input_reg);
    1995           0 :   Cvtlsi2sd(kScratchDoubleReg, result_reg);
    1996             :   Ucomisd(kScratchDoubleReg, input_reg);
    1997           0 :   j(not_equal, lost_precision, dst);
    1998           0 :   j(parity_even, is_nan, dst);  // NaN.
    1999           0 : }
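
A scalar sketch (local names) of the checks DoubleToI performs: truncate,
convert the integer back to double, and compare; any mismatch means precision
was lost, and NaN is detected separately (ucomisd raises the parity flag for
unordered operands). The explicit range check below exists only because an
out-of-range cast is undefined behaviour in C++; the assembly catches that case
through the same round-trip comparison.

    #include <cmath>
    #include <cstdint>

    enum class DoubleToIResult { kOk, kLostPrecision, kIsNaN };

    inline DoubleToIResult DoubleToI32(double input, int32_t* out) {
      if (std::isnan(input)) return DoubleToIResult::kIsNaN;   // parity_even path
      if (input < -2147483648.0 || input >= 2147483648.0) {
        return DoubleToIResult::kLostPrecision;                // outside int32
      }
      int32_t truncated = static_cast<int32_t>(input);         // cvttsd2si
      if (static_cast<double>(truncated) != input) {           // cvtlsi2sd + ucomisd
        return DoubleToIResult::kLostPrecision;                // not_equal path
      }
      *out = truncated;
      return DoubleToIResult::kOk;
    }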
    2000             : 
    2001             : 
    2002         112 : void MacroAssembler::AssertNotSmi(Register object) {
    2003         112 :   if (emit_debug_code()) {
    2004             :     Condition is_smi = CheckSmi(object);
    2005           0 :     Check(NegateCondition(is_smi), AbortReason::kOperandIsASmi);
    2006             :   }
    2007         112 : }
    2008             : 
    2009             : 
    2010         554 : void MacroAssembler::AssertSmi(Register object) {
    2011         554 :   if (emit_debug_code()) {
    2012             :     Condition is_smi = CheckSmi(object);
    2013           0 :     Check(is_smi, AbortReason::kOperandIsNotASmi);
    2014             :   }
    2015         554 : }
    2016             : 
    2017           0 : void MacroAssembler::AssertSmi(Operand object) {
    2018           0 :   if (emit_debug_code()) {
    2019             :     Condition is_smi = CheckSmi(object);
    2020           0 :     Check(is_smi, AbortReason::kOperandIsNotASmi);
    2021             :   }
    2022           0 : }
    2023             : 
    2024     2146521 : void TurboAssembler::AssertZeroExtended(Register int32_register) {
    2025     2146521 :   if (emit_debug_code()) {
    2026             :     DCHECK_NE(int32_register, kScratchRegister);
    2027          79 :     movq(kScratchRegister, int64_t{0x0000000100000000});
    2028             :     cmpq(kScratchRegister, int32_register);
    2029          79 :     Check(above_equal, AbortReason::k32BitValueInRegisterIsNotZeroExtended);
    2030             :   }
    2031     2146521 : }
    2032             : 
    2033         112 : void MacroAssembler::AssertConstructor(Register object) {
    2034         112 :   if (emit_debug_code()) {
    2035           0 :     testb(object, Immediate(kSmiTagMask));
    2036           0 :     Check(not_equal, AbortReason::kOperandIsASmiAndNotAConstructor);
    2037             :     Push(object);
    2038             :     LoadTaggedPointerField(object,
    2039             :                            FieldOperand(object, HeapObject::kMapOffset));
    2040           0 :     testb(FieldOperand(object, Map::kBitFieldOffset),
    2041           0 :           Immediate(Map::IsConstructorBit::kMask));
    2042             :     Pop(object);
    2043           0 :     Check(not_zero, AbortReason::kOperandIsNotAConstructor);
    2044             :   }
    2045         112 : }
    2046             : 
    2047         280 : void MacroAssembler::AssertFunction(Register object) {
    2048         280 :   if (emit_debug_code()) {
    2049           0 :     testb(object, Immediate(kSmiTagMask));
    2050           0 :     Check(not_equal, AbortReason::kOperandIsASmiAndNotAFunction);
    2051             :     Push(object);
    2052           0 :     CmpObjectType(object, JS_FUNCTION_TYPE, object);
    2053             :     Pop(object);
    2054           0 :     Check(equal, AbortReason::kOperandIsNotAFunction);
    2055             :   }
    2056         280 : }
    2057             : 
    2058             : 
    2059         112 : void MacroAssembler::AssertBoundFunction(Register object) {
    2060         112 :   if (emit_debug_code()) {
    2061           0 :     testb(object, Immediate(kSmiTagMask));
    2062           0 :     Check(not_equal, AbortReason::kOperandIsASmiAndNotABoundFunction);
    2063             :     Push(object);
    2064           0 :     CmpObjectType(object, JS_BOUND_FUNCTION_TYPE, object);
    2065             :     Pop(object);
    2066           0 :     Check(equal, AbortReason::kOperandIsNotABoundFunction);
    2067             :   }
    2068         112 : }
    2069             : 
    2070          56 : void MacroAssembler::AssertGeneratorObject(Register object) {
    2071         112 :   if (!emit_debug_code()) return;
    2072           0 :   testb(object, Immediate(kSmiTagMask));
    2073           0 :   Check(not_equal, AbortReason::kOperandIsASmiAndNotAGeneratorObject);
    2074             : 
    2075             :   // Load map
    2076           0 :   Register map = object;
    2077             :   Push(object);
    2078             :   LoadTaggedPointerField(map, FieldOperand(object, HeapObject::kMapOffset));
    2079             : 
    2080           0 :   Label do_check;
    2081             :   // Check if JSGeneratorObject
    2082           0 :   CmpInstanceType(map, JS_GENERATOR_OBJECT_TYPE);
    2083           0 :   j(equal, &do_check);
    2084             : 
    2085             :   // Check if JSAsyncFunctionObject
    2086           0 :   CmpInstanceType(map, JS_ASYNC_FUNCTION_OBJECT_TYPE);
    2087           0 :   j(equal, &do_check);
    2088             : 
    2089             :   // Check if JSAsyncGeneratorObject
    2090           0 :   CmpInstanceType(map, JS_ASYNC_GENERATOR_OBJECT_TYPE);
    2091             : 
    2092           0 :   bind(&do_check);
    2093             :   // Restore generator object to register and perform assertion
    2094             :   Pop(object);
    2095           0 :   Check(equal, AbortReason::kOperandIsNotAGeneratorObject);
    2096             : }
    2097             : 
    2098         112 : void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
    2099         112 :   if (emit_debug_code()) {
    2100           0 :     Label done_checking;
    2101           0 :     AssertNotSmi(object);
    2102           0 :     Cmp(object, isolate()->factory()->undefined_value());
    2103           0 :     j(equal, &done_checking);
    2104           0 :     Cmp(FieldOperand(object, 0), isolate()->factory()->allocation_site_map());
    2105           0 :     Assert(equal, AbortReason::kExpectedUndefinedOrCell);
    2106           0 :     bind(&done_checking);
    2107             :   }
    2108         112 : }
    2109             : 
    2110          56 : void MacroAssembler::LoadWeakValue(Register in_out, Label* target_if_cleared) {
    2111          56 :   cmpl(in_out, Immediate(kClearedWeakHeapObjectLower32));
    2112          56 :   j(equal, target_if_cleared);
    2113             : 
    2114             :   andq(in_out, Immediate(~static_cast<int32_t>(kWeakHeapObjectMask)));
    2115          56 : }
    2116             : 
    2117         168 : void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
    2118             :   DCHECK_GT(value, 0);
    2119         168 :   if (FLAG_native_code_counters && counter->Enabled()) {
    2120             :     Operand counter_operand =
    2121           0 :         ExternalReferenceAsOperand(ExternalReference::Create(counter));
    2122           0 :     if (value == 1) {
    2123           0 :       incl(counter_operand);
    2124             :     } else {
    2125           0 :       addl(counter_operand, Immediate(value));
    2126             :     }
    2127             :   }
    2128         168 : }
    2129             : 
    2130             : 
    2131           0 : void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
    2132             :   DCHECK_GT(value, 0);
    2133           0 :   if (FLAG_native_code_counters && counter->Enabled()) {
    2134             :     Operand counter_operand =
    2135           0 :         ExternalReferenceAsOperand(ExternalReference::Create(counter));
    2136           0 :     if (value == 1) {
    2137           0 :       decl(counter_operand);
    2138             :     } else {
    2139           0 :       subl(counter_operand, Immediate(value));
    2140             :     }
    2141             :   }
    2142           0 : }
    2143             : 
    2144          56 : void MacroAssembler::MaybeDropFrames() {
    2145             :   // Check whether we need to drop frames to restart a function on the stack.
    2146             :   ExternalReference restart_fp =
    2147          56 :       ExternalReference::debug_restart_fp_address(isolate());
    2148          56 :   Load(rbx, restart_fp);
    2149          56 :   testq(rbx, rbx);
    2150             : 
    2151          56 :   Label dont_drop;
    2152          56 :   j(zero, &dont_drop, Label::kNear);
    2153          56 :   Jump(BUILTIN_CODE(isolate(), FrameDropperTrampoline), RelocInfo::CODE_TARGET);
    2154             : 
    2155          56 :   bind(&dont_drop);
    2156          56 : }
    2157             : 
    2158        1120 : void TurboAssembler::PrepareForTailCall(const ParameterCount& callee_args_count,
    2159             :                                         Register caller_args_count_reg,
    2160             :                                         Register scratch0, Register scratch1) {
    2161             : #if DEBUG
    2162             :   if (callee_args_count.is_reg()) {
    2163             :     DCHECK(!AreAliased(callee_args_count.reg(), caller_args_count_reg, scratch0,
    2164             :                        scratch1));
    2165             :   } else {
    2166             :     DCHECK(!AreAliased(caller_args_count_reg, scratch0, scratch1));
    2167             :   }
    2168             : #endif
    2169             : 
    2170             :   // Calculate the destination address where we will put the return address
     2171             :   // after we drop the current frame.
    2172        1120 :   Register new_sp_reg = scratch0;
    2173        1120 :   if (callee_args_count.is_reg()) {
    2174        1120 :     subq(caller_args_count_reg, callee_args_count.reg());
    2175        2240 :     leaq(new_sp_reg,
    2176             :          Operand(rbp, caller_args_count_reg, times_system_pointer_size,
    2177             :                  StandardFrameConstants::kCallerPCOffset));
    2178             :   } else {
    2179           0 :     leaq(new_sp_reg,
    2180             :          Operand(rbp, caller_args_count_reg, times_system_pointer_size,
    2181             :                  StandardFrameConstants::kCallerPCOffset -
    2182             :                      callee_args_count.immediate() * kSystemPointerSize));
    2183             :   }
    2184             : 
    2185        1120 :   if (FLAG_debug_code) {
    2186           0 :     cmpq(rsp, new_sp_reg);
    2187           0 :     Check(below, AbortReason::kStackAccessBelowStackPointer);
    2188             :   }
    2189             : 
     2190             :   // Copy the return address from the caller's frame to the current frame's
     2191             :   // return address slot so it is not trashed, and let the following loop
     2192             :   // copy it to the right place.
    2193        1120 :   Register tmp_reg = scratch1;
    2194        2240 :   movq(tmp_reg, Operand(rbp, StandardFrameConstants::kCallerPCOffset));
    2195        2240 :   movq(Operand(rsp, 0), tmp_reg);
    2196             : 
    2197             :   // Restore caller's frame pointer now as it could be overwritten by
    2198             :   // the copying loop.
    2199        2240 :   movq(rbp, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
    2200             : 
    2201             :   // +2 here is to copy both receiver and return address.
    2202        1120 :   Register count_reg = caller_args_count_reg;
    2203        1120 :   if (callee_args_count.is_reg()) {
    2204        2240 :     leaq(count_reg, Operand(callee_args_count.reg(), 2));
    2205             :   } else {
    2206           0 :     movq(count_reg, Immediate(callee_args_count.immediate() + 2));
    2207             :     // TODO(ishell): Unroll copying loop for small immediate values.
    2208             :   }
    2209             : 
     2210             :   // Now copy the callee arguments to the caller frame, going backwards to
     2211             :   // avoid corrupting them (the source and destination areas could overlap).
    2212        1120 :   Label loop, entry;
    2213        1120 :   jmp(&entry, Label::kNear);
    2214        1120 :   bind(&loop);
    2215             :   decq(count_reg);
    2216        2240 :   movq(tmp_reg, Operand(rsp, count_reg, times_system_pointer_size, 0));
    2217        2240 :   movq(Operand(new_sp_reg, count_reg, times_system_pointer_size, 0), tmp_reg);
    2218        1120 :   bind(&entry);
    2219             :   cmpq(count_reg, Immediate(0));
    2220        1120 :   j(not_equal, &loop, Label::kNear);
    2221             : 
    2222             :   // Leave current frame.
    2223             :   movq(rsp, new_sp_reg);
    2224        1120 : }
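
The copy loop above runs from the highest slot index down to zero because the current frame (source) and the caller frame (destination, at higher addresses) can overlap; copying backwards reads every overlapping source slot before the copy overwrites it. A minimal C++ sketch of that overlap-safe backwards copy, with a made-up name and a plain array standing in for the stack:

    #include <cassert>
    #include <cstdint>

    // Copies count slots from src to dst, highest index first, so that when the
    // destination sits above an overlapping source, each source slot is read
    // before the copy can overwrite it.
    void SketchCopyBackwards(intptr_t* dst, const intptr_t* src, int count) {
      for (int i = count - 1; i >= 0; --i) dst[i] = src[i];
    }

    int main() {
      // Simulated stack slots: the destination region starts two slots above the
      // source region, mirroring a tail call that drops part of the caller frame.
      intptr_t slots[8] = {10, 11, 12, 13, 14, 15, 16, 17};
      SketchCopyBackwards(&slots[2], &slots[0], 6);  // overlapping regions
      assert(slots[2] == 10 && slots[7] == 15);
    }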
    2225             : 
    2226         112 : void MacroAssembler::InvokeFunction(Register function, Register new_target,
    2227             :                                     const ParameterCount& actual,
    2228             :                                     InvokeFlag flag) {
    2229         112 :   LoadTaggedPointerField(
    2230             :       rbx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
    2231         112 :   movzxwq(rbx,
    2232             :           FieldOperand(rbx, SharedFunctionInfo::kFormalParameterCountOffset));
    2233             : 
    2234             :   ParameterCount expected(rbx);
    2235         112 :   InvokeFunction(function, new_target, expected, actual, flag);
    2236         112 : }
    2237             : 
    2238         168 : void MacroAssembler::InvokeFunction(Register function, Register new_target,
    2239             :                                     const ParameterCount& expected,
    2240             :                                     const ParameterCount& actual,
    2241             :                                     InvokeFlag flag) {
    2242             :   DCHECK(function == rdi);
    2243         168 :   LoadTaggedPointerField(rsi,
    2244             :                          FieldOperand(function, JSFunction::kContextOffset));
    2245         168 :   InvokeFunctionCode(rdi, new_target, expected, actual, flag);
    2246         168 : }
    2247             : 
    2248         336 : void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
    2249             :                                         const ParameterCount& expected,
    2250             :                                         const ParameterCount& actual,
    2251             :                                         InvokeFlag flag) {
    2252             :   // You can't call a function without a valid frame.
    2253             :   DCHECK(flag == JUMP_FUNCTION || has_frame());
    2254             :   DCHECK(function == rdi);
    2255             :   DCHECK_IMPLIES(new_target.is_valid(), new_target == rdx);
    2256             : 
    2257             :   // On function call, call into the debugger if necessary.
    2258         336 :   CheckDebugHook(function, new_target, expected, actual);
    2259             : 
    2260             :   // Clear the new.target register if not given.
    2261         336 :   if (!new_target.is_valid()) {
    2262         224 :     LoadRoot(rdx, RootIndex::kUndefinedValue);
    2263             :   }
    2264             : 
    2265         336 :   Label done;
    2266         336 :   bool definitely_mismatches = false;
    2267             :   InvokePrologue(expected, actual, &done, &definitely_mismatches, flag,
    2268         336 :                  Label::kNear);
    2269         336 :   if (!definitely_mismatches) {
    2270             :     // We call indirectly through the code field in the function to
    2271             :     // allow recompilation to take effect without changing any of the
    2272             :     // call sites.
    2273             :     static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
    2274         336 :     LoadTaggedPointerField(rcx,
    2275             :                            FieldOperand(function, JSFunction::kCodeOffset));
    2276         336 :     if (flag == CALL_FUNCTION) {
    2277         112 :       CallCodeObject(rcx);
    2278             :     } else {
    2279             :       DCHECK(flag == JUMP_FUNCTION);
    2280         224 :       JumpCodeObject(rcx);
    2281             :     }
    2282         336 :     bind(&done);
    2283             :   }
    2284         336 : }
    2285             : 
    2286         336 : void MacroAssembler::InvokePrologue(const ParameterCount& expected,
    2287             :                                     const ParameterCount& actual, Label* done,
    2288             :                                     bool* definitely_mismatches,
    2289             :                                     InvokeFlag flag,
    2290             :                                     Label::Distance near_jump) {
    2291             :   bool definitely_matches = false;
    2292         336 :   *definitely_mismatches = false;
    2293         336 :   Label invoke;
    2294         336 :   if (expected.is_immediate()) {
    2295             :     DCHECK(actual.is_immediate());
    2296           0 :     Set(rax, actual.immediate());
    2297           0 :     if (expected.immediate() == actual.immediate()) {
    2298             :       definitely_matches = true;
    2299             :     } else {
    2300           0 :       if (expected.immediate() ==
    2301             :               SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
    2302             :         // Don't worry about adapting arguments for built-ins that
     2303             :         // don't want that done. Skip adaptation code by making it look
    2304             :         // like we have a match between expected and actual number of
    2305             :         // arguments.
    2306             :         definitely_matches = true;
    2307             :       } else {
    2308           0 :         *definitely_mismatches = true;
    2309           0 :         Set(rbx, expected.immediate());
    2310             :       }
    2311             :     }
    2312             :   } else {
    2313         336 :     if (actual.is_immediate()) {
    2314             :       // Expected is in register, actual is immediate. This is the
    2315             :       // case when we invoke function values without going through the
    2316             :       // IC mechanism.
    2317           0 :       Set(rax, actual.immediate());
    2318           0 :       cmpq(expected.reg(), Immediate(actual.immediate()));
    2319           0 :       j(equal, &invoke, Label::kNear);
    2320             :       DCHECK(expected.reg() == rbx);
    2321         336 :     } else if (expected.reg() != actual.reg()) {
    2322             :       // Both expected and actual are in (different) registers. This
    2323             :       // is the case when we invoke functions using call and apply.
    2324         280 :       cmpq(expected.reg(), actual.reg());
    2325         280 :       j(equal, &invoke, Label::kNear);
    2326             :       DCHECK(actual.reg() == rax);
    2327             :       DCHECK(expected.reg() == rbx);
    2328             :     } else {
    2329             :       definitely_matches = true;
    2330             :       Move(rax, actual.reg());
    2331             :     }
    2332             :   }
    2333             : 
    2334         336 :   if (!definitely_matches) {
    2335         280 :     Handle<Code> adaptor = BUILTIN_CODE(isolate(), ArgumentsAdaptorTrampoline);
    2336         280 :     if (flag == CALL_FUNCTION) {
    2337         112 :       Call(adaptor, RelocInfo::CODE_TARGET);
    2338         112 :       if (!*definitely_mismatches) {
    2339         112 :         jmp(done, near_jump);
    2340             :       }
    2341             :     } else {
    2342         168 :       Jump(adaptor, RelocInfo::CODE_TARGET);
    2343             :     }
    2344         280 :     bind(&invoke);
    2345             :   }
    2346         336 : }
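
Stripped of register allocation, the decision InvokePrologue makes reduces to the plain C++ sketch below: if the expected and actual argument counts match, or the callee opts out of adaptation via a sentinel, call directly; otherwise go through the arguments adaptor. The enum, function, and sentinel value are hypothetical and only show the control flow.

    #include <cassert>

    // Hypothetical sentinel mirroring SharedFunctionInfo::kDontAdaptArgumentsSentinel.
    constexpr int kSketchDontAdaptSentinel = -1;

    enum class SketchInvokePath { kDirectCall, kArgumentsAdaptor };

    SketchInvokePath SketchChooseInvokePath(int expected, int actual) {
      if (expected == actual) return SketchInvokePath::kDirectCall;
      if (expected == kSketchDontAdaptSentinel) return SketchInvokePath::kDirectCall;
      return SketchInvokePath::kArgumentsAdaptor;
    }

    int main() {
      assert(SketchChooseInvokePath(2, 2) == SketchInvokePath::kDirectCall);
      assert(SketchChooseInvokePath(kSketchDontAdaptSentinel, 5) ==
             SketchInvokePath::kDirectCall);
      assert(SketchChooseInvokePath(3, 1) == SketchInvokePath::kArgumentsAdaptor);
    }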
    2347             : 
    2348         336 : void MacroAssembler::CheckDebugHook(Register fun, Register new_target,
    2349             :                                     const ParameterCount& expected,
    2350             :                                     const ParameterCount& actual) {
    2351         336 :   Label skip_hook;
    2352             :   ExternalReference debug_hook_active =
    2353         336 :       ExternalReference::debug_hook_on_function_call_address(isolate());
    2354             :   Operand debug_hook_active_operand =
    2355         336 :       ExternalReferenceAsOperand(debug_hook_active);
    2356         336 :   cmpb(debug_hook_active_operand, Immediate(0));
    2357         336 :   j(equal, &skip_hook);
    2358             : 
    2359             :   {
    2360             :     FrameScope frame(this,
    2361         672 :                      has_frame() ? StackFrame::NONE : StackFrame::INTERNAL);
    2362         336 :     if (expected.is_reg()) {
    2363         336 :       SmiTag(expected.reg(), expected.reg());
    2364             :       Push(expected.reg());
    2365             :     }
    2366         336 :     if (actual.is_reg()) {
    2367         336 :       SmiTag(actual.reg(), actual.reg());
    2368             :       Push(actual.reg());
    2369         336 :       SmiUntag(actual.reg(), actual.reg());
    2370             :     }
    2371         336 :     if (new_target.is_valid()) {
    2372             :       Push(new_target);
    2373             :     }
    2374             :     Push(fun);
    2375             :     Push(fun);
    2376         336 :     Push(StackArgumentsAccessor(rbp, actual).GetReceiverOperand());
    2377         336 :     CallRuntime(Runtime::kDebugOnFunctionCall);
    2378             :     Pop(fun);
    2379         336 :     if (new_target.is_valid()) {
    2380             :       Pop(new_target);
    2381             :     }
    2382         336 :     if (actual.is_reg()) {
    2383             :       Pop(actual.reg());
    2384         336 :       SmiUntag(actual.reg(), actual.reg());
    2385             :     }
    2386         336 :     if (expected.is_reg()) {
    2387             :       Pop(expected.reg());
    2388         336 :       SmiUntag(expected.reg(), expected.reg());
    2389             :     }
    2390             :   }
    2391         336 :   bind(&skip_hook);
    2392         336 : }
    2393             : 
    2394      989526 : void TurboAssembler::StubPrologue(StackFrame::Type type) {
    2395      989526 :   pushq(rbp);  // Caller's frame pointer.
    2396             :   movq(rbp, rsp);
    2397             :   Push(Immediate(StackFrame::TypeToMarker(type)));
    2398      989742 : }
    2399             : 
    2400      644072 : void TurboAssembler::Prologue() {
    2401      644072 :   pushq(rbp);  // Caller's frame pointer.
    2402             :   movq(rbp, rsp);
    2403             :   Push(rsi);  // Callee's context.
    2404             :   Push(rdi);  // Callee's JS function.
    2405      644072 : }
    2406             : 
    2407      595130 : void TurboAssembler::EnterFrame(StackFrame::Type type) {
    2408      595130 :   pushq(rbp);
    2409             :   movq(rbp, rsp);
    2410             :   Push(Immediate(StackFrame::TypeToMarker(type)));
    2411      595206 : }
    2412             : 
    2413      947041 : void TurboAssembler::LeaveFrame(StackFrame::Type type) {
    2414      947041 :   if (emit_debug_code()) {
    2415           0 :     cmpq(Operand(rbp, CommonFrameConstants::kContextOrFrameTypeOffset),
    2416           0 :          Immediate(StackFrame::TypeToMarker(type)));
    2417           0 :     Check(equal, AbortReason::kStackFrameTypesMustMatch);
    2418             :   }
    2419      947041 :   movq(rsp, rbp);
    2420      947139 :   popq(rbp);
    2421      946777 : }
    2422             : 
    2423         672 : void MacroAssembler::EnterExitFramePrologue(bool save_rax,
    2424             :                                             StackFrame::Type frame_type) {
    2425             :   DCHECK(frame_type == StackFrame::EXIT ||
    2426             :          frame_type == StackFrame::BUILTIN_EXIT);
    2427             : 
    2428             :   // Set up the frame structure on the stack.
    2429             :   // All constants are relative to the frame pointer of the exit frame.
    2430             :   DCHECK_EQ(kFPOnStackSize + kPCOnStackSize,
    2431             :             ExitFrameConstants::kCallerSPDisplacement);
    2432             :   DCHECK_EQ(kFPOnStackSize, ExitFrameConstants::kCallerPCOffset);
    2433             :   DCHECK_EQ(0 * kSystemPointerSize, ExitFrameConstants::kCallerFPOffset);
    2434         672 :   pushq(rbp);
    2435             :   movq(rbp, rsp);
    2436             : 
    2437             :   // Reserve room for entry stack pointer.
    2438             :   Push(Immediate(StackFrame::TypeToMarker(frame_type)));
    2439             :   DCHECK_EQ(-2 * kSystemPointerSize, ExitFrameConstants::kSPOffset);
    2440             :   Push(Immediate(0));  // Saved entry sp, patched before call.
    2441             : 
     2442             :   // Save the frame pointer, the context, and the C function in top.
    2443         672 :   if (save_rax) {
    2444             :     movq(r14, rax);  // Backup rax in callee-save register.
    2445             :   }
    2446             : 
    2447         672 :   Store(
    2448             :       ExternalReference::Create(IsolateAddressId::kCEntryFPAddress, isolate()),
    2449         672 :       rbp);
    2450         672 :   Store(ExternalReference::Create(IsolateAddressId::kContextAddress, isolate()),
    2451         672 :         rsi);
    2452         672 :   Store(
    2453             :       ExternalReference::Create(IsolateAddressId::kCFunctionAddress, isolate()),
    2454         672 :       rbx);
    2455         672 : }
    2456             : 
    2457             : 
    2458         672 : void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
    2459             :                                             bool save_doubles) {
    2460             : #ifdef _WIN64
    2461             :   const int kShadowSpace = 4;
    2462             :   arg_stack_space += kShadowSpace;
    2463             : #endif
    2464             :   // Optionally save all XMM registers.
    2465         672 :   if (save_doubles) {
    2466         224 :     int space = XMMRegister::kNumRegisters * kDoubleSize +
    2467         224 :                 arg_stack_space * kSystemPointerSize;
    2468         224 :     subq(rsp, Immediate(space));
    2469             :     int offset = -ExitFrameConstants::kFixedFrameSizeFromFp;
    2470         224 :     const RegisterConfiguration* config = RegisterConfiguration::Default();
    2471        6944 :     for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
    2472             :       DoubleRegister reg =
    2473             :           DoubleRegister::from_code(config->GetAllocatableDoubleCode(i));
    2474        6720 :       Movsd(Operand(rbp, offset - ((i + 1) * kDoubleSize)), reg);
    2475             :     }
    2476         448 :   } else if (arg_stack_space > 0) {
    2477         112 :     subq(rsp, Immediate(arg_stack_space * kSystemPointerSize));
    2478             :   }
    2479             : 
    2480             :   // Get the required frame alignment for the OS.
    2481         672 :   const int kFrameAlignment = base::OS::ActivationFrameAlignment();
    2482         672 :   if (kFrameAlignment > 0) {
    2483             :     DCHECK(base::bits::IsPowerOfTwo(kFrameAlignment));
    2484             :     DCHECK(is_int8(kFrameAlignment));
    2485         672 :     andq(rsp, Immediate(-kFrameAlignment));
    2486             :   }
    2487             : 
    2488             :   // Patch the saved entry sp.
    2489        1344 :   movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
    2490         672 : }
    2491             : 
    2492         448 : void MacroAssembler::EnterExitFrame(int arg_stack_space, bool save_doubles,
    2493             :                                     StackFrame::Type frame_type) {
    2494         448 :   EnterExitFramePrologue(true, frame_type);
    2495             : 
    2496             :   // Set up argv in callee-saved register r15. It is reused in LeaveExitFrame,
    2497             :   // so it must be retained across the C-call.
    2498             :   int offset = StandardFrameConstants::kCallerSPOffset - kSystemPointerSize;
    2499         896 :   leaq(r15, Operand(rbp, r14, times_system_pointer_size, offset));
    2500             : 
    2501         448 :   EnterExitFrameEpilogue(arg_stack_space, save_doubles);
    2502         448 : }
    2503             : 
    2504             : 
    2505         224 : void MacroAssembler::EnterApiExitFrame(int arg_stack_space) {
    2506         224 :   EnterExitFramePrologue(false, StackFrame::EXIT);
    2507         224 :   EnterExitFrameEpilogue(arg_stack_space, false);
    2508         224 : }
    2509             : 
    2510             : 
    2511         560 : void MacroAssembler::LeaveExitFrame(bool save_doubles, bool pop_arguments) {
    2512             :   // Registers:
    2513             :   // r15 : argv
    2514         560 :   if (save_doubles) {
    2515             :     int offset = -ExitFrameConstants::kFixedFrameSizeFromFp;
    2516         224 :     const RegisterConfiguration* config = RegisterConfiguration::Default();
    2517        6944 :     for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
    2518             :       DoubleRegister reg =
    2519             :           DoubleRegister::from_code(config->GetAllocatableDoubleCode(i));
    2520        6720 :       Movsd(reg, Operand(rbp, offset - ((i + 1) * kDoubleSize)));
    2521             :     }
    2522             :   }
    2523             : 
    2524         560 :   if (pop_arguments) {
    2525             :     // Get the return address from the stack and restore the frame pointer.
    2526         896 :     movq(rcx, Operand(rbp, kFPOnStackSize));
    2527         896 :     movq(rbp, Operand(rbp, 0 * kSystemPointerSize));
    2528             : 
    2529             :     // Drop everything up to and including the arguments and the receiver
    2530             :     // from the caller stack.
    2531         896 :     leaq(rsp, Operand(r15, 1 * kSystemPointerSize));
    2532             : 
    2533             :     PushReturnAddressFrom(rcx);
    2534             :   } else {
    2535             :     // Otherwise just leave the exit frame.
    2536         112 :     leave();
    2537             :   }
    2538             : 
    2539         560 :   LeaveExitFrameEpilogue();
    2540         560 : }
    2541             : 
    2542         112 : void MacroAssembler::LeaveApiExitFrame() {
    2543         112 :   movq(rsp, rbp);
    2544         112 :   popq(rbp);
    2545             : 
    2546         112 :   LeaveExitFrameEpilogue();
    2547         112 : }
    2548             : 
    2549         672 : void MacroAssembler::LeaveExitFrameEpilogue() {
    2550             :   // Restore current context from top and clear it in debug mode.
    2551             :   ExternalReference context_address =
    2552         672 :       ExternalReference::Create(IsolateAddressId::kContextAddress, isolate());
    2553         672 :   Operand context_operand = ExternalReferenceAsOperand(context_address);
    2554         672 :   movq(rsi, context_operand);
    2555             : #ifdef DEBUG
    2556             :   movq(context_operand, Immediate(Context::kInvalidContext));
    2557             : #endif
    2558             : 
    2559             :   // Clear the top frame.
    2560             :   ExternalReference c_entry_fp_address =
    2561         672 :       ExternalReference::Create(IsolateAddressId::kCEntryFPAddress, isolate());
    2562         672 :   Operand c_entry_fp_operand = ExternalReferenceAsOperand(c_entry_fp_address);
    2563             :   movq(c_entry_fp_operand, Immediate(0));
    2564         672 : }
    2565             : 
    2566             : 
    2567             : #ifdef _WIN64
    2568             : static const int kRegisterPassedArguments = 4;
    2569             : #else
    2570             : static const int kRegisterPassedArguments = 6;
    2571             : #endif
    2572             : 
    2573             : 
    2574         336 : void MacroAssembler::LoadNativeContextSlot(int index, Register dst) {
    2575         336 :   LoadTaggedPointerField(dst, NativeContextOperand());
    2576             :   LoadTaggedPointerField(dst, ContextOperand(dst, index));
    2577         336 : }
    2578             : 
    2579             : 
    2580           0 : int TurboAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
    2581             :   // On Windows 64 stack slots are reserved by the caller for all arguments
    2582             :   // including the ones passed in registers, and space is always allocated for
    2583             :   // the four register arguments even if the function takes fewer than four
    2584             :   // arguments.
    2585             :   // On AMD64 ABI (Linux/Mac) the first six arguments are passed in registers
    2586             :   // and the caller does not reserve stack slots for them.
    2587             :   DCHECK_GE(num_arguments, 0);
    2588             : #ifdef _WIN64
    2589             :   const int kMinimumStackSlots = kRegisterPassedArguments;
    2590             :   if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
    2591             :   return num_arguments;
    2592             : #else
    2593     1524319 :   if (num_arguments < kRegisterPassedArguments) return 0;
    2594       93008 :   return num_arguments - kRegisterPassedArguments;
    2595             : #endif
    2596             : }
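
A worked example of the slot calculation above, as a stand-alone sketch that evaluates both calling conventions side by side (the function name is made up; the real code picks one branch at compile time via _WIN64):

    #include <algorithm>
    #include <cassert>

    // Stack slots a C call must reserve for its arguments, per convention.
    int SketchArgumentStackSlots(int num_arguments, bool win64) {
      if (win64) {
        // Win64: slots are reserved for all arguments, including the ones passed
        // in registers, with a four-slot minimum (the register home area).
        return std::max(num_arguments, 4);
      }
      // System V AMD64: the first six arguments travel in registers; only the
      // remainder needs stack slots.
      return std::max(num_arguments - 6, 0);
    }

    int main() {
      assert(SketchArgumentStackSlots(3, /*win64=*/true) == 4);
      assert(SketchArgumentStackSlots(3, /*win64=*/false) == 0);
      assert(SketchArgumentStackSlots(7, /*win64=*/true) == 7);
      assert(SketchArgumentStackSlots(7, /*win64=*/false) == 1);
    }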
    2597             : 
    2598      762163 : void TurboAssembler::PrepareCallCFunction(int num_arguments) {
    2599      762163 :   int frame_alignment = base::OS::ActivationFrameAlignment();
    2600             :   DCHECK_NE(frame_alignment, 0);
    2601             :   DCHECK_GE(num_arguments, 0);
    2602             : 
     2603             :   // Align the stack and allocate space for the arguments and the old rsp.
    2604      762163 :   movq(kScratchRegister, rsp);
    2605             :   DCHECK(base::bits::IsPowerOfTwo(frame_alignment));
    2606             :   int argument_slots_on_stack =
    2607             :       ArgumentStackSlotsForCFunctionCall(num_arguments);
    2608      762164 :   subq(rsp, Immediate((argument_slots_on_stack + 1) * kSystemPointerSize));
    2609      762163 :   andq(rsp, Immediate(-frame_alignment));
    2610     1524330 :   movq(Operand(rsp, argument_slots_on_stack * kSystemPointerSize),
    2611             :        kScratchRegister);
    2612      762165 : }
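
The pointer arithmetic in PrepareCallCFunction can be modeled with plain integers: reserve one extra slot above the argument slots to stash the old rsp, then round rsp down to the ABI alignment. The sketch below uses hypothetical names and a byte-addressed uint64_t standing in for rsp.

    #include <cassert>
    #include <cstdint>

    constexpr uint64_t kSketchSlotSize = 8;  // kSystemPointerSize on x64

    // Returns the new stack pointer after reserving `slots` argument slots plus
    // one slot for the saved old rsp, aligned down to `alignment` (a power of two).
    uint64_t SketchPrepareCallCFunction(uint64_t rsp, int slots, uint64_t alignment) {
      uint64_t new_rsp = rsp - (slots + 1) * kSketchSlotSize;  // args + saved rsp
      new_rsp &= ~(alignment - 1);                             // align downwards
      return new_rsp;
    }

    int main() {
      uint64_t rsp = 0x7fff0012;  // deliberately misaligned
      uint64_t new_rsp = SketchPrepareCallCFunction(rsp, 2, 16);
      assert(new_rsp % 16 == 0);
      // The saved old rsp lives just above the argument slots, still below old rsp.
      uint64_t saved_rsp_slot = new_rsp + 2 * kSketchSlotSize;
      assert(saved_rsp_slot < rsp);
    }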
    2613             : 
    2614      761013 : void TurboAssembler::CallCFunction(ExternalReference function,
    2615             :                                    int num_arguments) {
    2616      761013 :   LoadAddress(rax, function);
    2617      761015 :   CallCFunction(rax, num_arguments);
    2618      761015 : }
    2619             : 
    2620      762155 : void TurboAssembler::CallCFunction(Register function, int num_arguments) {
    2621             :   DCHECK_LE(num_arguments, kMaxCParameters);
    2622             :   DCHECK(has_frame());
    2623             :   // Check stack alignment.
    2624      762155 :   if (emit_debug_code()) {
    2625           0 :     CheckStackAlignment();
    2626             :   }
    2627             : 
    2628             :   // Save the frame pointer and PC so that the stack layout remains iterable,
    2629             :   // even without an ExitFrame which normally exists between JS and C frames.
    2630      762155 :   if (isolate() != nullptr) {
    2631      363387 :     Label get_pc;
    2632             :     DCHECK(!AreAliased(kScratchRegister, function));
    2633      726774 :     leaq(kScratchRegister, Operand(&get_pc, 0));
    2634      363387 :     bind(&get_pc);
    2635      363386 :     movq(ExternalReferenceAsOperand(
    2636             :              ExternalReference::fast_c_call_caller_pc_address(isolate())),
    2637             :          kScratchRegister);
    2638      363387 :     movq(ExternalReferenceAsOperand(
    2639             :              ExternalReference::fast_c_call_caller_fp_address(isolate())),
    2640             :          rbp);
    2641             :   }
    2642             : 
    2643      762155 :   call(function);
    2644             : 
    2645      762155 :   if (isolate() != nullptr) {
    2646             :     // We don't unset the PC; the FP is the source of truth.
    2647      363387 :     movq(ExternalReferenceAsOperand(
    2648             :              ExternalReference::fast_c_call_caller_fp_address(isolate())),
    2649             :          Immediate(0));
    2650             :   }
    2651             : 
    2652             :   DCHECK_NE(base::OS::ActivationFrameAlignment(), 0);
    2653             :   DCHECK_GE(num_arguments, 0);
    2654             :   int argument_slots_on_stack =
    2655             :       ArgumentStackSlotsForCFunctionCall(num_arguments);
    2656     1524310 :   movq(rsp, Operand(rsp, argument_slots_on_stack * kSystemPointerSize));
    2657      762155 : }
    2658             : 
    2659      658029 : void TurboAssembler::CheckPageFlag(Register object, Register scratch, int mask,
    2660             :                                    Condition cc, Label* condition_met,
    2661             :                                    Label::Distance condition_met_distance) {
    2662             :   DCHECK(cc == zero || cc == not_zero);
    2663      658029 :   if (scratch == object) {
    2664         112 :     andq(scratch, Immediate(~kPageAlignmentMask));
    2665             :   } else {
    2666      657917 :     movq(scratch, Immediate(~kPageAlignmentMask));
    2667             :     andq(scratch, object);
    2668             :   }
    2669      658031 :   if (mask < (1 << kBitsPerByte)) {
    2670     1974095 :     testb(Operand(scratch, MemoryChunk::kFlagsOffset),
    2671      658033 :           Immediate(static_cast<uint8_t>(mask)));
    2672             :   } else {
    2673           0 :     testl(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
    2674             :   }
    2675      658033 :   j(cc, condition_met, condition_met_distance);
    2676      658032 : }
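
CheckPageFlag relies on the chunk header sitting at the start of each page-aligned chunk, so masking any object address with ~kPageAlignmentMask yields the address of its chunk header. A small sketch of that masking, with a made-up page size standing in for the real constant:

    #include <cassert>
    #include <cstdint>

    constexpr uint64_t kSketchPageSize = 256 * 1024;  // illustrative only
    constexpr uint64_t kSketchPageAlignmentMask = kSketchPageSize - 1;

    // Maps the address of an object on a page to the base address of that page,
    // mirroring the andq(scratch, ~kPageAlignmentMask) above.
    uint64_t SketchChunkFromAddress(uint64_t address) {
      return address & ~kSketchPageAlignmentMask;
    }

    int main() {
      uint64_t chunk = 5 * kSketchPageSize;  // some page-aligned chunk base
      uint64_t object = chunk + 0x1230;      // an object inside that chunk
      assert(SketchChunkFromAddress(object) == chunk);
    }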
    2677             : 
    2678         114 : void TurboAssembler::ComputeCodeStartAddress(Register dst) {
    2679         114 :   Label current;
    2680         114 :   bind(&current);
    2681             :   int pc = pc_offset();
    2682             :   // Load effective address to get the address of the current instruction.
    2683         228 :   leaq(dst, Operand(&current, -pc));
    2684         114 : }
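
ComputeCodeStartAddress works because the label bound at `current` lies pc_offset() bytes past the start of the code being assembled, so subtracting that offset from the label's runtime address recovers the code start. The arithmetic, as a trivial sketch with hypothetical values:

    #include <cassert>
    #include <cstdint>

    int main() {
      uint64_t code_start = 0x2000;  // hypothetical start of the code object
      int pc_offset = 0x54;          // bytes already emitted before the label
      uint64_t label_address = code_start + pc_offset;  // runtime address of `current`
      assert(label_address - pc_offset == code_start);  // what leaq(dst, [current - pc]) computes
    }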
    2685             : 
    2686         560 : void TurboAssembler::ResetSpeculationPoisonRegister() {
     2687             :   // TODO(tebbi): Perhaps we want to put an lfence here.
    2688         560 :   Set(kSpeculationPoisonRegister, -1);
    2689         560 : }
    2690             : 
    2691     3354655 : void TurboAssembler::CallForDeoptimization(Address target, int deopt_id) {
    2692             :   NoRootArrayScope no_root_array(this);
    2693             :   // Save the deopt id in r13 (we don't need the roots array from now on).
    2694     3354655 :   movq(r13, Immediate(deopt_id));
    2695     3354657 :   call(target, RelocInfo::RUNTIME_ENTRY);
    2696     3354658 : }
    2697             : 
    2698             : }  // namespace internal
    2699      120216 : }  // namespace v8
    2700             : 
    2701             : #endif  // V8_TARGET_ARCH_X64

Generated by: LCOV version 1.10