LCOV - code coverage report
Current view: top level - src/x64 - macro-assembler-x64.cc (source / functions) Hit Total Coverage
Test: app.info Lines: 1646 2068 79.6 %
Date: 2017-04-26 Functions: 262 320 81.9 %

          Line data    Source code
       1             : // Copyright 2012 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #if V8_TARGET_ARCH_X64
       6             : 
       7             : #include "src/base/bits.h"
       8             : #include "src/base/division-by-constant.h"
       9             : #include "src/base/utils/random-number-generator.h"
      10             : #include "src/bootstrapper.h"
      11             : #include "src/codegen.h"
      12             : #include "src/counters.h"
      13             : #include "src/debug/debug.h"
      14             : #include "src/heap/heap-inl.h"
      15             : #include "src/objects-inl.h"
      16             : #include "src/register-configuration.h"
      17             : #include "src/x64/assembler-x64.h"
      18             : 
      19             : #include "src/x64/macro-assembler-x64.h"  // Cannot be the first include.
      20             : 
      21             : namespace v8 {
      22             : namespace internal {
      23             : 
      24    13459112 : MacroAssembler::MacroAssembler(Isolate* isolate, void* buffer, int size,
      25             :                                CodeObjectRequired create_code_object)
      26             :     : Assembler(isolate, buffer, size),
      27             :       generating_stub_(false),
      28             :       has_frame_(false),
      29             :       isolate_(isolate),
      30             :       root_array_available_(true),
      31    13459112 :       jit_cookie_(0) {
      32    13459118 :   if (FLAG_mask_constants_with_cookie) {
      33    26918235 :     jit_cookie_ = isolate->random_number_generator()->NextInt();
      34             :   }
      35    13459115 :   if (create_code_object == CodeObjectRequired::kYes) {
      36             :     code_object_ =
      37     3456212 :         Handle<Object>::New(isolate_->heap()->undefined_value(), isolate_);
      38             :   }
      39    13459115 : }
      40             : 
      41             : 
      42             : static const int64_t kInvalidRootRegisterDelta = -1;
      43             : 
      44             : 
      45     2503436 : int64_t MacroAssembler::RootRegisterDelta(ExternalReference other) {
      46     6719306 :   if (predictable_code_size() &&
      47          90 :       (other.address() < reinterpret_cast<Address>(isolate()) ||
      48          90 :        other.address() >= reinterpret_cast<Address>(isolate() + 1))) {
      49             :     return kInvalidRootRegisterDelta;
      50             :   }
      51             :   Address roots_register_value = kRootRegisterBias +
      52      791000 :       reinterpret_cast<Address>(isolate()->heap()->roots_array_start());
      53             : 
      54             :   int64_t delta = kInvalidRootRegisterDelta;  // Bogus initialization.
      55             :   if (kPointerSize == kInt64Size) {
      56      791000 :     delta = other.address() - roots_register_value;
      57             :   } else {
      58             :     // For x32, zero extend the address to 64-bit and calculate the delta.
      59             :     uint64_t o = static_cast<uint32_t>(
      60             :         reinterpret_cast<intptr_t>(other.address()));
      61             :     uint64_t r = static_cast<uint32_t>(
      62             :         reinterpret_cast<intptr_t>(roots_register_value));
      63             :     delta = o - r;
      64             :   }
      65           0 :   return delta;
      66             : }
      67             : 
      68             : 
      69      437581 : Operand MacroAssembler::ExternalOperand(ExternalReference target,
      70             :                                         Register scratch) {
      71      437581 :   if (root_array_available_ && !serializer_enabled()) {
      72             :     int64_t delta = RootRegisterDelta(target);
      73       93835 :     if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      74       46895 :       return Operand(kRootRegister, static_cast<int32_t>(delta));
      75             :     }
      76             :   }
      77             :   Move(scratch, target);
      78      390686 :   return Operand(scratch, 0);
      79             : }
      80             : 
      81             : 
      82       53245 : void MacroAssembler::Load(Register destination, ExternalReference source) {
      83       53245 :   if (root_array_available_ && !serializer_enabled()) {
      84             :     int64_t delta = RootRegisterDelta(source);
      85      102564 :     if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      86      102564 :       movp(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
      87      104527 :       return;
      88             :     }
      89             :   }
      90             :   // Safe code.
      91        1963 :   if (destination.is(rax)) {
      92         258 :     load_rax(source);
      93             :   } else {
      94             :     Move(kScratchRegister, source);
      95        3410 :     movp(destination, Operand(kScratchRegister, 0));
      96             :   }
      97             : }
      98             : 
      99             : 
     100      203569 : void MacroAssembler::Store(ExternalReference destination, Register source) {
     101      203569 :   if (root_array_available_ && !serializer_enabled()) {
     102             :     int64_t delta = RootRegisterDelta(destination);
     103      272636 :     if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
     104      272636 :       movp(Operand(kRootRegister, static_cast<int32_t>(delta)), source);
     105      339887 :       return;
     106             :     }
     107             :   }
     108             :   // Safe code.
     109       67251 :   if (source.is(rax)) {
     110        1505 :     store_rax(destination);
     111             :   } else {
     112             :     Move(kScratchRegister, destination);
     113      131492 :     movp(Operand(kScratchRegister, 0), source);
     114             :   }
     115             : }
     116             : 
     117             : 
     118     2983637 : void MacroAssembler::LoadAddress(Register destination,
     119             :                                  ExternalReference source) {
     120     2983637 :   if (root_array_available_ && !serializer_enabled()) {
     121             :     int64_t delta = RootRegisterDelta(source);
     122     2825400 :     if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
     123     1053110 :       leap(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
     124     3510193 :       return;
     125             :     }
     126             :   }
     127             :   // Safe code.
     128             :   Move(destination, source);
     129             : }
     130             : 
     131             : 
     132           0 : int MacroAssembler::LoadAddressSize(ExternalReference source) {
     133           0 :   if (root_array_available_ && !serializer_enabled()) {
     134             :     // This calculation depends on the internals of LoadAddress.
     135             :     // It's correctness is ensured by the asserts in the Call
     136             :     // instruction below.
     137             :     int64_t delta = RootRegisterDelta(source);
     138           0 :     if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
     139             :       // Operand is leap(scratch, Operand(kRootRegister, delta));
     140             :       // Opcodes : REX.W 8D ModRM Disp8/Disp32  - 4 or 7.
     141             :       int size = 4;
     142           0 :       if (!is_int8(static_cast<int32_t>(delta))) {
     143             :         size += 3;  // Need full four-byte displacement in lea.
     144             :       }
     145           0 :       return size;
     146             :     }
     147             :   }
     148             :   // Size of movp(destination, src);
     149             :   return Assembler::kMoveAddressIntoScratchRegisterInstructionLength;
     150             : }
     151             : 
     152             : 
     153          43 : void MacroAssembler::PushAddress(ExternalReference source) {
     154          43 :   int64_t address = reinterpret_cast<int64_t>(source.address());
     155          43 :   if (is_int32(address) && !serializer_enabled()) {
     156           0 :     if (emit_debug_code()) {
     157             :       Move(kScratchRegister, kZapValue, Assembler::RelocInfoNone());
     158             :     }
     159           0 :     Push(Immediate(static_cast<int32_t>(address)));
     160          43 :     return;
     161             :   }
     162          43 :   LoadAddress(kScratchRegister, source);
     163             :   Push(kScratchRegister);
     164             : }
     165             : 
     166             : 
     167     3812958 : void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
     168             :   DCHECK(root_array_available_);
     169             :   movp(destination, Operand(kRootRegister,
     170     7625922 :                             (index << kPointerSizeLog2) - kRootRegisterBias));
     171     3812964 : }
     172             : 
     173             : 
     174           0 : void MacroAssembler::LoadRootIndexed(Register destination,
     175             :                                      Register variable_offset,
     176             :                                      int fixed_offset) {
     177             :   DCHECK(root_array_available_);
     178             :   movp(destination,
     179             :        Operand(kRootRegister,
     180             :                variable_offset, times_pointer_size,
     181           0 :                (fixed_offset << kPointerSizeLog2) - kRootRegisterBias));
     182           0 : }
     183             : 
     184             : 
     185           0 : void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
     186             :   DCHECK(Heap::RootCanBeWrittenAfterInitialization(index));
     187             :   DCHECK(root_array_available_);
     188           0 :   movp(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias),
     189           0 :        source);
     190           0 : }
     191             : 
     192             : 
     193      908411 : void MacroAssembler::PushRoot(Heap::RootListIndex index) {
     194             :   DCHECK(root_array_available_);
     195     1816827 :   Push(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias));
     196      908416 : }
     197             : 
     198             : 
     199     3698608 : void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
     200             :   DCHECK(root_array_available_);
     201             :   cmpp(with, Operand(kRootRegister,
     202     7397218 :                      (index << kPointerSizeLog2) - kRootRegisterBias));
     203     3698610 : }
     204             : 
     205             : 
     206      137067 : void MacroAssembler::CompareRoot(const Operand& with,
     207             :                                  Heap::RootListIndex index) {
     208             :   DCHECK(root_array_available_);
     209             :   DCHECK(!with.AddressUsesRegister(kScratchRegister));
     210      137067 :   LoadRoot(kScratchRegister, index);
     211      137067 :   cmpp(with, kScratchRegister);
     212      137067 : }
     213             : 
     214             : 
     215      133049 : void MacroAssembler::RememberedSetHelper(Register object,  // For debug tests.
     216             :                                          Register addr,
     217             :                                          Register scratch,
     218             :                                          SaveFPRegsMode save_fp,
     219      266098 :                                          RememberedSetFinalAction and_then) {
     220      133049 :   if (emit_debug_code()) {
     221             :     Label ok;
     222           7 :     JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
     223           7 :     int3();
     224           7 :     bind(&ok);
     225             :   }
     226             :   // Load store buffer top.
     227             :   ExternalReference store_buffer =
     228      133049 :       ExternalReference::store_buffer_top(isolate());
     229      133049 :   movp(scratch, ExternalOperand(store_buffer));
     230             :   // Store pointer to buffer.
     231      266098 :   movp(Operand(scratch, 0), addr);
     232             :   // Increment buffer top.
     233      133049 :   addp(scratch, Immediate(kPointerSize));
     234             :   // Write back new top of buffer.
     235      133049 :   movp(ExternalOperand(store_buffer), scratch);
     236             :   // Call stub on end of buffer.
     237             :   Label done;
     238             :   // Check for end of buffer.
     239             :   testp(scratch, Immediate(StoreBuffer::kStoreBufferMask));
     240      133049 :   if (and_then == kReturnAtEnd) {
     241             :     Label buffer_overflowed;
     242      133049 :     j(equal, &buffer_overflowed, Label::kNear);
     243      133049 :     ret(0);
     244      133049 :     bind(&buffer_overflowed);
     245             :   } else {
     246             :     DCHECK(and_then == kFallThroughAtEnd);
     247           0 :     j(not_equal, &done, Label::kNear);
     248             :   }
     249             :   StoreBufferOverflowStub store_buffer_overflow(isolate(), save_fp);
     250      133049 :   CallStub(&store_buffer_overflow);
     251      133049 :   if (and_then == kReturnAtEnd) {
     252      133049 :     ret(0);
     253             :   } else {
     254             :     DCHECK(and_then == kFallThroughAtEnd);
     255           0 :     bind(&done);
     256             :   }
     257      133049 : }
     258             : 
     259             : 
     260       76028 : void MacroAssembler::InNewSpace(Register object,
     261             :                                 Register scratch,
     262             :                                 Condition cc,
     263             :                                 Label* branch,
     264             :                                 Label::Distance distance) {
     265             :   CheckPageFlag(object, scratch, MemoryChunk::kIsInNewSpaceMask, cc, branch,
     266       76072 :                 distance);
     267       76028 : }
     268             : 
     269             : 
     270      716277 : void MacroAssembler::RecordWriteField(
     271             :     Register object,
     272             :     int offset,
     273             :     Register value,
     274             :     Register dst,
     275             :     SaveFPRegsMode save_fp,
     276             :     RememberedSetAction remembered_set_action,
     277             :     SmiCheck smi_check,
     278             :     PointersToHereCheck pointers_to_here_check_for_value) {
     279             :   // First, check if a write barrier is even needed. The tests below
     280             :   // catch stores of Smis.
     281             :   Label done;
     282             : 
     283             :   // Skip barrier if writing a smi.
     284      716277 :   if (smi_check == INLINE_SMI_CHECK) {
     285      523645 :     JumpIfSmi(value, &done);
     286             :   }
     287             : 
     288             :   // Although the object register is tagged, the offset is relative to the start
     289             :   // of the object, so so offset must be a multiple of kPointerSize.
     290             :   DCHECK(IsAligned(offset, kPointerSize));
     291             : 
     292      716277 :   leap(dst, FieldOperand(object, offset));
     293     1432554 :   if (emit_debug_code()) {
     294             :     Label ok;
     295           2 :     testb(dst, Immediate((1 << kPointerSizeLog2) - 1));
     296           2 :     j(zero, &ok, Label::kNear);
     297           2 :     int3();
     298           2 :     bind(&ok);
     299             :   }
     300             : 
     301             :   RecordWrite(object, dst, value, save_fp, remembered_set_action,
     302      716277 :               OMIT_SMI_CHECK, pointers_to_here_check_for_value);
     303             : 
     304      716277 :   bind(&done);
     305             : 
     306             :   // Clobber clobbered input registers when running with the debug-code flag
     307             :   // turned on to provoke errors.
     308      716277 :   if (emit_debug_code()) {
     309             :     Move(value, kZapValue, Assembler::RelocInfoNone());
     310             :     Move(dst, kZapValue, Assembler::RelocInfoNone());
     311             :   }
     312      716277 : }
     313             : 
     314             : 
     315        8912 : void MacroAssembler::RecordWriteForMap(Register object,
     316             :                                        Register map,
     317             :                                        Register dst,
     318       26721 :                                        SaveFPRegsMode fp_mode) {
     319             :   DCHECK(!object.is(kScratchRegister));
     320             :   DCHECK(!object.is(map));
     321             :   DCHECK(!object.is(dst));
     322             :   DCHECK(!map.is(dst));
     323        8912 :   AssertNotSmi(object);
     324             : 
     325       26726 :   if (emit_debug_code()) {
     326             :     Label ok;
     327           0 :     if (map.is(kScratchRegister)) pushq(map);
     328           0 :     CompareMap(map, isolate()->factory()->meta_map());
     329           0 :     if (map.is(kScratchRegister)) popq(map);
     330           0 :     j(equal, &ok, Label::kNear);
     331           0 :     int3();
     332           0 :     bind(&ok);
     333             :   }
     334             : 
     335        8912 :   if (!FLAG_incremental_marking) {
     336           5 :     return;
     337             :   }
     338             : 
     339        8907 :   if (emit_debug_code()) {
     340             :     Label ok;
     341           0 :     if (map.is(kScratchRegister)) pushq(map);
     342           0 :     cmpp(map, FieldOperand(object, HeapObject::kMapOffset));
     343           0 :     if (map.is(kScratchRegister)) popq(map);
     344           0 :     j(equal, &ok, Label::kNear);
     345           0 :     int3();
     346           0 :     bind(&ok);
     347             :   }
     348             : 
     349             :   // Compute the address.
     350        8907 :   leap(dst, FieldOperand(object, HeapObject::kMapOffset));
     351             : 
     352             :   // First, check if a write barrier is even needed. The tests below
     353             :   // catch stores of smis and stores into the young generation.
     354             :   Label done;
     355             : 
     356             :   // A single check of the map's pages interesting flag suffices, since it is
     357             :   // only set during incremental collection, and then it's also guaranteed that
     358             :   // the from object's page's interesting flag is also set.  This optimization
     359             :   // relies on the fact that maps can never be in new space.
     360             :   CheckPageFlag(map,
     361             :                 map,  // Used as scratch.
     362             :                 MemoryChunk::kPointersToHereAreInterestingMask,
     363             :                 zero,
     364             :                 &done,
     365        8907 :                 Label::kNear);
     366             : 
     367             :   RecordWriteStub stub(isolate(), object, map, dst, OMIT_REMEMBERED_SET,
     368        8907 :                        fp_mode);
     369        8907 :   CallStub(&stub);
     370             : 
     371        8907 :   bind(&done);
     372             : 
     373             :   // Count number of write barriers in generated code.
     374        8907 :   isolate()->counters()->write_barriers_static()->Increment();
     375        8907 :   IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);
     376             : 
     377             :   // Clobber clobbered registers when running with the debug-code flag
     378             :   // turned on to provoke errors.
     379        8907 :   if (emit_debug_code()) {
     380             :     Move(dst, kZapValue, Assembler::RelocInfoNone());
     381             :     Move(map, kZapValue, Assembler::RelocInfoNone());
     382             :   }
     383             : }
     384             : 
     385             : 
     386      722807 : void MacroAssembler::RecordWrite(
     387             :     Register object,
     388             :     Register address,
     389             :     Register value,
     390             :     SaveFPRegsMode fp_mode,
     391             :     RememberedSetAction remembered_set_action,
     392             :     SmiCheck smi_check,
     393     2168421 :     PointersToHereCheck pointers_to_here_check_for_value) {
     394             :   DCHECK(!object.is(value));
     395             :   DCHECK(!object.is(address));
     396             :   DCHECK(!value.is(address));
     397      722807 :   AssertNotSmi(object);
     398             : 
     399      722807 :   if (remembered_set_action == OMIT_REMEMBERED_SET &&
     400           0 :       !FLAG_incremental_marking) {
     401           0 :     return;
     402             :   }
     403             : 
     404     1445614 :   if (emit_debug_code()) {
     405             :     Label ok;
     406           4 :     cmpp(value, Operand(address, 0));
     407           2 :     j(equal, &ok, Label::kNear);
     408           2 :     int3();
     409           2 :     bind(&ok);
     410             :   }
     411             : 
     412             :   // First, check if a write barrier is even needed. The tests below
     413             :   // catch stores of smis and stores into the young generation.
     414             :   Label done;
     415             : 
     416      722807 :   if (smi_check == INLINE_SMI_CHECK) {
     417             :     // Skip barrier if writing a smi.
     418        5381 :     JumpIfSmi(value, &done);
     419             :   }
     420             : 
     421      722807 :   if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) {
     422             :     CheckPageFlag(value,
     423             :                   value,  // Used as scratch.
     424             :                   MemoryChunk::kPointersToHereAreInterestingMask,
     425             :                   zero,
     426             :                   &done,
     427      722149 :                   Label::kNear);
     428             :   }
     429             : 
     430             :   CheckPageFlag(object,
     431             :                 value,  // Used as scratch.
     432             :                 MemoryChunk::kPointersFromHereAreInterestingMask,
     433             :                 zero,
     434             :                 &done,
     435      722807 :                 Label::kNear);
     436             : 
     437             :   RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
     438      722807 :                        fp_mode);
     439      722807 :   CallStub(&stub);
     440             : 
     441      722807 :   bind(&done);
     442             : 
     443             :   // Count number of write barriers in generated code.
     444      722807 :   isolate()->counters()->write_barriers_static()->Increment();
     445      722807 :   IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);
     446             : 
     447             :   // Clobber clobbered registers when running with the debug-code flag
     448             :   // turned on to provoke errors.
     449      722807 :   if (emit_debug_code()) {
     450             :     Move(address, kZapValue, Assembler::RelocInfoNone());
     451             :     Move(value, kZapValue, Assembler::RelocInfoNone());
     452             :   }
     453             : }
     454             : 
     455         129 : void MacroAssembler::RecordWriteCodeEntryField(Register js_function,
     456             :                                                Register code_entry,
     457         258 :                                                Register scratch) {
     458             :   const int offset = JSFunction::kCodeEntryOffset;
     459             : 
     460             :   // The input registers are fixed to make calling the C write barrier function
     461             :   // easier.
     462             :   DCHECK(js_function.is(rdi));
     463             :   DCHECK(code_entry.is(rcx));
     464             :   DCHECK(scratch.is(r15));
     465             : 
     466             :   // Since a code entry (value) is always in old space, we don't need to update
     467             :   // remembered set. If incremental marking is off, there is nothing for us to
     468             :   // do.
     469         129 :   if (!FLAG_incremental_marking) return;
     470             : 
     471         129 :   AssertNotSmi(js_function);
     472             : 
     473         129 :   if (emit_debug_code()) {
     474             :     Label ok;
     475           0 :     leap(scratch, FieldOperand(js_function, offset));
     476           0 :     cmpp(code_entry, Operand(scratch, 0));
     477           0 :     j(equal, &ok, Label::kNear);
     478           0 :     int3();
     479           0 :     bind(&ok);
     480             :   }
     481             : 
     482             :   // First, check if a write barrier is even needed. The tests below
     483             :   // catch stores of Smis and stores into young gen.
     484             :   Label done;
     485             : 
     486             :   CheckPageFlag(code_entry, scratch,
     487             :                 MemoryChunk::kPointersToHereAreInterestingMask, zero, &done,
     488         129 :                 Label::kNear);
     489             :   CheckPageFlag(js_function, scratch,
     490             :                 MemoryChunk::kPointersFromHereAreInterestingMask, zero, &done,
     491         129 :                 Label::kNear);
     492             : 
     493             :   // Save input registers.
     494             :   Push(js_function);
     495             :   Push(code_entry);
     496             : 
     497         129 :   const Register dst = scratch;
     498             :   leap(dst, FieldOperand(js_function, offset));
     499             : 
     500             :   // Save caller-saved registers.
     501         129 :   PushCallerSaved(kDontSaveFPRegs, js_function, code_entry);
     502             : 
     503             :   int argument_count = 3;
     504         129 :   PrepareCallCFunction(argument_count);
     505             : 
     506             :   // Load the argument registers.
     507             :   if (arg_reg_1.is(rcx)) {
     508             :     // Windows calling convention.
     509             :     DCHECK(arg_reg_2.is(rdx) && arg_reg_3.is(r8));
     510             : 
     511             :     movp(arg_reg_1, js_function);  // rcx gets rdi.
     512             :     movp(arg_reg_2, dst);          // rdx gets r15.
     513             :   } else {
     514             :     // AMD64 calling convention.
     515             :     DCHECK(arg_reg_1.is(rdi) && arg_reg_2.is(rsi) && arg_reg_3.is(rdx));
     516             : 
     517             :     // rdi is already loaded with js_function.
     518             :     movp(arg_reg_2, dst);  // rsi gets r15.
     519             :   }
     520         129 :   Move(arg_reg_3, ExternalReference::isolate_address(isolate()));
     521             : 
     522             :   {
     523             :     AllowExternalCallThatCantCauseGC scope(this);
     524             :     CallCFunction(
     525             :         ExternalReference::incremental_marking_record_write_code_entry_function(
     526             :             isolate()),
     527         129 :         argument_count);
     528             :   }
     529             : 
     530             :   // Restore caller-saved registers.
     531         129 :   PopCallerSaved(kDontSaveFPRegs, js_function, code_entry);
     532             : 
     533             :   // Restore input registers.
     534             :   Pop(code_entry);
     535             :   Pop(js_function);
     536             : 
     537         129 :   bind(&done);
     538             : }
     539             : 
     540        5683 : void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
     541        8638 :   if (emit_debug_code()) Check(cc, reason);
     542        5683 : }
     543             : 
     544             : 
     545         627 : void MacroAssembler::Check(Condition cc, BailoutReason reason) {
     546             :   Label L;
     547         627 :   j(cc, &L, Label::kNear);
     548         627 :   Abort(reason);
     549             :   // Control will not return here.
     550         627 :   bind(&L);
     551         627 : }
     552             : 
     553             : 
     554          66 : void MacroAssembler::CheckStackAlignment() {
     555          66 :   int frame_alignment = base::OS::ActivationFrameAlignment();
     556          66 :   int frame_alignment_mask = frame_alignment - 1;
     557          66 :   if (frame_alignment > kPointerSize) {
     558             :     DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
     559             :     Label alignment_as_expected;
     560          66 :     testp(rsp, Immediate(frame_alignment_mask));
     561          66 :     j(zero, &alignment_as_expected, Label::kNear);
     562             :     // Abort if stack is not aligned.
     563          66 :     int3();
     564          66 :     bind(&alignment_as_expected);
     565             :   }
     566          66 : }
     567             : 
     568             : 
     569       13566 : void MacroAssembler::Abort(BailoutReason reason) {
     570             : #ifdef DEBUG
     571             :   const char* msg = GetBailoutReason(reason);
     572             :   if (msg != NULL) {
     573             :     RecordComment("Abort message: ");
     574             :     RecordComment(msg);
     575             :   }
     576             : 
     577             :   if (FLAG_trap_on_abort) {
     578             :     int3();
     579             :     return;
     580             :   }
     581             : #endif
     582             : 
     583             :   // Check if Abort() has already been initialized.
     584             :   DCHECK(isolate()->builtins()->Abort()->IsHeapObject());
     585             : 
     586        6783 :   Move(rdx, Smi::FromInt(static_cast<int>(reason)));
     587             : 
     588        6783 :   if (!has_frame_) {
     589             :     // We don't actually want to generate a pile of code for this, so just
     590             :     // claim there is a stack frame, without generating one.
     591         516 :     FrameScope scope(this, StackFrame::NONE);
     592        1032 :     Call(isolate()->builtins()->Abort(), RelocInfo::CODE_TARGET);
     593             :   } else {
     594        6267 :     Call(isolate()->builtins()->Abort(), RelocInfo::CODE_TARGET);
     595             :   }
     596             :   // Control will not return here.
     597        6783 :   int3();
     598        6783 : }
     599             : 
     600             : 
     601     3243752 : void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
     602             :   DCHECK(AllowThisStubCall(stub));  // Calls are not allowed in some stubs
     603     3243752 :   Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
     604     3243757 : }
     605             : 
     606             : 
     607        5375 : void MacroAssembler::TailCallStub(CodeStub* stub) {
     608        5375 :   Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
     609        5375 : }
     610             : 
     611             : 
     612             : bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
     613             :   return has_frame_ || !stub->SometimesSetsUpAFrame();
     614             : }
     615             : 
     616     2010446 : void MacroAssembler::CallRuntime(const Runtime::Function* f,
     617             :                                  int num_arguments,
     618     4020895 :                                  SaveFPRegsMode save_doubles) {
     619             :   // If the expected number of arguments of the runtime function is
     620             :   // constant, we check that the actual number of arguments match the
     621             :   // expectation.
     622     2010446 :   CHECK(f->nargs < 0 || f->nargs == num_arguments);
     623             : 
     624             :   // TODO(1236192): Most runtime routines don't need the number of
     625             :   // arguments passed in because it is constant. At some point we
     626             :   // should remove this need and make the runtime routine entry code
     627             :   // smarter.
     628     2010446 :   Set(rax, num_arguments);
     629     2010447 :   LoadAddress(rbx, ExternalReference(f, isolate()));
     630     2010448 :   CEntryStub ces(isolate(), f->result_size, save_doubles);
     631     2010448 :   CallStub(&ces);
     632     2010447 : }
     633             : 
     634             : 
     635        8102 : void MacroAssembler::CallExternalReference(const ExternalReference& ext,
     636        8102 :                                            int num_arguments) {
     637        8102 :   Set(rax, num_arguments);
     638        8102 :   LoadAddress(rbx, ext);
     639             : 
     640             :   CEntryStub stub(isolate(), 1);
     641        8102 :   CallStub(&stub);
     642        8102 : }
     643             : 
     644             : 
     645       58390 : void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
     646             :   // ----------- S t a t e -------------
     647             :   //  -- rsp[0]                 : return address
     648             :   //  -- rsp[8]                 : argument num_arguments - 1
     649             :   //  ...
     650             :   //  -- rsp[8 * num_arguments] : argument 0 (receiver)
     651             :   //
     652             :   //  For runtime functions with variable arguments:
     653             :   //  -- rax                    : number of  arguments
     654             :   // -----------------------------------
     655             : 
     656       29195 :   const Runtime::Function* function = Runtime::FunctionForId(fid);
     657             :   DCHECK_EQ(1, function->result_size);
     658       29195 :   if (function->nargs >= 0) {
     659       29152 :     Set(rax, function->nargs);
     660             :   }
     661       29195 :   JumpToExternalReference(ExternalReference(fid, isolate()));
     662       29195 : }
     663             : 
     664       39343 : void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
     665       39343 :                                              bool builtin_exit_frame) {
     666             :   // Set the entry point and jump to the C entry runtime stub.
     667       39343 :   LoadAddress(rbx, ext);
     668             :   CEntryStub ces(isolate(), 1, kDontSaveFPRegs, kArgvOnStack,
     669             :                  builtin_exit_frame);
     670       39343 :   jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
     671       39343 : }
     672             : 
     673             : #define REG(Name) \
     674             :   { Register::kCode_##Name }
     675             : 
     676             : static const Register saved_regs[] = {
     677             :   REG(rax), REG(rcx), REG(rdx), REG(rbx), REG(rbp), REG(rsi), REG(rdi), REG(r8),
     678             :   REG(r9), REG(r10), REG(r11)
     679             : };
     680             : 
     681             : #undef REG
     682             : 
     683             : static const int kNumberOfSavedRegs = sizeof(saved_regs) / sizeof(Register);
     684             : 
     685             : 
     686       83835 : void MacroAssembler::PushCallerSaved(SaveFPRegsMode fp_mode,
     687             :                                      Register exclusion1,
     688             :                                      Register exclusion2,
     689             :                                      Register exclusion3) {
     690             :   // We don't allow a GC during a store buffer overflow so there is no need to
     691             :   // store the registers in any particular way, but we do have to store and
     692             :   // restore them.
     693     1006020 :   for (int i = 0; i < kNumberOfSavedRegs; i++) {
     694      922185 :     Register reg = saved_regs[i];
     695      922185 :     if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
     696      678159 :       pushq(reg);
     697             :     }
     698             :   }
     699             :   // R12 to r15 are callee save on all platforms.
     700       83835 :   if (fp_mode == kSaveFPRegs) {
     701       63687 :     subp(rsp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters));
     702     1082679 :     for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
     703     1018992 :       XMMRegister reg = XMMRegister::from_code(i);
     704     1018992 :       Movsd(Operand(rsp, i * kDoubleSize), reg);
     705             :     }
     706             :   }
     707       83835 : }
     708             : 
     709             : 
     710       83835 : void MacroAssembler::PopCallerSaved(SaveFPRegsMode fp_mode,
     711             :                                     Register exclusion1,
     712             :                                     Register exclusion2,
     713             :                                     Register exclusion3) {
     714       83835 :   if (fp_mode == kSaveFPRegs) {
     715     1018992 :     for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
     716     1018992 :       XMMRegister reg = XMMRegister::from_code(i);
     717     1018992 :       Movsd(reg, Operand(rsp, i * kDoubleSize));
     718             :     }
     719       63687 :     addp(rsp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters));
     720             :   }
     721      922185 :   for (int i = kNumberOfSavedRegs - 1; i >= 0; i--) {
     722      922185 :     Register reg = saved_regs[i];
     723      922185 :     if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
     724      678159 :       popq(reg);
     725             :     }
     726             :   }
     727       83835 : }
     728             : 
     729             : 
     730       10303 : void MacroAssembler::Cvtss2sd(XMMRegister dst, XMMRegister src) {
     731       10303 :   if (CpuFeatures::IsSupported(AVX)) {
     732             :     CpuFeatureScope scope(this, AVX);
     733       10295 :     vcvtss2sd(dst, src, src);
     734             :   } else {
     735           8 :     cvtss2sd(dst, src);
     736             :   }
     737       10303 : }
     738             : 
     739             : 
     740        2110 : void MacroAssembler::Cvtss2sd(XMMRegister dst, const Operand& src) {
     741        2110 :   if (CpuFeatures::IsSupported(AVX)) {
     742             :     CpuFeatureScope scope(this, AVX);
     743        2110 :     vcvtss2sd(dst, dst, src);
     744             :   } else {
     745           0 :     cvtss2sd(dst, src);
     746             :   }
     747        2110 : }
     748             : 
     749             : 
     750        6871 : void MacroAssembler::Cvtsd2ss(XMMRegister dst, XMMRegister src) {
     751        6871 :   if (CpuFeatures::IsSupported(AVX)) {
     752             :     CpuFeatureScope scope(this, AVX);
     753        6870 :     vcvtsd2ss(dst, src, src);
     754             :   } else {
     755           1 :     cvtsd2ss(dst, src);
     756             :   }
     757        6871 : }
     758             : 
     759             : 
     760        1978 : void MacroAssembler::Cvtsd2ss(XMMRegister dst, const Operand& src) {
     761        1978 :   if (CpuFeatures::IsSupported(AVX)) {
     762             :     CpuFeatureScope scope(this, AVX);
     763        1978 :     vcvtsd2ss(dst, dst, src);
     764             :   } else {
     765           0 :     cvtsd2ss(dst, src);
     766             :   }
     767        1978 : }
     768             : 
     769             : 
     770      405540 : void MacroAssembler::Cvtlsi2sd(XMMRegister dst, Register src) {
     771      405540 :   if (CpuFeatures::IsSupported(AVX)) {
     772             :     CpuFeatureScope scope(this, AVX);
     773      405034 :     vxorpd(dst, dst, dst);
     774             :     vcvtlsi2sd(dst, dst, src);
     775             :   } else {
     776         506 :     xorpd(dst, dst);
     777         506 :     cvtlsi2sd(dst, src);
     778             :   }
     779      405540 : }
     780             : 
     781             : 
     782        8408 : void MacroAssembler::Cvtlsi2sd(XMMRegister dst, const Operand& src) {
     783        8408 :   if (CpuFeatures::IsSupported(AVX)) {
     784             :     CpuFeatureScope scope(this, AVX);
     785        8408 :     vxorpd(dst, dst, dst);
     786             :     vcvtlsi2sd(dst, dst, src);
     787             :   } else {
     788           0 :     xorpd(dst, dst);
     789           0 :     cvtlsi2sd(dst, src);
     790             :   }
     791        8408 : }
     792             : 
     793             : 
     794         309 : void MacroAssembler::Cvtlsi2ss(XMMRegister dst, Register src) {
     795         309 :   if (CpuFeatures::IsSupported(AVX)) {
     796             :     CpuFeatureScope scope(this, AVX);
     797         309 :     vxorps(dst, dst, dst);
     798             :     vcvtlsi2ss(dst, dst, src);
     799             :   } else {
     800           0 :     xorps(dst, dst);
     801           0 :     cvtlsi2ss(dst, src);
     802             :   }
     803         309 : }
     804             : 
     805             : 
     806           0 : void MacroAssembler::Cvtlsi2ss(XMMRegister dst, const Operand& src) {
     807           0 :   if (CpuFeatures::IsSupported(AVX)) {
     808             :     CpuFeatureScope scope(this, AVX);
     809           0 :     vxorps(dst, dst, dst);
     810             :     vcvtlsi2ss(dst, dst, src);
     811             :   } else {
     812           0 :     xorps(dst, dst);
     813           0 :     cvtlsi2ss(dst, src);
     814             :   }
     815           0 : }
     816             : 
     817             : 
     818          76 : void MacroAssembler::Cvtqsi2ss(XMMRegister dst, Register src) {
     819          76 :   if (CpuFeatures::IsSupported(AVX)) {
     820             :     CpuFeatureScope scope(this, AVX);
     821          76 :     vxorps(dst, dst, dst);
     822             :     vcvtqsi2ss(dst, dst, src);
     823             :   } else {
     824           0 :     xorps(dst, dst);
     825           0 :     cvtqsi2ss(dst, src);
     826             :   }
     827          76 : }
     828             : 
     829             : 
     830           0 : void MacroAssembler::Cvtqsi2ss(XMMRegister dst, const Operand& src) {
     831           0 :   if (CpuFeatures::IsSupported(AVX)) {
     832             :     CpuFeatureScope scope(this, AVX);
     833           0 :     vxorps(dst, dst, dst);
     834             :     vcvtqsi2ss(dst, dst, src);
     835             :   } else {
     836           0 :     xorps(dst, dst);
     837           0 :     cvtqsi2ss(dst, src);
     838             :   }
     839           0 : }
     840             : 
     841             : 
     842        4212 : void MacroAssembler::Cvtqsi2sd(XMMRegister dst, Register src) {
     843        4212 :   if (CpuFeatures::IsSupported(AVX)) {
     844             :     CpuFeatureScope scope(this, AVX);
     845        4172 :     vxorpd(dst, dst, dst);
     846             :     vcvtqsi2sd(dst, dst, src);
     847             :   } else {
     848          40 :     xorpd(dst, dst);
     849          40 :     cvtqsi2sd(dst, src);
     850             :   }
     851        4212 : }
     852             : 
     853             : 
     854           0 : void MacroAssembler::Cvtqsi2sd(XMMRegister dst, const Operand& src) {
     855           0 :   if (CpuFeatures::IsSupported(AVX)) {
     856             :     CpuFeatureScope scope(this, AVX);
     857           0 :     vxorpd(dst, dst, dst);
     858             :     vcvtqsi2sd(dst, dst, src);
     859             :   } else {
     860           0 :     xorpd(dst, dst);
     861           0 :     cvtqsi2sd(dst, src);
     862             :   }
     863           0 : }
     864             : 
     865             : 
     866          13 : void MacroAssembler::Cvtqui2ss(XMMRegister dst, Register src, Register tmp) {
     867             :   Label msb_set_src;
     868             :   Label jmp_return;
     869          13 :   testq(src, src);
     870          13 :   j(sign, &msb_set_src, Label::kNear);
     871          13 :   Cvtqsi2ss(dst, src);
     872          13 :   jmp(&jmp_return, Label::kNear);
     873          13 :   bind(&msb_set_src);
     874             :   movq(tmp, src);
     875             :   shrq(src, Immediate(1));
     876             :   // Recover the least significant bit to avoid rounding errors.
     877          13 :   andq(tmp, Immediate(1));
     878          13 :   orq(src, tmp);
     879          13 :   Cvtqsi2ss(dst, src);
     880          13 :   addss(dst, dst);
     881          13 :   bind(&jmp_return);
     882          13 : }
     883             : 
     884             : 
     885          13 : void MacroAssembler::Cvtqui2sd(XMMRegister dst, Register src, Register tmp) {
     886             :   Label msb_set_src;
     887             :   Label jmp_return;
     888          13 :   testq(src, src);
     889          13 :   j(sign, &msb_set_src, Label::kNear);
     890          13 :   Cvtqsi2sd(dst, src);
     891          13 :   jmp(&jmp_return, Label::kNear);
     892          13 :   bind(&msb_set_src);
     893             :   movq(tmp, src);
     894             :   shrq(src, Immediate(1));
     895          13 :   andq(tmp, Immediate(1));
     896          13 :   orq(src, tmp);
     897          13 :   Cvtqsi2sd(dst, src);
     898          13 :   addsd(dst, dst);
     899          13 :   bind(&jmp_return);
     900          13 : }
     901             : 
     902             : 
     903         244 : void MacroAssembler::Cvtsd2si(Register dst, XMMRegister src) {
     904         244 :   if (CpuFeatures::IsSupported(AVX)) {
     905             :     CpuFeatureScope scope(this, AVX);
     906         244 :     vcvtsd2si(dst, src);
     907             :   } else {
     908           0 :     cvtsd2si(dst, src);
     909             :   }
     910         244 : }
     911             : 
     912             : 
     913         132 : void MacroAssembler::Cvttss2si(Register dst, XMMRegister src) {
     914         132 :   if (CpuFeatures::IsSupported(AVX)) {
     915             :     CpuFeatureScope scope(this, AVX);
     916         132 :     vcvttss2si(dst, src);
     917             :   } else {
     918           0 :     cvttss2si(dst, src);
     919             :   }
     920         132 : }
     921             : 
     922             : 
     923           0 : void MacroAssembler::Cvttss2si(Register dst, const Operand& src) {
     924           0 :   if (CpuFeatures::IsSupported(AVX)) {
     925             :     CpuFeatureScope scope(this, AVX);
     926           0 :     vcvttss2si(dst, src);
     927             :   } else {
     928           0 :     cvttss2si(dst, src);
     929             :   }
     930           0 : }
     931             : 
     932             : 
     933      148954 : void MacroAssembler::Cvttsd2si(Register dst, XMMRegister src) {
     934      148954 :   if (CpuFeatures::IsSupported(AVX)) {
     935             :     CpuFeatureScope scope(this, AVX);
     936      148862 :     vcvttsd2si(dst, src);
     937             :   } else {
     938          92 :     cvttsd2si(dst, src);
     939             :   }
     940      148954 : }
     941             : 
     942             : 
     943        8336 : void MacroAssembler::Cvttsd2si(Register dst, const Operand& src) {
     944        8336 :   if (CpuFeatures::IsSupported(AVX)) {
     945             :     CpuFeatureScope scope(this, AVX);
     946        8336 :     vcvttsd2si(dst, src);
     947             :   } else {
     948           0 :     cvttsd2si(dst, src);
     949             :   }
     950        8336 : }
     951             : 
     952             : 
     953          91 : void MacroAssembler::Cvttss2siq(Register dst, XMMRegister src) {
     954          91 :   if (CpuFeatures::IsSupported(AVX)) {
     955             :     CpuFeatureScope scope(this, AVX);
     956          91 :     vcvttss2siq(dst, src);
     957             :   } else {
     958           0 :     cvttss2siq(dst, src);
     959             :   }
     960          91 : }
     961             : 
     962             : 
     963           0 : void MacroAssembler::Cvttss2siq(Register dst, const Operand& src) {
     964           0 :   if (CpuFeatures::IsSupported(AVX)) {
     965             :     CpuFeatureScope scope(this, AVX);
     966           0 :     vcvttss2siq(dst, src);
     967             :   } else {
     968           0 :     cvttss2siq(dst, src);
     969             :   }
     970           0 : }
     971             : 
     972             : 
     973      103326 : void MacroAssembler::Cvttsd2siq(Register dst, XMMRegister src) {
     974      103326 :   if (CpuFeatures::IsSupported(AVX)) {
     975             :     CpuFeatureScope scope(this, AVX);
     976      103201 :     vcvttsd2siq(dst, src);
     977             :   } else {
     978         125 :     cvttsd2siq(dst, src);
     979             :   }
     980      103326 : }
     981             : 
     982             : 
     983           0 : void MacroAssembler::Cvttsd2siq(Register dst, const Operand& src) {
     984           0 :   if (CpuFeatures::IsSupported(AVX)) {
     985             :     CpuFeatureScope scope(this, AVX);
     986           0 :     vcvttsd2siq(dst, src);
     987             :   } else {
     988           0 :     cvttsd2siq(dst, src);
     989             :   }
     990           0 : }
     991             : 
     992             : 
     993      280848 : void MacroAssembler::Load(Register dst, const Operand& src, Representation r) {
     994             :   DCHECK(!r.IsDouble());
     995      280848 :   if (r.IsInteger8()) {
     996           7 :     movsxbq(dst, src);
     997      280841 :   } else if (r.IsUInteger8()) {
     998       21794 :     movzxbl(dst, src);
     999      259047 :   } else if (r.IsInteger16()) {
    1000           7 :     movsxwq(dst, src);
    1001      259040 :   } else if (r.IsUInteger16()) {
    1002           7 :     movzxwl(dst, src);
    1003      259033 :   } else if (r.IsInteger32()) {
    1004       89527 :     movl(dst, src);
    1005             :   } else {
    1006      169506 :     movp(dst, src);
    1007             :   }
    1008      280848 : }
    1009             : 
    1010             : 
    1011       93357 : void MacroAssembler::Store(const Operand& dst, Register src, Representation r) {
    1012             :   DCHECK(!r.IsDouble());
    1013       93357 :   if (r.IsInteger8() || r.IsUInteger8()) {
    1014          14 :     movb(dst, src);
    1015       93343 :   } else if (r.IsInteger16() || r.IsUInteger16()) {
    1016          14 :     movw(dst, src);
    1017       93329 :   } else if (r.IsInteger32()) {
    1018        1863 :     movl(dst, src);
    1019             :   } else {
    1020       91466 :     if (r.IsHeapObject()) {
    1021       11269 :       AssertNotSmi(src);
    1022       80197 :     } else if (r.IsSmi()) {
    1023       35892 :       AssertSmi(src);
    1024             :     }
    1025       91466 :     movp(dst, src);
    1026             :   }
    1027       93357 : }
    1028             : 
    1029             : 
    1030     5813783 : void MacroAssembler::Set(Register dst, int64_t x) {
    1031     5813783 :   if (x == 0) {
    1032      623715 :     xorl(dst, dst);
    1033     5190068 :   } else if (is_uint32(x)) {
    1034     4478675 :     movl(dst, Immediate(static_cast<uint32_t>(x)));
    1035      711393 :   } else if (is_int32(x)) {
    1036      204025 :     movq(dst, Immediate(static_cast<int32_t>(x)));
    1037             :   } else {
    1038      507368 :     movq(dst, x);
    1039             :   }
    1040     5813785 : }
    1041             : 
    1042        6131 : void MacroAssembler::Set(const Operand& dst, intptr_t x) {
    1043             :   if (kPointerSize == kInt64Size) {
    1044        6131 :     if (is_int32(x)) {
    1045        6131 :       movp(dst, Immediate(static_cast<int32_t>(x)));
    1046             :     } else {
    1047           0 :       Set(kScratchRegister, x);
    1048           0 :       movp(dst, kScratchRegister);
    1049             :     }
    1050             :   } else {
    1051             :     movp(dst, Immediate(static_cast<int32_t>(x)));
    1052             :   }
    1053        6131 : }
    1054             : 
    1055             : 
    1056             : // ----------------------------------------------------------------------------
    1057             : // Smi tagging, untagging and tag detection.
    1058             : 
    1059           0 : bool MacroAssembler::IsUnsafeInt(const int32_t x) {
    1060             :   static const int kMaxBits = 17;
    1061     1612464 :   return !is_intn(x, kMaxBits);
    1062             : }
    1063             : 
    1064             : 
    1065     1353863 : void MacroAssembler::SafeMove(Register dst, Smi* src) {
    1066             :   DCHECK(!dst.is(kScratchRegister));
    1067     1229549 :   if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
    1068             :     if (SmiValuesAre32Bits()) {
    1069             :       // JIT cookie can be converted to Smi.
    1070       62157 :       Move(dst, Smi::FromInt(src->value() ^ jit_cookie()));
    1071             :       Move(kScratchRegister, Smi::FromInt(jit_cookie()));
    1072       62157 :       xorp(dst, kScratchRegister);
    1073             :     } else {
    1074             :       DCHECK(SmiValuesAre31Bits());
    1075             :       int32_t value = static_cast<int32_t>(reinterpret_cast<intptr_t>(src));
    1076             :       movp(dst, Immediate(value ^ jit_cookie()));
    1077             :       xorp(dst, Immediate(jit_cookie()));
    1078             :     }
    1079             :   } else {
    1080             :     Move(dst, src);
    1081             :   }
    1082     1167394 : }
    1083             : 
    1084             : 
    1085      507160 : void MacroAssembler::SafePush(Smi* src) {
    1086      465768 :   if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
    1087             :     if (SmiValuesAre32Bits()) {
    1088             :       // JIT cookie can be converted to Smi.
    1089       41392 :       Push(Smi::FromInt(src->value() ^ jit_cookie()));
    1090             :       Move(kScratchRegister, Smi::FromInt(jit_cookie()));
    1091       41392 :       xorp(Operand(rsp, 0), kScratchRegister);
    1092             :     } else {
    1093             :       DCHECK(SmiValuesAre31Bits());
    1094             :       int32_t value = static_cast<int32_t>(reinterpret_cast<intptr_t>(src));
    1095             :       Push(Immediate(value ^ jit_cookie()));
    1096             :       xorp(Operand(rsp, 0), Immediate(jit_cookie()));
    1097             :     }
    1098             :   } else {
    1099      424376 :     Push(src);
    1100             :   }
    1101      445072 : }
    1102             : 
    1103             : 
    1104      716417 : Register MacroAssembler::GetSmiConstant(Smi* source) {
    1105             :   STATIC_ASSERT(kSmiTag == 0);
    1106             :   int value = source->value();
    1107      716417 :   if (value == 0) {
    1108        5189 :     xorl(kScratchRegister, kScratchRegister);
    1109        5189 :     return kScratchRegister;
    1110             :   }
    1111      711228 :   LoadSmiConstant(kScratchRegister, source);
    1112      711228 :   return kScratchRegister;
    1113             : }
    1114             : 
    1115             : 
    1116    11519134 : void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
    1117             :   STATIC_ASSERT(kSmiTag == 0);
    1118             :   int value = source->value();
    1119    11519134 :   if (value == 0) {
    1120     1139619 :     xorl(dst, dst);
    1121             :   } else {
    1122             :     Move(dst, source, Assembler::RelocInfoNone());
    1123             :   }
    1124    11519136 : }
    1125             : 
    1126             : 
    1127      204389 : void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
    1128             :   STATIC_ASSERT(kSmiTag == 0);
    1129      204389 :   if (!dst.is(src)) {
    1130      124085 :     movl(dst, src);
    1131             :   }
    1132      204389 :   shlp(dst, Immediate(kSmiShift));
    1133      204389 : }
    1134             : 
    1135             : 
    1136           0 : void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
    1137           0 :   if (emit_debug_code()) {
    1138           0 :     testb(dst, Immediate(0x01));
    1139             :     Label ok;
    1140           0 :     j(zero, &ok, Label::kNear);
    1141           0 :     Abort(kInteger32ToSmiFieldWritingToNonSmiLocation);
    1142           0 :     bind(&ok);
    1143             :   }
    1144             : 
    1145             :   if (SmiValuesAre32Bits()) {
    1146             :     DCHECK(kSmiShift % kBitsPerByte == 0);
    1147           0 :     movl(Operand(dst, kSmiShift / kBitsPerByte), src);
    1148             :   } else {
    1149             :     DCHECK(SmiValuesAre31Bits());
    1150             :     Integer32ToSmi(kScratchRegister, src);
    1151             :     movp(dst, kScratchRegister);
    1152             :   }
    1153           0 : }
    1154             : 
    1155             : 
    1156         168 : void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
    1157             :                                                 Register src,
    1158             :                                                 int constant) {
    1159         168 :   if (dst.is(src)) {
    1160          84 :     addl(dst, Immediate(constant));
    1161             :   } else {
    1162         168 :     leal(dst, Operand(src, constant));
    1163             :   }
    1164         168 :   shlp(dst, Immediate(kSmiShift));
    1165         168 : }
    1166             : 
    1167             : 
    1168      271295 : void MacroAssembler::SmiToInteger32(Register dst, Register src) {
    1169             :   STATIC_ASSERT(kSmiTag == 0);
    1170      271295 :   if (!dst.is(src)) {
    1171       92884 :     movp(dst, src);
    1172             :   }
    1173             : 
    1174             :   if (SmiValuesAre32Bits()) {
    1175      271295 :     shrp(dst, Immediate(kSmiShift));
    1176             :   } else {
    1177             :     DCHECK(SmiValuesAre31Bits());
    1178             :     sarl(dst, Immediate(kSmiShift));
    1179             :   }
    1180      271295 : }
    1181             : 
    1182             : 
    1183        8515 : void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
    1184             :   if (SmiValuesAre32Bits()) {
    1185       17030 :     movl(dst, Operand(src, kSmiShift / kBitsPerByte));
    1186             :   } else {
    1187             :     DCHECK(SmiValuesAre31Bits());
    1188             :     movl(dst, src);
    1189             :     sarl(dst, Immediate(kSmiShift));
    1190             :   }
    1191        8515 : }
    1192             : 
    1193             : 
    1194       11618 : void MacroAssembler::SmiToInteger64(Register dst, Register src) {
    1195             :   STATIC_ASSERT(kSmiTag == 0);
    1196       11618 :   if (!dst.is(src)) {
    1197       10154 :     movp(dst, src);
    1198             :   }
    1199       11618 :   sarp(dst, Immediate(kSmiShift));
    1200             :   if (kPointerSize == kInt32Size) {
    1201             :     // Sign extend to 64-bit.
    1202             :     movsxlq(dst, dst);
    1203             :   }
    1204       11618 : }
    1205             : 
    1206             : 
    1207           0 : void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
    1208             :   if (SmiValuesAre32Bits()) {
    1209           0 :     movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
    1210             :   } else {
    1211             :     DCHECK(SmiValuesAre31Bits());
    1212             :     movp(dst, src);
    1213             :     SmiToInteger64(dst, dst);
    1214             :   }
    1215           0 : }
    1216             : 
    1217             : 
    1218       18215 : void MacroAssembler::SmiTest(Register src) {
    1219       18215 :   AssertSmi(src);
    1220       18215 :   testp(src, src);
    1221       18215 : }
    1222             : 
    1223             : 
    1224         231 : void MacroAssembler::SmiCompare(Register smi1, Register smi2) {
    1225         231 :   AssertSmi(smi1);
    1226         231 :   AssertSmi(smi2);
    1227         231 :   cmpp(smi1, smi2);
    1228         231 : }
    1229             : 
    1230             : 
    1231           0 : void MacroAssembler::SmiCompare(Register dst, Smi* src) {
    1232           0 :   AssertSmi(dst);
    1233           0 :   Cmp(dst, src);
    1234           0 : }
    1235             : 
    1236             : 
    1237       82905 : void MacroAssembler::Cmp(Register dst, Smi* src) {
    1238             :   DCHECK(!dst.is(kScratchRegister));
    1239       82905 :   if (src->value() == 0) {
    1240       79543 :     testp(dst, dst);
    1241             :   } else {
    1242        3362 :     Register constant_reg = GetSmiConstant(src);
    1243        3362 :     cmpp(dst, constant_reg);
    1244             :   }
    1245       82905 : }
    1246             : 
    1247             : 
    1248       15335 : void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
    1249       15335 :   AssertSmi(dst);
    1250       15335 :   AssertSmi(src);
    1251       15335 :   cmpp(dst, src);
    1252       15335 : }
    1253             : 
    1254             : 
    1255           0 : void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
    1256           0 :   AssertSmi(dst);
    1257           0 :   AssertSmi(src);
    1258           0 :   cmpp(dst, src);
    1259           0 : }
    1260             : 
    1261             : 
    1262       38670 : void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
    1263       38670 :   AssertSmi(dst);
    1264             :   if (SmiValuesAre32Bits()) {
    1265       77340 :     cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
    1266             :   } else {
    1267             :     DCHECK(SmiValuesAre31Bits());
    1268             :     cmpl(dst, Immediate(src));
    1269             :   }
    1270       38670 : }
    1271             : 
    1272             : 
    1273         215 : void MacroAssembler::Cmp(const Operand& dst, Smi* src) {
    1274             :   // The Operand cannot use the smi register.
    1275         215 :   Register smi_reg = GetSmiConstant(src);
    1276             :   DCHECK(!dst.AddressUsesRegister(smi_reg));
    1277         215 :   cmpp(dst, smi_reg);
    1278         215 : }
    1279             : 
    1280             : 
    1281        1008 : void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
    1282             :                                                            Register src,
    1283             :                                                            int power) {
    1284             :   DCHECK(power >= 0);
    1285             :   DCHECK(power < 64);
    1286        1008 :   if (power == 0) {
    1287         126 :     SmiToInteger64(dst, src);
    1288        1134 :     return;
    1289             :   }
    1290         882 :   if (!dst.is(src)) {
    1291         441 :     movp(dst, src);
    1292             :   }
    1293         882 :   if (power < kSmiShift) {
    1294         882 :     sarp(dst, Immediate(kSmiShift - power));
    1295           0 :   } else if (power > kSmiShift) {
    1296           0 :     shlp(dst, Immediate(power - kSmiShift));
    1297             :   }
    1298             : }
    1299             : 
    1300             : 
    1301      124205 : Condition MacroAssembler::CheckSmi(Register src) {
    1302             :   STATIC_ASSERT(kSmiTag == 0);
    1303     1556415 :   testb(src, Immediate(kSmiTagMask));
    1304      124205 :   return zero;
    1305             : }
    1306             : 
    1307             : 
    1308        6572 : Condition MacroAssembler::CheckSmi(const Operand& src) {
    1309             :   STATIC_ASSERT(kSmiTag == 0);
    1310        6636 :   testb(src, Immediate(kSmiTagMask));
    1311        6572 :   return zero;
    1312             : }
    1313             : 
    1314             : 
    1315          49 : Condition MacroAssembler::CheckNonNegativeSmi(Register src) {
    1316             :   STATIC_ASSERT(kSmiTag == 0);
    1317             :   // Test that both bits of the mask 0x8000000000000001 are zero.
    1318          49 :   movp(kScratchRegister, src);
    1319             :   rolp(kScratchRegister, Immediate(1));
    1320          49 :   testb(kScratchRegister, Immediate(3));
    1321          49 :   return zero;
    1322             : }
    1323             : 
    1324             : 
    1325       28531 : Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
    1326       28531 :   if (first.is(second)) {
    1327          14 :     return CheckSmi(first);
    1328             :   }
    1329             :   STATIC_ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
    1330             :   if (SmiValuesAre32Bits()) {
    1331       57034 :     leal(kScratchRegister, Operand(first, second, times_1, 0));
    1332       28517 :     testb(kScratchRegister, Immediate(0x03));
    1333             :   } else {
    1334             :     DCHECK(SmiValuesAre31Bits());
    1335             :     movl(kScratchRegister, first);
    1336             :     orl(kScratchRegister, second);
    1337             :     testb(kScratchRegister, Immediate(kSmiTagMask));
    1338             :   }
    1339       28517 :   return zero;
    1340             : }
    1341             : 
    1342             : 
    1343           0 : Condition MacroAssembler::CheckBothNonNegativeSmi(Register first,
    1344             :                                                   Register second) {
    1345           0 :   if (first.is(second)) {
    1346           0 :     return CheckNonNegativeSmi(first);
    1347             :   }
    1348           0 :   movp(kScratchRegister, first);
    1349           0 :   orp(kScratchRegister, second);
    1350             :   rolp(kScratchRegister, Immediate(1));
    1351             :   testl(kScratchRegister, Immediate(3));
    1352           0 :   return zero;
    1353             : }
    1354             : 
    1355             : 
    1356       26462 : Condition MacroAssembler::CheckEitherSmi(Register first,
    1357             :                                          Register second,
    1358             :                                          Register scratch) {
    1359       26462 :   if (first.is(second)) {
    1360           0 :     return CheckSmi(first);
    1361             :   }
    1362       26462 :   if (scratch.is(second)) {
    1363           0 :     andl(scratch, first);
    1364             :   } else {
    1365       26462 :     if (!scratch.is(first)) {
    1366       26462 :       movl(scratch, first);
    1367             :     }
    1368       26462 :     andl(scratch, second);
    1369             :   }
    1370       26462 :   testb(scratch, Immediate(kSmiTagMask));
    1371       26462 :   return zero;
    1372             : }
    1373             : 
    1374             : 
    1375          28 : Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
    1376             :   if (SmiValuesAre32Bits()) {
    1377             :     // A 32-bit integer value can always be converted to a smi.
    1378             :     return always;
    1379             :   } else {
    1380             :     DCHECK(SmiValuesAre31Bits());
    1381             :     cmpl(src, Immediate(0xc0000000));
    1382             :     return positive;
    1383             :   }
    1384             : }
    1385             : 
    1386             : 
    1387         216 : Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
    1388             :   if (SmiValuesAre32Bits()) {
    1389             :     // An unsigned 32-bit integer value is valid as long as the high bit
    1390             :     // is not set.
    1391        5370 :     testl(src, src);
    1392             :     return positive;
    1393             :   } else {
    1394             :     DCHECK(SmiValuesAre31Bits());
    1395             :     testl(src, Immediate(0xc0000000));
    1396             :     return zero;
    1397             :   }
    1398             : }
    1399             : 
    1400             : 
    1401           0 : void MacroAssembler::JumpIfValidSmiValue(Register src,
    1402             :                                          Label* on_valid,
    1403             :                                          Label::Distance near_jump) {
    1404             :   Condition is_valid = CheckInteger32ValidSmiValue(src);
    1405           0 :   j(is_valid, on_valid, near_jump);
    1406           0 : }
    1407             : 
    1408             : 
    1409           0 : void MacroAssembler::JumpIfNotValidSmiValue(Register src,
    1410             :                                             Label* on_invalid,
    1411             :                                             Label::Distance near_jump) {
    1412             :   Condition is_valid = CheckInteger32ValidSmiValue(src);
    1413           0 :   j(NegateCondition(is_valid), on_invalid, near_jump);
    1414           0 : }
    1415             : 
    1416             : 
    1417        5154 : void MacroAssembler::JumpIfUIntValidSmiValue(Register src,
    1418             :                                              Label* on_valid,
    1419             :                                              Label::Distance near_jump) {
    1420             :   Condition is_valid = CheckUInteger32ValidSmiValue(src);
    1421        5154 :   j(is_valid, on_valid, near_jump);
    1422        5154 : }
    1423             : 
    1424             : 
    1425           0 : void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
    1426             :                                                 Label* on_invalid,
    1427             :                                                 Label::Distance near_jump) {
    1428             :   Condition is_valid = CheckUInteger32ValidSmiValue(src);
    1429           0 :   j(NegateCondition(is_valid), on_invalid, near_jump);
    1430           0 : }
    1431             : 
    1432             : 
    1433     1302818 : void MacroAssembler::JumpIfSmi(Register src,
    1434             :                                Label* on_smi,
    1435             :                                Label::Distance near_jump) {
    1436             :   Condition smi = CheckSmi(src);
    1437     1302818 :   j(smi, on_smi, near_jump);
    1438     1302819 : }
    1439             : 
    1440             : 
    1441      129211 : void MacroAssembler::JumpIfNotSmi(Register src,
    1442             :                                   Label* on_not_smi,
    1443             :                                   Label::Distance near_jump) {
    1444             :   Condition smi = CheckSmi(src);
    1445      129211 :   j(NegateCondition(smi), on_not_smi, near_jump);
    1446      129211 : }
    1447             : 
    1448          43 : void MacroAssembler::JumpIfNotSmi(Operand src, Label* on_not_smi,
    1449             :                                   Label::Distance near_jump) {
    1450             :   Condition smi = CheckSmi(src);
    1451          43 :   j(NegateCondition(smi), on_not_smi, near_jump);
    1452          43 : }
    1453             : 
    1454           0 : void MacroAssembler::JumpUnlessNonNegativeSmi(
    1455             :     Register src, Label* on_not_smi_or_negative,
    1456             :     Label::Distance near_jump) {
    1457           0 :   Condition non_negative_smi = CheckNonNegativeSmi(src);
    1458           0 :   j(NegateCondition(non_negative_smi), on_not_smi_or_negative, near_jump);
    1459           0 : }
    1460             : 
    1461             : 
    1462           0 : void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
    1463             :                                              Smi* constant,
    1464             :                                              Label* on_equals,
    1465             :                                              Label::Distance near_jump) {
    1466             :   SmiCompare(src, constant);
    1467           0 :   j(equal, on_equals, near_jump);
    1468           0 : }
    1469             : 
    1470             : 
    1471       28489 : void MacroAssembler::JumpIfNotBothSmi(Register src1,
    1472             :                                       Register src2,
    1473             :                                       Label* on_not_both_smi,
    1474             :                                       Label::Distance near_jump) {
    1475       28489 :   Condition both_smi = CheckBothSmi(src1, src2);
    1476       28489 :   j(NegateCondition(both_smi), on_not_both_smi, near_jump);
    1477       28489 : }
    1478             : 
    1479             : 
    1480           0 : void MacroAssembler::JumpUnlessBothNonNegativeSmi(Register src1,
    1481             :                                                   Register src2,
    1482             :                                                   Label* on_not_both_smi,
    1483             :                                                   Label::Distance near_jump) {
    1484           0 :   Condition both_smi = CheckBothNonNegativeSmi(src1, src2);
    1485           0 :   j(NegateCondition(both_smi), on_not_both_smi, near_jump);
    1486           0 : }
    1487             : 
    1488             : 
    1489         112 : void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
    1490         112 :   if (constant->value() == 0) {
    1491           0 :     if (!dst.is(src)) {
    1492           0 :       movp(dst, src);
    1493             :     }
    1494         112 :     return;
    1495         112 :   } else if (dst.is(src)) {
    1496             :     DCHECK(!dst.is(kScratchRegister));
    1497          56 :     Register constant_reg = GetSmiConstant(constant);
    1498          56 :     addp(dst, constant_reg);
    1499             :   } else {
    1500          56 :     LoadSmiConstant(dst, constant);
    1501          56 :     addp(dst, src);
    1502             :   }
    1503             : }
    1504             : 
    1505             : 
    1506     2265330 : void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
    1507     2265330 :   if (constant->value() != 0) {
    1508             :     if (SmiValuesAre32Bits()) {
    1509             :       addl(Operand(dst, kSmiShift / kBitsPerByte),
    1510     4530683 :            Immediate(constant->value()));
    1511             :     } else {
    1512             :       DCHECK(SmiValuesAre31Bits());
    1513             :       addp(dst, Immediate(constant));
    1514             :     }
    1515             :   }
    1516     2265337 : }
    1517             : 
    1518             : 
    1519       67216 : void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant,
    1520             :                                     SmiOperationConstraints constraints,
    1521             :                                     Label* bailout_label,
    1522             :                                     Label::Distance near_jump) {
    1523       67216 :   if (constant->value() == 0) {
    1524           0 :     if (!dst.is(src)) {
    1525           0 :       movp(dst, src);
    1526             :     }
    1527       67216 :   } else if (dst.is(src)) {
    1528             :     DCHECK(!dst.is(kScratchRegister));
    1529       67062 :     LoadSmiConstant(kScratchRegister, constant);
    1530       67062 :     addp(dst, kScratchRegister);
    1531       67062 :     if (constraints & SmiOperationConstraint::kBailoutOnNoOverflow) {
    1532       66908 :       j(no_overflow, bailout_label, near_jump);
    1533             :       DCHECK(constraints & SmiOperationConstraint::kPreserveSourceRegister);
    1534       66908 :       subp(dst, kScratchRegister);
    1535         154 :     } else if (constraints & SmiOperationConstraint::kBailoutOnOverflow) {
    1536         154 :       if (constraints & SmiOperationConstraint::kPreserveSourceRegister) {
    1537             :         Label done;
    1538         105 :         j(no_overflow, &done, Label::kNear);
    1539         105 :         subp(dst, kScratchRegister);
    1540         105 :         jmp(bailout_label, near_jump);
    1541         105 :         bind(&done);
    1542             :       } else {
    1543             :         // Bailout if overflow without reserving src.
    1544          49 :         j(overflow, bailout_label, near_jump);
    1545             :       }
    1546             :     } else {
    1547           0 :       UNREACHABLE();
    1548             :     }
    1549             :   } else {
    1550             :     DCHECK(constraints & SmiOperationConstraint::kPreserveSourceRegister);
    1551             :     DCHECK(constraints & SmiOperationConstraint::kBailoutOnOverflow);
    1552         154 :     LoadSmiConstant(dst, constant);
    1553         154 :     addp(dst, src);
    1554         154 :     j(overflow, bailout_label, near_jump);
    1555             :   }
    1556       67216 : }
    1557             : 
    1558             : 
    1559         126 : void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
    1560         126 :   if (constant->value() == 0) {
    1561           0 :     if (!dst.is(src)) {
    1562           0 :       movp(dst, src);
    1563             :     }
    1564         126 :   } else if (dst.is(src)) {
    1565             :     DCHECK(!dst.is(kScratchRegister));
    1566          63 :     Register constant_reg = GetSmiConstant(constant);
    1567          63 :     subp(dst, constant_reg);
    1568             :   } else {
    1569          63 :     if (constant->value() == Smi::kMinValue) {
    1570           7 :       LoadSmiConstant(dst, constant);
    1571             :       // Adding and subtracting the min-value gives the same result, it only
    1572             :       // differs on the overflow bit, which we don't check here.
    1573           7 :       addp(dst, src);
    1574             :     } else {
    1575             :       // Subtract by adding the negation.
    1576         112 :       LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
    1577          56 :       addp(dst, src);
    1578             :     }
    1579             :   }
    1580         126 : }
    1581             : 
    1582             : 
    1583        4062 : void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant,
    1584             :                                     SmiOperationConstraints constraints,
    1585             :                                     Label* bailout_label,
    1586             :                                     Label::Distance near_jump) {
    1587        4062 :   if (constant->value() == 0) {
    1588           0 :     if (!dst.is(src)) {
    1589           0 :       movp(dst, src);
    1590             :     }
    1591        4062 :   } else if (dst.is(src)) {
    1592             :     DCHECK(!dst.is(kScratchRegister));
    1593        3950 :     LoadSmiConstant(kScratchRegister, constant);
    1594        3950 :     subp(dst, kScratchRegister);
    1595        3950 :     if (constraints & SmiOperationConstraint::kBailoutOnNoOverflow) {
    1596        3740 :       j(no_overflow, bailout_label, near_jump);
    1597             :       DCHECK(constraints & SmiOperationConstraint::kPreserveSourceRegister);
    1598        3740 :       addp(dst, kScratchRegister);
    1599         210 :     } else if (constraints & SmiOperationConstraint::kBailoutOnOverflow) {
    1600         210 :       if (constraints & SmiOperationConstraint::kPreserveSourceRegister) {
    1601             :         Label done;
    1602         161 :         j(no_overflow, &done, Label::kNear);
    1603         161 :         addp(dst, kScratchRegister);
    1604         161 :         jmp(bailout_label, near_jump);
    1605         161 :         bind(&done);
    1606             :       } else {
    1607             :         // Bailout if overflow without reserving src.
    1608          49 :         j(overflow, bailout_label, near_jump);
    1609             :       }
    1610             :     } else {
    1611           0 :       UNREACHABLE();
    1612             :     }
    1613             :   } else {
    1614             :     DCHECK(constraints & SmiOperationConstraint::kPreserveSourceRegister);
    1615             :     DCHECK(constraints & SmiOperationConstraint::kBailoutOnOverflow);
    1616         112 :     if (constant->value() == Smi::kMinValue) {
    1617             :       DCHECK(!dst.is(kScratchRegister));
    1618          14 :       movp(dst, src);
    1619          14 :       LoadSmiConstant(kScratchRegister, constant);
    1620          14 :       subp(dst, kScratchRegister);
    1621          14 :       j(overflow, bailout_label, near_jump);
    1622             :     } else {
    1623             :       // Subtract by adding the negation.
    1624         196 :       LoadSmiConstant(dst, Smi::FromInt(-(constant->value())));
    1625          98 :       addp(dst, src);
    1626          98 :       j(overflow, bailout_label, near_jump);
    1627             :     }
    1628             :   }
    1629        4062 : }
    1630             : 
    1631             : 
    1632         112 : void MacroAssembler::SmiNeg(Register dst,
    1633             :                             Register src,
    1634             :                             Label* on_smi_result,
    1635             :                             Label::Distance near_jump) {
    1636         112 :   if (dst.is(src)) {
    1637             :     DCHECK(!dst.is(kScratchRegister));
    1638          56 :     movp(kScratchRegister, src);
    1639             :     negp(dst);  // Low 32 bits are retained as zero by negation.
    1640             :     // Test if result is zero or Smi::kMinValue.
    1641          56 :     cmpp(dst, kScratchRegister);
    1642          56 :     j(not_equal, on_smi_result, near_jump);
    1643             :     movp(src, kScratchRegister);
    1644             :   } else {
    1645          56 :     movp(dst, src);
    1646             :     negp(dst);
    1647          56 :     cmpp(dst, src);
    1648             :     // If the result is zero or Smi::kMinValue, negation failed to create a smi.
    1649          56 :     j(not_equal, on_smi_result, near_jump);
    1650             :   }
    1651         112 : }
    1652             : 
    1653             : 
    1654             : template<class T>
    1655      149964 : static void SmiAddHelper(MacroAssembler* masm,
    1656             :                          Register dst,
    1657             :                          Register src1,
    1658             :                          T src2,
    1659             :                          Label* on_not_smi_result,
    1660             :                          Label::Distance near_jump) {
    1661      149964 :   if (dst.is(src1)) {
    1662             :     Label done;
    1663         154 :     masm->addp(dst, src2);
    1664         154 :     masm->j(no_overflow, &done, Label::kNear);
    1665             :     // Restore src1.
    1666         154 :     masm->subp(dst, src2);
    1667         154 :     masm->jmp(on_not_smi_result, near_jump);
    1668         154 :     masm->bind(&done);
    1669             :   } else {
    1670      149810 :     masm->movp(dst, src1);
    1671      149810 :     masm->addp(dst, src2);
    1672      149810 :     masm->j(overflow, on_not_smi_result, near_jump);
    1673             :   }
    1674      149964 : }
    1675             : 
    1676             : 
    1677      149964 : void MacroAssembler::SmiAdd(Register dst,
    1678             :                             Register src1,
    1679             :                             Register src2,
    1680             :                             Label* on_not_smi_result,
    1681             :                             Label::Distance near_jump) {
    1682             :   DCHECK_NOT_NULL(on_not_smi_result);
    1683             :   DCHECK(!dst.is(src2));
    1684      149964 :   SmiAddHelper<Register>(this, dst, src1, src2, on_not_smi_result, near_jump);
    1685      149964 : }
    1686             : 
    1687             : 
    1688           0 : void MacroAssembler::SmiAdd(Register dst,
    1689             :                             Register src1,
    1690             :                             const Operand& src2,
    1691             :                             Label* on_not_smi_result,
    1692             :                             Label::Distance near_jump) {
    1693             :   DCHECK_NOT_NULL(on_not_smi_result);
    1694             :   DCHECK(!src2.AddressUsesRegister(dst));
    1695           0 :   SmiAddHelper<Operand>(this, dst, src1, src2, on_not_smi_result, near_jump);
    1696           0 : }
    1697             : 
    1698             : 
    1699           0 : void MacroAssembler::SmiAdd(Register dst,
    1700             :                             Register src1,
    1701             :                             Register src2) {
    1702             :   // No overflow checking. Use only when it's known that
    1703             :   // overflowing is impossible.
    1704           0 :   if (!dst.is(src1)) {
    1705           0 :     if (emit_debug_code()) {
    1706           0 :       movp(kScratchRegister, src1);
    1707           0 :       addp(kScratchRegister, src2);
    1708           0 :       Check(no_overflow, kSmiAdditionOverflow);
    1709             :     }
    1710           0 :     leap(dst, Operand(src1, src2, times_1, 0));
    1711             :   } else {
    1712           0 :     addp(dst, src2);
    1713             :     Assert(no_overflow, kSmiAdditionOverflow);
    1714             :   }
    1715           0 : }
    1716             : 
    1717             : 
    1718             : template<class T>
    1719       10218 : static void SmiSubHelper(MacroAssembler* masm,
    1720             :                          Register dst,
    1721             :                          Register src1,
    1722             :                          T src2,
    1723             :                          Label* on_not_smi_result,
    1724             :                          Label::Distance near_jump) {
    1725       10218 :   if (dst.is(src1)) {
    1726             :     Label done;
    1727         161 :     masm->subp(dst, src2);
    1728         161 :     masm->j(no_overflow, &done, Label::kNear);
    1729             :     // Restore src1.
    1730         161 :     masm->addp(dst, src2);
    1731         161 :     masm->jmp(on_not_smi_result, near_jump);
    1732         161 :     masm->bind(&done);
    1733             :   } else {
    1734       10057 :     masm->movp(dst, src1);
    1735       10057 :     masm->subp(dst, src2);
    1736       10057 :     masm->j(overflow, on_not_smi_result, near_jump);
    1737             :   }
    1738       10218 : }
    1739             : 
    1740             : 
    1741       10218 : void MacroAssembler::SmiSub(Register dst,
    1742             :                             Register src1,
    1743             :                             Register src2,
    1744             :                             Label* on_not_smi_result,
    1745             :                             Label::Distance near_jump) {
    1746             :   DCHECK_NOT_NULL(on_not_smi_result);
    1747             :   DCHECK(!dst.is(src2));
    1748       10218 :   SmiSubHelper<Register>(this, dst, src1, src2, on_not_smi_result, near_jump);
    1749       10218 : }
    1750             : 
    1751             : 
    1752           0 : void MacroAssembler::SmiSub(Register dst,
    1753             :                             Register src1,
    1754             :                             const Operand& src2,
    1755             :                             Label* on_not_smi_result,
    1756             :                             Label::Distance near_jump) {
    1757             :   DCHECK_NOT_NULL(on_not_smi_result);
    1758             :   DCHECK(!src2.AddressUsesRegister(dst));
    1759           0 :   SmiSubHelper<Operand>(this, dst, src1, src2, on_not_smi_result, near_jump);
    1760           0 : }
    1761             : 
    1762             : 
    1763             : template<class T>
    1764        2944 : static void SmiSubNoOverflowHelper(MacroAssembler* masm,
    1765             :                                    Register dst,
    1766             :                                    Register src1,
    1767             :                                    T src2) {
    1768             :   // No overflow checking. Use only when it's known that
    1769             :   // overflowing is impossible (e.g., subtracting two positive smis).
    1770        2944 :   if (!dst.is(src1)) {
    1771           0 :     masm->movp(dst, src1);
    1772             :   }
    1773        2944 :   masm->subp(dst, src2);
    1774             :   masm->Assert(no_overflow, kSmiSubtractionOverflow);
    1775        2944 : }
    1776             : 
    1777             : 
    1778        1472 : void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
    1779             :   DCHECK(!dst.is(src2));
    1780        1472 :   SmiSubNoOverflowHelper<Register>(this, dst, src1, src2);
    1781        1472 : }
    1782             : 
    1783             : 
    1784        1472 : void MacroAssembler::SmiSub(Register dst,
    1785             :                             Register src1,
    1786             :                             const Operand& src2) {
    1787        1472 :   SmiSubNoOverflowHelper<Operand>(this, dst, src1, src2);
    1788        1472 : }
    1789             : 
    1790             : 
    1791       10189 : void MacroAssembler::SmiMul(Register dst,
    1792             :                             Register src1,
    1793             :                             Register src2,
    1794             :                             Label* on_not_smi_result,
    1795             :                             Label::Distance near_jump) {
    1796             :   DCHECK(!dst.is(src2));
    1797             :   DCHECK(!dst.is(kScratchRegister));
    1798             :   DCHECK(!src1.is(kScratchRegister));
    1799             :   DCHECK(!src2.is(kScratchRegister));
    1800             : 
    1801       10189 :   if (dst.is(src1)) {
    1802             :     Label failure, zero_correct_result;
    1803          98 :     movp(kScratchRegister, src1);  // Create backup for later testing.
    1804          98 :     SmiToInteger64(dst, src1);
    1805             :     imulp(dst, src2);
    1806          98 :     j(overflow, &failure, Label::kNear);
    1807             : 
    1808             :     // Check for negative zero result.  If product is zero, and one
    1809             :     // argument is negative, go to slow case.
    1810             :     Label correct_result;
    1811             :     testp(dst, dst);
    1812          98 :     j(not_zero, &correct_result, Label::kNear);
    1813             : 
    1814             :     movp(dst, kScratchRegister);
    1815             :     xorp(dst, src2);
    1816             :     // Result was positive zero.
    1817          98 :     j(positive, &zero_correct_result, Label::kNear);
    1818             : 
    1819          98 :     bind(&failure);  // Reused failure exit, restores src1.
    1820             :     movp(src1, kScratchRegister);
    1821          98 :     jmp(on_not_smi_result, near_jump);
    1822             : 
    1823          98 :     bind(&zero_correct_result);
    1824          98 :     Set(dst, 0);
    1825             : 
    1826          98 :     bind(&correct_result);
    1827             :   } else {
    1828       10091 :     SmiToInteger64(dst, src1);
    1829       10091 :     imulp(dst, src2);
    1830       10091 :     j(overflow, on_not_smi_result, near_jump);
    1831             :     // Check for negative zero result.  If product is zero, and one
    1832             :     // argument is negative, go to slow case.
    1833             :     Label correct_result;
    1834             :     testp(dst, dst);
    1835       10091 :     j(not_zero, &correct_result, Label::kNear);
    1836             :     // One of src1 and src2 is zero, the check whether the other is
    1837             :     // negative.
    1838             :     movp(kScratchRegister, src1);
    1839             :     xorp(kScratchRegister, src2);
    1840       10091 :     j(negative, on_not_smi_result, near_jump);
    1841       10091 :     bind(&correct_result);
    1842             :   }
    1843       10189 : }
    1844             : 
    1845             : 
    1846         280 : void MacroAssembler::SmiDiv(Register dst,
    1847             :                             Register src1,
    1848             :                             Register src2,
    1849             :                             Label* on_not_smi_result,
    1850             :                             Label::Distance near_jump) {
    1851             :   DCHECK(!src1.is(kScratchRegister));
    1852             :   DCHECK(!src2.is(kScratchRegister));
    1853             :   DCHECK(!dst.is(kScratchRegister));
    1854             :   DCHECK(!src2.is(rax));
    1855             :   DCHECK(!src2.is(rdx));
    1856             :   DCHECK(!src1.is(rdx));
    1857             : 
    1858             :   // Check for 0 divisor (result is +/-Infinity).
    1859         280 :   testp(src2, src2);
    1860         280 :   j(zero, on_not_smi_result, near_jump);
    1861             : 
    1862         280 :   if (src1.is(rax)) {
    1863             :     movp(kScratchRegister, src1);
    1864             :   }
    1865         280 :   SmiToInteger32(rax, src1);
    1866             :   // We need to rule out dividing Smi::kMinValue by -1, since that would
    1867             :   // overflow in idiv and raise an exception.
    1868             :   // We combine this with negative zero test (negative zero only happens
    1869             :   // when dividing zero by a negative number).
    1870             : 
    1871             :   // We overshoot a little and go to slow case if we divide min-value
    1872             :   // by any negative value, not just -1.
    1873             :   Label safe_div;
    1874             :   testl(rax, Immediate(~Smi::kMinValue));
    1875         280 :   j(not_zero, &safe_div, Label::kNear);
    1876             :   testp(src2, src2);
    1877         280 :   if (src1.is(rax)) {
    1878           0 :     j(positive, &safe_div, Label::kNear);
    1879             :     movp(src1, kScratchRegister);
    1880           0 :     jmp(on_not_smi_result, near_jump);
    1881             :   } else {
    1882         280 :     j(negative, on_not_smi_result, near_jump);
    1883             :   }
    1884         280 :   bind(&safe_div);
    1885             : 
    1886         280 :   SmiToInteger32(src2, src2);
    1887             :   // Sign extend src1 into edx:eax.
    1888         280 :   cdq();
    1889             :   idivl(src2);
    1890         280 :   Integer32ToSmi(src2, src2);
    1891             :   // Check that the remainder is zero.
    1892             :   testl(rdx, rdx);
    1893         280 :   if (src1.is(rax)) {
    1894             :     Label smi_result;
    1895           0 :     j(zero, &smi_result, Label::kNear);
    1896             :     movp(src1, kScratchRegister);
    1897           0 :     jmp(on_not_smi_result, near_jump);
    1898           0 :     bind(&smi_result);
    1899             :   } else {
    1900         280 :     j(not_zero, on_not_smi_result, near_jump);
    1901             :   }
    1902         280 :   if (!dst.is(src1) && src1.is(rax)) {
    1903             :     movp(src1, kScratchRegister);
    1904             :   }
    1905         280 :   Integer32ToSmi(dst, rax);
    1906         280 : }
    1907             : 
    1908             : 
    1909         280 : void MacroAssembler::SmiMod(Register dst,
    1910             :                             Register src1,
    1911             :                             Register src2,
    1912             :                             Label* on_not_smi_result,
    1913             :                             Label::Distance near_jump) {
    1914             :   DCHECK(!dst.is(kScratchRegister));
    1915             :   DCHECK(!src1.is(kScratchRegister));
    1916             :   DCHECK(!src2.is(kScratchRegister));
    1917             :   DCHECK(!src2.is(rax));
    1918             :   DCHECK(!src2.is(rdx));
    1919             :   DCHECK(!src1.is(rdx));
    1920             :   DCHECK(!src1.is(src2));
    1921             : 
    1922         280 :   testp(src2, src2);
    1923         280 :   j(zero, on_not_smi_result, near_jump);
    1924             : 
    1925         280 :   if (src1.is(rax)) {
    1926             :     movp(kScratchRegister, src1);
    1927             :   }
    1928         280 :   SmiToInteger32(rax, src1);
    1929         280 :   SmiToInteger32(src2, src2);
    1930             : 
    1931             :   // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
    1932             :   Label safe_div;
    1933         280 :   cmpl(rax, Immediate(Smi::kMinValue));
    1934         280 :   j(not_equal, &safe_div, Label::kNear);
    1935         280 :   cmpl(src2, Immediate(-1));
    1936         280 :   j(not_equal, &safe_div, Label::kNear);
    1937             :   // Retag inputs and go slow case.
    1938         280 :   Integer32ToSmi(src2, src2);
    1939         280 :   if (src1.is(rax)) {
    1940             :     movp(src1, kScratchRegister);
    1941             :   }
    1942         280 :   jmp(on_not_smi_result, near_jump);
    1943         280 :   bind(&safe_div);
    1944             : 
    1945             :   // Sign extend eax into edx:eax.
    1946         280 :   cdq();
    1947             :   idivl(src2);
    1948             :   // Restore smi tags on inputs.
    1949         280 :   Integer32ToSmi(src2, src2);
    1950         280 :   if (src1.is(rax)) {
    1951             :     movp(src1, kScratchRegister);
    1952             :   }
    1953             :   // Check for a negative zero result.  If the result is zero, and the
    1954             :   // dividend is negative, go slow to return a floating point negative zero.
    1955             :   Label smi_result;
    1956             :   testl(rdx, rdx);
    1957         280 :   j(not_zero, &smi_result, Label::kNear);
    1958             :   testp(src1, src1);
    1959         280 :   j(negative, on_not_smi_result, near_jump);
    1960         280 :   bind(&smi_result);
    1961         280 :   Integer32ToSmi(dst, rdx);
    1962         280 : }
    1963             : 
    1964             : 
    1965         112 : void MacroAssembler::SmiNot(Register dst, Register src) {
    1966             :   DCHECK(!dst.is(kScratchRegister));
    1967             :   DCHECK(!src.is(kScratchRegister));
    1968             :   if (SmiValuesAre32Bits()) {
    1969             :     // Set tag and padding bits before negating, so that they are zero
    1970             :     // afterwards.
    1971         112 :     movl(kScratchRegister, Immediate(~0));
    1972             :   } else {
    1973             :     DCHECK(SmiValuesAre31Bits());
    1974             :     movl(kScratchRegister, Immediate(1));
    1975             :   }
    1976         112 :   if (dst.is(src)) {
    1977             :     xorp(dst, kScratchRegister);
    1978             :   } else {
    1979         112 :     leap(dst, Operand(src, kScratchRegister, times_1, 0));
    1980             :   }
    1981             :   notp(dst);
    1982         112 : }
    1983             : 
    1984             : 
    1985        3952 : void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
    1986             :   DCHECK(!dst.is(src2));
    1987        3952 :   if (!dst.is(src1)) {
    1988        3875 :     movp(dst, src1);
    1989             :   }
    1990        3952 :   andp(dst, src2);
    1991        3952 : }
    1992             : 
    1993             : 
    1994         154 : void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
    1995         154 :   if (constant->value() == 0) {
    1996          42 :     Set(dst, 0);
    1997         112 :   } else if (dst.is(src)) {
    1998             :     DCHECK(!dst.is(kScratchRegister));
    1999          56 :     Register constant_reg = GetSmiConstant(constant);
    2000          56 :     andp(dst, constant_reg);
    2001             :   } else {
    2002          56 :     LoadSmiConstant(dst, constant);
    2003          56 :     andp(dst, src);
    2004             :   }
    2005         154 : }
    2006             : 
    2007             : 
    2008       37443 : void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
    2009       37443 :   if (!dst.is(src1)) {
    2010             :     DCHECK(!src1.is(src2));
    2011       37352 :     movp(dst, src1);
    2012             :   }
    2013       37443 :   orp(dst, src2);
    2014       37443 : }
    2015             : 
    2016             : 
    2017         182 : void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
    2018         182 :   if (dst.is(src)) {
    2019             :     DCHECK(!dst.is(kScratchRegister));
    2020          91 :     Register constant_reg = GetSmiConstant(constant);
    2021          91 :     orp(dst, constant_reg);
    2022             :   } else {
    2023          91 :     LoadSmiConstant(dst, constant);
    2024          91 :     orp(dst, src);
    2025             :   }
    2026         182 : }
    2027             : 
    2028             : 
    2029         486 : void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
    2030         486 :   if (!dst.is(src1)) {
    2031             :     DCHECK(!src1.is(src2));
    2032         395 :     movp(dst, src1);
    2033             :   }
    2034         486 :   xorp(dst, src2);
    2035         486 : }
    2036             : 
    2037             : 
    2038         182 : void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
    2039         182 :   if (dst.is(src)) {
    2040             :     DCHECK(!dst.is(kScratchRegister));
    2041          91 :     Register constant_reg = GetSmiConstant(constant);
    2042          91 :     xorp(dst, constant_reg);
    2043             :   } else {
    2044          91 :     LoadSmiConstant(dst, constant);
    2045          91 :     xorp(dst, src);
    2046             :   }
    2047         182 : }
    2048             : 
    2049             : 
    2050         245 : void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
    2051             :                                                      Register src,
    2052             :                                                      int shift_value) {
    2053             :   DCHECK(is_uint5(shift_value));
    2054         245 :   if (shift_value > 0) {
    2055         196 :     if (dst.is(src)) {
    2056         196 :       sarp(dst, Immediate(shift_value + kSmiShift));
    2057             :       shlp(dst, Immediate(kSmiShift));
    2058             :     } else {
    2059           0 :       UNIMPLEMENTED();  // Not used.
    2060             :     }
    2061             :   }
    2062         245 : }
    2063             : 
    2064             : 
    2065         490 : void MacroAssembler::SmiShiftLeftConstant(Register dst,
    2066             :                                           Register src,
    2067             :                                           int shift_value,
    2068             :                                           Label* on_not_smi_result,
    2069             :                                           Label::Distance near_jump) {
    2070             :   if (SmiValuesAre32Bits()) {
    2071         490 :     if (!dst.is(src)) {
    2072         245 :       movp(dst, src);
    2073             :     }
    2074         490 :     if (shift_value > 0) {
    2075             :       // Shift amount specified by lower 5 bits, not six as the shl opcode.
    2076         392 :       shlq(dst, Immediate(shift_value & 0x1f));
    2077             :     }
    2078             :   } else {
    2079             :     DCHECK(SmiValuesAre31Bits());
    2080             :     if (dst.is(src)) {
    2081             :       UNIMPLEMENTED();  // Not used.
    2082             :     } else {
    2083             :       SmiToInteger32(dst, src);
    2084             :       shll(dst, Immediate(shift_value));
    2085             :       JumpIfNotValidSmiValue(dst, on_not_smi_result, near_jump);
    2086             :       Integer32ToSmi(dst, dst);
    2087             :     }
    2088             :   }
    2089         490 : }
    2090             : 
    2091             : 
    2092         245 : void MacroAssembler::SmiShiftLogicalRightConstant(
    2093             :     Register dst, Register src, int shift_value,
    2094             :     Label* on_not_smi_result, Label::Distance near_jump) {
    2095             :   // Logic right shift interprets its result as an *unsigned* number.
    2096         245 :   if (dst.is(src)) {
    2097           0 :     UNIMPLEMENTED();  // Not used.
    2098             :   } else {
    2099         245 :     if (shift_value == 0) {
    2100          49 :       testp(src, src);
    2101          49 :       j(negative, on_not_smi_result, near_jump);
    2102             :     }
    2103             :     if (SmiValuesAre32Bits()) {
    2104         245 :       movp(dst, src);
    2105         245 :       shrp(dst, Immediate(shift_value + kSmiShift));
    2106             :       shlp(dst, Immediate(kSmiShift));
    2107             :     } else {
    2108             :       DCHECK(SmiValuesAre31Bits());
    2109             :       SmiToInteger32(dst, src);
    2110             :       shrp(dst, Immediate(shift_value));
    2111             :       JumpIfUIntNotValidSmiValue(dst, on_not_smi_result, near_jump);
    2112             :       Integer32ToSmi(dst, dst);
    2113             :     }
    2114             :   }
    2115         245 : }
    2116             : 
    2117             : 
    2118        5852 : void MacroAssembler::SmiShiftLeft(Register dst,
    2119             :                                   Register src1,
    2120             :                                   Register src2,
    2121             :                                   Label* on_not_smi_result,
    2122             :                                   Label::Distance near_jump) {
    2123             :   if (SmiValuesAre32Bits()) {
    2124             :     DCHECK(!dst.is(rcx));
    2125        5852 :     if (!dst.is(src1)) {
    2126        5607 :       movp(dst, src1);
    2127             :     }
    2128             :     // Untag shift amount.
    2129        5852 :     SmiToInteger32(rcx, src2);
    2130             :     // Shift amount specified by lower 5 bits, not six as the shl opcode.
    2131        5852 :     andp(rcx, Immediate(0x1f));
    2132             :     shlq_cl(dst);
    2133             :   } else {
    2134             :     DCHECK(SmiValuesAre31Bits());
    2135             :     DCHECK(!dst.is(kScratchRegister));
    2136             :     DCHECK(!src1.is(kScratchRegister));
    2137             :     DCHECK(!src2.is(kScratchRegister));
    2138             :     DCHECK(!dst.is(src2));
    2139             :     DCHECK(!dst.is(rcx));
    2140             : 
    2141             :     if (src1.is(rcx) || src2.is(rcx)) {
    2142             :       movq(kScratchRegister, rcx);
    2143             :     }
    2144             :     if (dst.is(src1)) {
    2145             :       UNIMPLEMENTED();  // Not used.
    2146             :     } else {
    2147             :       Label valid_result;
    2148             :       SmiToInteger32(dst, src1);
    2149             :       SmiToInteger32(rcx, src2);
    2150             :       shll_cl(dst);
    2151             :       JumpIfValidSmiValue(dst, &valid_result, Label::kNear);
    2152             :       // As src1 or src2 could not be dst, we do not need to restore them for
    2153             :       // clobbering dst.
    2154             :       if (src1.is(rcx) || src2.is(rcx)) {
    2155             :         if (src1.is(rcx)) {
    2156             :           movq(src1, kScratchRegister);
    2157             :         } else {
    2158             :           movq(src2, kScratchRegister);
    2159             :         }
    2160             :       }
    2161             :       jmp(on_not_smi_result, near_jump);
    2162             :       bind(&valid_result);
    2163             :       Integer32ToSmi(dst, dst);
    2164             :     }
    2165             :   }
    2166        5852 : }
    2167             : 
    2168             : 
    2169        5154 : void MacroAssembler::SmiShiftLogicalRight(Register dst,
    2170             :                                           Register src1,
    2171             :                                           Register src2,
    2172             :                                           Label* on_not_smi_result,
    2173             :                                           Label::Distance near_jump) {
    2174             :   DCHECK(!dst.is(kScratchRegister));
    2175             :   DCHECK(!src1.is(kScratchRegister));
    2176             :   DCHECK(!src2.is(kScratchRegister));
    2177             :   DCHECK(!dst.is(src2));
    2178             :   DCHECK(!dst.is(rcx));
    2179        5154 :   if (src1.is(rcx) || src2.is(rcx)) {
    2180        4923 :     movq(kScratchRegister, rcx);
    2181             :   }
    2182        5154 :   if (dst.is(src1)) {
    2183           0 :     UNIMPLEMENTED();  // Not used.
    2184             :   } else {
    2185             :     Label valid_result;
    2186        5154 :     SmiToInteger32(dst, src1);
    2187        5154 :     SmiToInteger32(rcx, src2);
    2188        5154 :     shrl_cl(dst);
    2189        5154 :     JumpIfUIntValidSmiValue(dst, &valid_result, Label::kNear);
    2190             :     // As src1 or src2 could not be dst, we do not need to restore them for
    2191             :     // clobbering dst.
    2192        5154 :     if (src1.is(rcx) || src2.is(rcx)) {
    2193        4923 :       if (src1.is(rcx)) {
    2194             :         movq(src1, kScratchRegister);
    2195             :       } else {
    2196             :         movq(src2, kScratchRegister);
    2197             :       }
    2198             :      }
    2199        5154 :     jmp(on_not_smi_result, near_jump);
    2200        5154 :     bind(&valid_result);
    2201        5154 :     Integer32ToSmi(dst, dst);
    2202             :   }
    2203        5154 : }
    2204             : 
    2205             : 
    2206       21030 : void MacroAssembler::SmiShiftArithmeticRight(Register dst,
    2207             :                                              Register src1,
    2208             :                                              Register src2) {
    2209             :   DCHECK(!dst.is(kScratchRegister));
    2210             :   DCHECK(!src1.is(kScratchRegister));
    2211             :   DCHECK(!src2.is(kScratchRegister));
    2212             :   DCHECK(!dst.is(rcx));
    2213             : 
    2214       21030 :   SmiToInteger32(rcx, src2);
    2215       21030 :   if (!dst.is(src1)) {
    2216       20785 :     movp(dst, src1);
    2217             :   }
    2218       21030 :   SmiToInteger32(dst, dst);
    2219       21030 :   sarl_cl(dst);
    2220       21030 :   Integer32ToSmi(dst, dst);
    2221       21030 : }
    2222             : 
    2223             : 
    2224       10788 : void MacroAssembler::SelectNonSmi(Register dst,
    2225             :                                   Register src1,
    2226             :                                   Register src2,
    2227             :                                   Label* on_not_smis,
    2228             :                                   Label::Distance near_jump) {
    2229             :   DCHECK(!dst.is(kScratchRegister));
    2230             :   DCHECK(!src1.is(kScratchRegister));
    2231             :   DCHECK(!src2.is(kScratchRegister));
    2232             :   DCHECK(!dst.is(src1));
    2233             :   DCHECK(!dst.is(src2));
    2234             :   // Both operands must not be smis.
    2235             : #ifdef DEBUG
    2236             :   Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
    2237             :   Check(not_both_smis, kBothRegistersWereSmisInSelectNonSmi);
    2238             : #endif
    2239             :   STATIC_ASSERT(kSmiTag == 0);
    2240             :   DCHECK_EQ(static_cast<Smi*>(0), Smi::kZero);
    2241       10788 :   movl(kScratchRegister, Immediate(kSmiTagMask));
    2242       10788 :   andp(kScratchRegister, src1);
    2243             :   testl(kScratchRegister, src2);
    2244             :   // If non-zero then both are smis.
    2245       10788 :   j(not_zero, on_not_smis, near_jump);
    2246             : 
    2247             :   // Exactly one operand is a smi.
    2248             :   DCHECK_EQ(1, static_cast<int>(kSmiTagMask));
    2249             :   // kScratchRegister still holds src1 & kSmiTag, which is either zero or one.
    2250       10788 :   subp(kScratchRegister, Immediate(1));
    2251             :   // If src1 is a smi, then scratch register all 1s, else it is all 0s.
    2252             :   movp(dst, src1);
    2253             :   xorp(dst, src2);
    2254       10788 :   andp(dst, kScratchRegister);
    2255             :   // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
    2256             :   xorp(dst, src1);
    2257             :   // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
    2258       10788 : }
    2259             : 
    2260             : 
    2261        3390 : SmiIndex MacroAssembler::SmiToIndex(Register dst,
    2262             :                                     Register src,
    2263             :                                     int shift) {
    2264             :   if (SmiValuesAre32Bits()) {
    2265             :     DCHECK(is_uint6(shift));
    2266             :     // There is a possible optimization if shift is in the range 60-63, but that
    2267             :     // will (and must) never happen.
    2268        3390 :     if (!dst.is(src)) {
    2269         280 :       movp(dst, src);
    2270             :     }
    2271        3390 :     if (shift < kSmiShift) {
    2272        3390 :       sarp(dst, Immediate(kSmiShift - shift));
    2273             :     } else {
    2274           0 :       shlp(dst, Immediate(shift - kSmiShift));
    2275             :     }
    2276        3390 :     return SmiIndex(dst, times_1);
    2277             :   } else {
    2278             :     DCHECK(SmiValuesAre31Bits());
    2279             :     DCHECK(shift >= times_1 && shift <= (static_cast<int>(times_8) + 1));
    2280             :     if (!dst.is(src)) {
    2281             :       movp(dst, src);
    2282             :     }
    2283             :     // We have to sign extend the index register to 64-bit as the SMI might
    2284             :     // be negative.
    2285             :     movsxlq(dst, dst);
    2286             :     if (shift == times_1) {
    2287             :       sarq(dst, Immediate(kSmiShift));
    2288             :       return SmiIndex(dst, times_1);
    2289             :     }
    2290             :     return SmiIndex(dst, static_cast<ScaleFactor>(shift - 1));
    2291             :   }
    2292             : }
    2293             : 
    2294             : 
    2295         560 : SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
    2296             :                                             Register src,
    2297             :                                             int shift) {
    2298             :   if (SmiValuesAre32Bits()) {
    2299             :     // Register src holds a positive smi.
    2300             :     DCHECK(is_uint6(shift));
    2301         560 :     if (!dst.is(src)) {
    2302         280 :       movp(dst, src);
    2303             :     }
    2304         560 :     negp(dst);
    2305         560 :     if (shift < kSmiShift) {
    2306         560 :       sarp(dst, Immediate(kSmiShift - shift));
    2307             :     } else {
    2308           0 :       shlp(dst, Immediate(shift - kSmiShift));
    2309             :     }
    2310         560 :     return SmiIndex(dst, times_1);
    2311             :   } else {
    2312             :     DCHECK(SmiValuesAre31Bits());
    2313             :     DCHECK(shift >= times_1 && shift <= (static_cast<int>(times_8) + 1));
    2314             :     if (!dst.is(src)) {
    2315             :       movp(dst, src);
    2316             :     }
    2317             :     negq(dst);
    2318             :     if (shift == times_1) {
    2319             :       sarq(dst, Immediate(kSmiShift));
    2320             :       return SmiIndex(dst, times_1);
    2321             :     }
    2322             :     return SmiIndex(dst, static_cast<ScaleFactor>(shift - 1));
    2323             :   }
    2324             : }
    2325             : 
    2326             : 
    2327           0 : void MacroAssembler::AddSmiField(Register dst, const Operand& src) {
    2328             :   if (SmiValuesAre32Bits()) {
    2329             :     DCHECK_EQ(0, kSmiShift % kBitsPerByte);
    2330           0 :     addl(dst, Operand(src, kSmiShift / kBitsPerByte));
    2331             :   } else {
    2332             :     DCHECK(SmiValuesAre31Bits());
    2333             :     SmiToInteger32(kScratchRegister, src);
    2334             :     addl(dst, kScratchRegister);
    2335             :   }
    2336           0 : }
    2337             : 
    2338             : 
    2339     2303369 : void MacroAssembler::Push(Smi* source) {
    2340     2303369 :   intptr_t smi = reinterpret_cast<intptr_t>(source);
    2341     2303369 :   if (is_int32(smi)) {
    2342      203048 :     Push(Immediate(static_cast<int32_t>(smi)));
    2343      203048 :     return;
    2344             :   }
    2345     4200642 :   int first_byte_set = base::bits::CountTrailingZeros64(smi) / 8;
    2346     2100321 :   int last_byte_set = (63 - base::bits::CountLeadingZeros64(smi)) / 8;
    2347     2100321 :   if (first_byte_set == last_byte_set && kPointerSize == kInt64Size) {
    2348             :     // This sequence has only 7 bytes, compared to the 12 bytes below.
    2349             :     Push(Immediate(0));
    2350             :     movb(Operand(rsp, first_byte_set),
    2351     2797766 :          Immediate(static_cast<int8_t>(smi >> (8 * first_byte_set))));
    2352     1398879 :     return;
    2353             :   }
    2354      701436 :   Register constant = GetSmiConstant(source);
    2355             :   Push(constant);
    2356             : }
    2357             : 
    2358             : 
    2359           0 : void MacroAssembler::PushRegisterAsTwoSmis(Register src, Register scratch) {
    2360             :   DCHECK(!src.is(scratch));
    2361           0 :   movp(scratch, src);
    2362             :   // High bits.
    2363             :   shrp(src, Immediate(kPointerSize * kBitsPerByte - kSmiShift));
    2364             :   shlp(src, Immediate(kSmiShift));
    2365             :   Push(src);
    2366             :   // Low bits.
    2367             :   shlp(scratch, Immediate(kSmiShift));
    2368             :   Push(scratch);
    2369           0 : }
    2370             : 
    2371             : 
    2372           0 : void MacroAssembler::PopRegisterAsTwoSmis(Register dst, Register scratch) {
    2373             :   DCHECK(!dst.is(scratch));
    2374             :   Pop(scratch);
    2375             :   // Low bits.
    2376             :   shrp(scratch, Immediate(kSmiShift));
    2377             :   Pop(dst);
    2378             :   shrp(dst, Immediate(kSmiShift));
    2379             :   // High bits.
    2380             :   shlp(dst, Immediate(kPointerSize * kBitsPerByte - kSmiShift));
    2381           0 :   orp(dst, scratch);
    2382           0 : }
    2383             : 
    2384             : 
    2385           0 : void MacroAssembler::Test(const Operand& src, Smi* source) {
    2386             :   if (SmiValuesAre32Bits()) {
    2387           0 :     testl(Operand(src, kIntSize), Immediate(source->value()));
    2388             :   } else {
    2389             :     DCHECK(SmiValuesAre31Bits());
    2390             :     testl(src, Immediate(source));
    2391             :   }
    2392           0 : }
    2393             : 
    2394             : 
    2395             : // ----------------------------------------------------------------------------
    2396             : 
    2397             : 
    2398           0 : void MacroAssembler::JumpIfNotString(Register object,
    2399             :                                      Register object_map,
    2400             :                                      Label* not_string,
    2401             :                                      Label::Distance near_jump) {
    2402             :   Condition is_smi = CheckSmi(object);
    2403           0 :   j(is_smi, not_string, near_jump);
    2404           0 :   CmpObjectType(object, FIRST_NONSTRING_TYPE, object_map);
    2405           0 :   j(above_equal, not_string, near_jump);
    2406           0 : }
    2407             : 
    2408             : 
    2409       16743 : void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(
    2410             :     Register first_object, Register second_object, Register scratch1,
    2411             :     Register scratch2, Label* on_fail, Label::Distance near_jump) {
    2412             :   // Check that both objects are not smis.
    2413       16743 :   Condition either_smi = CheckEitherSmi(first_object, second_object);
    2414       16743 :   j(either_smi, on_fail, near_jump);
    2415             : 
    2416             :   // Load instance type for both strings.
    2417             :   movp(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
    2418             :   movp(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
    2419             :   movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
    2420             :   movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));
    2421             : 
    2422             :   // Check that both are flat one-byte strings.
    2423             :   DCHECK(kNotStringTag != 0);
    2424             :   const int kFlatOneByteStringMask =
    2425             :       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
    2426             :   const int kFlatOneByteStringTag =
    2427             :       kStringTag | kOneByteStringTag | kSeqStringTag;
    2428             : 
    2429       16743 :   andl(scratch1, Immediate(kFlatOneByteStringMask));
    2430       16743 :   andl(scratch2, Immediate(kFlatOneByteStringMask));
    2431             :   // Interleave the bits to check both scratch1 and scratch2 in one test.
    2432             :   const int kShift = 8;
    2433             :   DCHECK_EQ(0, kFlatOneByteStringMask & (kFlatOneByteStringMask << kShift));
    2434             :   shlp(scratch2, Immediate(kShift));
    2435       16743 :   orp(scratch1, scratch2);
    2436             :   cmpl(scratch1,
    2437       16743 :        Immediate(kFlatOneByteStringTag + (kFlatOneByteStringTag << kShift)));
    2438       16743 :   j(not_equal, on_fail, near_jump);
    2439       16743 : }
    2440             : 
    2441           0 : void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialOneByte(
    2442             :     Register first_object_instance_type, Register second_object_instance_type,
    2443             :     Register scratch1, Register scratch2, Label* on_fail,
    2444             :     Label::Distance near_jump) {
    2445             :   // Load instance type for both strings.
    2446           0 :   movp(scratch1, first_object_instance_type);
    2447             :   movp(scratch2, second_object_instance_type);
    2448             : 
    2449             :   // Check that both are flat one-byte strings.
    2450             :   DCHECK(kNotStringTag != 0);
    2451             :   const int kFlatOneByteStringMask =
    2452             :       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
    2453             :   const int kFlatOneByteStringTag =
    2454             :       kStringTag | kOneByteStringTag | kSeqStringTag;
    2455             : 
    2456           0 :   andl(scratch1, Immediate(kFlatOneByteStringMask));
    2457           0 :   andl(scratch2, Immediate(kFlatOneByteStringMask));
    2458             :   // Interleave the bits to check both scratch1 and scratch2 in one test.
    2459             :   DCHECK_EQ(0, kFlatOneByteStringMask & (kFlatOneByteStringMask << 3));
    2460           0 :   leap(scratch1, Operand(scratch1, scratch2, times_8, 0));
    2461             :   cmpl(scratch1,
    2462           0 :        Immediate(kFlatOneByteStringTag + (kFlatOneByteStringTag << 3)));
    2463           0 :   j(not_equal, on_fail, near_jump);
    2464           0 : }
    2465             : 
    2466             : 
    2467             : template<class T>
    2468        5694 : static void JumpIfNotUniqueNameHelper(MacroAssembler* masm,
    2469             :                                       T operand_or_register,
    2470             :                                       Label* not_unique_name,
    2471             :                                       Label::Distance distance) {
    2472             :   STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
    2473             :   Label succeed;
    2474        5694 :   masm->testb(operand_or_register,
    2475             :               Immediate(kIsNotStringMask | kIsNotInternalizedMask));
    2476        5694 :   masm->j(zero, &succeed, Label::kNear);
    2477             :   masm->cmpb(operand_or_register, Immediate(static_cast<uint8_t>(SYMBOL_TYPE)));
    2478        5694 :   masm->j(not_equal, not_unique_name, distance);
    2479             : 
    2480        5694 :   masm->bind(&succeed);
    2481        5694 : }
    2482             : 
    2483             : 
    2484        5610 : void MacroAssembler::JumpIfNotUniqueNameInstanceType(Operand operand,
    2485             :                                                      Label* not_unique_name,
    2486             :                                                      Label::Distance distance) {
    2487        5610 :   JumpIfNotUniqueNameHelper<Operand>(this, operand, not_unique_name, distance);
    2488        5610 : }
    2489             : 
    2490             : 
    2491          84 : void MacroAssembler::JumpIfNotUniqueNameInstanceType(Register reg,
    2492             :                                                      Label* not_unique_name,
    2493             :                                                      Label::Distance distance) {
    2494          84 :   JumpIfNotUniqueNameHelper<Register>(this, reg, not_unique_name, distance);
    2495          84 : }
    2496             : 
    2497             : 
    2498     4859353 : void MacroAssembler::Move(Register dst, Register src) {
    2499     4859353 :   if (!dst.is(src)) {
    2500     1628517 :     movp(dst, src);
    2501             :   }
    2502     4859353 : }
    2503             : 
    2504             : 
    2505    13647012 : void MacroAssembler::Move(Register dst, Handle<Object> source) {
    2506             :   AllowDeferredHandleDereference smi_check;
    2507    13647012 :   if (source->IsSmi()) {
    2508             :     Move(dst, Smi::cast(*source));
    2509             :   } else {
    2510             :     MoveHeapObject(dst, source);
    2511             :   }
    2512    13647013 : }
    2513             : 
    2514             : 
    2515       86422 : void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
    2516             :   AllowDeferredHandleDereference smi_check;
    2517       86422 :   if (source->IsSmi()) {
    2518          93 :     Move(dst, Smi::cast(*source));
    2519             :   } else {
    2520             :     MoveHeapObject(kScratchRegister, source);
    2521             :     movp(dst, kScratchRegister);
    2522             :   }
    2523       86422 : }
    2524             : 
    2525             : 
    2526        9334 : void MacroAssembler::Move(XMMRegister dst, uint32_t src) {
    2527        9334 :   if (src == 0) {
    2528         739 :     Xorpd(dst, dst);
    2529             :   } else {
    2530             :     unsigned pop = base::bits::CountPopulation32(src);
    2531             :     DCHECK_NE(0u, pop);
    2532        8595 :     if (pop == 32) {
    2533           0 :       Pcmpeqd(dst, dst);
    2534             :     } else {
    2535        8595 :       movl(kScratchRegister, Immediate(src));
    2536        8595 :       Movq(dst, kScratchRegister);
    2537             :     }
    2538             :   }
    2539        9334 : }
    2540             : 
    2541             : 
    2542      355502 : void MacroAssembler::Move(XMMRegister dst, uint64_t src) {
    2543      355502 :   if (src == 0) {
    2544      107657 :     Xorpd(dst, dst);
    2545             :   } else {
    2546             :     unsigned nlz = base::bits::CountLeadingZeros64(src);
    2547             :     unsigned ntz = base::bits::CountTrailingZeros64(src);
    2548             :     unsigned pop = base::bits::CountPopulation64(src);
    2549             :     DCHECK_NE(0u, pop);
    2550      247845 :     if (pop == 64) {
    2551           0 :       Pcmpeqd(dst, dst);
    2552      247845 :     } else if (pop + ntz == 64) {
    2553       16566 :       Pcmpeqd(dst, dst);
    2554       16566 :       Psllq(dst, ntz);
    2555      231279 :     } else if (pop + nlz == 64) {
    2556          32 :       Pcmpeqd(dst, dst);
    2557          32 :       Psrlq(dst, nlz);
    2558             :     } else {
    2559      231247 :       uint32_t lower = static_cast<uint32_t>(src);
    2560      231247 :       uint32_t upper = static_cast<uint32_t>(src >> 32);
    2561      231247 :       if (upper == 0) {
    2562           2 :         Move(dst, lower);
    2563             :       } else {
    2564      231245 :         movq(kScratchRegister, src);
    2565      231245 :         Movq(dst, kScratchRegister);
    2566             :       }
    2567             :     }
    2568             :   }
    2569      355502 : }
    2570             : 
    2571             : 
    2572          44 : void MacroAssembler::Movaps(XMMRegister dst, XMMRegister src) {
    2573          44 :   if (CpuFeatures::IsSupported(AVX)) {
    2574             :     CpuFeatureScope scope(this, AVX);
    2575          44 :     vmovaps(dst, src);
    2576             :   } else {
    2577           0 :     movaps(dst, src);
    2578             :   }
    2579          44 : }
    2580             : 
    2581           0 : void MacroAssembler::Movups(XMMRegister dst, XMMRegister src) {
    2582           0 :   if (CpuFeatures::IsSupported(AVX)) {
    2583             :     CpuFeatureScope scope(this, AVX);
    2584           0 :     vmovups(dst, src);
    2585             :   } else {
    2586           0 :     movups(dst, src);
    2587             :   }
    2588           0 : }
    2589             : 
    2590          14 : void MacroAssembler::Movups(XMMRegister dst, const Operand& src) {
    2591          14 :   if (CpuFeatures::IsSupported(AVX)) {
    2592             :     CpuFeatureScope scope(this, AVX);
    2593          14 :     vmovups(dst, src);
    2594             :   } else {
    2595           0 :     movups(dst, src);
    2596             :   }
    2597          14 : }
    2598             : 
    2599          14 : void MacroAssembler::Movups(const Operand& dst, XMMRegister src) {
    2600          14 :   if (CpuFeatures::IsSupported(AVX)) {
    2601             :     CpuFeatureScope scope(this, AVX);
    2602          14 :     vmovups(dst, src);
    2603             :   } else {
    2604           0 :     movups(dst, src);
    2605             :   }
    2606          14 : }
    2607             : 
    2608      117165 : void MacroAssembler::Movapd(XMMRegister dst, XMMRegister src) {
    2609      117165 :   if (CpuFeatures::IsSupported(AVX)) {
    2610             :     CpuFeatureScope scope(this, AVX);
    2611      116980 :     vmovapd(dst, src);
    2612             :   } else {
    2613         185 :     movapd(dst, src);
    2614             :   }
    2615      117165 : }
    2616             : 
    2617          14 : void MacroAssembler::Movupd(XMMRegister dst, const Operand& src) {
    2618          14 :   if (CpuFeatures::IsSupported(AVX)) {
    2619             :     CpuFeatureScope scope(this, AVX);
    2620          14 :     vmovupd(dst, src);
    2621             :   } else {
    2622           0 :     movupd(dst, src);
    2623             :   }
    2624          14 : }
    2625             : 
    2626          14 : void MacroAssembler::Movupd(const Operand& dst, XMMRegister src) {
    2627          14 :   if (CpuFeatures::IsSupported(AVX)) {
    2628             :     CpuFeatureScope scope(this, AVX);
    2629          14 :     vmovupd(dst, src);
    2630             :   } else {
    2631           0 :     movupd(dst, src);
    2632             :   }
    2633          14 : }
    2634             : 
    2635        1132 : void MacroAssembler::Movsd(XMMRegister dst, XMMRegister src) {
    2636        1132 :   if (CpuFeatures::IsSupported(AVX)) {
    2637             :     CpuFeatureScope scope(this, AVX);
    2638        1122 :     vmovsd(dst, dst, src);
    2639             :   } else {
    2640          10 :     movsd(dst, src);
    2641             :   }
    2642        1132 : }
    2643             : 
    2644             : 
    2645     3050877 : void MacroAssembler::Movsd(XMMRegister dst, const Operand& src) {
    2646     3050877 :   if (CpuFeatures::IsSupported(AVX)) {
    2647             :     CpuFeatureScope scope(this, AVX);
    2648     3042962 :     vmovsd(dst, src);
    2649             :   } else {
    2650        7915 :     movsd(dst, src);
    2651             :   }
    2652     3050877 : }
    2653             : 
    2654             : 
    2655     2721665 : void MacroAssembler::Movsd(const Operand& dst, XMMRegister src) {
    2656     2721665 :   if (CpuFeatures::IsSupported(AVX)) {
    2657             :     CpuFeatureScope scope(this, AVX);
    2658     2714252 :     vmovsd(dst, src);
    2659             :   } else {
    2660        7413 :     movsd(dst, src);
    2661             :   }
    2662     2721665 : }
    2663             : 
    2664             : 
    2665          58 : void MacroAssembler::Movss(XMMRegister dst, XMMRegister src) {
    2666          58 :   if (CpuFeatures::IsSupported(AVX)) {
    2667             :     CpuFeatureScope scope(this, AVX);
    2668          58 :     vmovss(dst, dst, src);
    2669             :   } else {
    2670           0 :     movss(dst, src);
    2671             :   }
    2672          58 : }
    2673             : 
    2674             : 
    2675       17969 : void MacroAssembler::Movss(XMMRegister dst, const Operand& src) {
    2676       17969 :   if (CpuFeatures::IsSupported(AVX)) {
    2677             :     CpuFeatureScope scope(this, AVX);
    2678       17969 :     vmovss(dst, src);
    2679             :   } else {
    2680           0 :     movss(dst, src);
    2681             :   }
    2682       17969 : }
    2683             : 
    2684             : 
    2685     1288878 : void MacroAssembler::Movss(const Operand& dst, XMMRegister src) {
    2686     1288878 :   if (CpuFeatures::IsSupported(AVX)) {
    2687             :     CpuFeatureScope scope(this, AVX);
    2688     1288863 :     vmovss(dst, src);
    2689             :   } else {
    2690          15 :     movss(dst, src);
    2691             :   }
    2692     1288878 : }
    2693             : 
    2694             : 
    2695          52 : void MacroAssembler::Movd(XMMRegister dst, Register src) {
    2696          52 :   if (CpuFeatures::IsSupported(AVX)) {
    2697             :     CpuFeatureScope scope(this, AVX);
    2698          52 :     vmovd(dst, src);
    2699             :   } else {
    2700           0 :     movd(dst, src);
    2701             :   }
    2702          52 : }
    2703             : 
    2704             : 
    2705           0 : void MacroAssembler::Movd(XMMRegister dst, const Operand& src) {
    2706           0 :   if (CpuFeatures::IsSupported(AVX)) {
    2707             :     CpuFeatureScope scope(this, AVX);
    2708           0 :     vmovd(dst, src);
    2709             :   } else {
    2710           0 :     movd(dst, src);
    2711             :   }
    2712           0 : }
    2713             : 
    2714             : 
    2715         246 : void MacroAssembler::Movd(Register dst, XMMRegister src) {
    2716         246 :   if (CpuFeatures::IsSupported(AVX)) {
    2717             :     CpuFeatureScope scope(this, AVX);
    2718         246 :     vmovd(dst, src);
    2719             :   } else {
    2720           0 :     movd(dst, src);
    2721             :   }
    2722         246 : }
    2723             : 
    2724             : 
    2725      241402 : void MacroAssembler::Movq(XMMRegister dst, Register src) {
    2726      241402 :   if (CpuFeatures::IsSupported(AVX)) {
    2727             :     CpuFeatureScope scope(this, AVX);
    2728      241128 :     vmovq(dst, src);
    2729             :   } else {
    2730         274 :     movq(dst, src);
    2731             :   }
    2732      241402 : }
    2733             : 
    2734             : 
    2735       19176 : void MacroAssembler::Movq(Register dst, XMMRegister src) {
    2736       19176 :   if (CpuFeatures::IsSupported(AVX)) {
    2737             :     CpuFeatureScope scope(this, AVX);
    2738       19176 :     vmovq(dst, src);
    2739             :   } else {
    2740           0 :     movq(dst, src);
    2741             :   }
    2742       19176 : }
    2743             : 
    2744          58 : void MacroAssembler::Movmskps(Register dst, XMMRegister src) {
    2745          58 :   if (CpuFeatures::IsSupported(AVX)) {
    2746             :     CpuFeatureScope scope(this, AVX);
    2747          58 :     vmovmskps(dst, src);
    2748             :   } else {
    2749           0 :     movmskps(dst, src);
    2750             :   }
    2751          58 : }
    2752             : 
    2753        7150 : void MacroAssembler::Movmskpd(Register dst, XMMRegister src) {
    2754        7150 :   if (CpuFeatures::IsSupported(AVX)) {
    2755             :     CpuFeatureScope scope(this, AVX);
    2756        7148 :     vmovmskpd(dst, src);
    2757             :   } else {
    2758           2 :     movmskpd(dst, src);
    2759             :   }
    2760        7150 : }
    2761             : 
    2762         121 : void MacroAssembler::Xorps(XMMRegister dst, XMMRegister src) {
    2763         121 :   if (CpuFeatures::IsSupported(AVX)) {
    2764             :     CpuFeatureScope scope(this, AVX);
    2765         121 :     vxorps(dst, dst, src);
    2766             :   } else {
    2767           0 :     xorps(dst, src);
    2768             :   }
    2769         121 : }
    2770             : 
    2771          14 : void MacroAssembler::Xorps(XMMRegister dst, const Operand& src) {
    2772          14 :   if (CpuFeatures::IsSupported(AVX)) {
    2773             :     CpuFeatureScope scope(this, AVX);
    2774          14 :     vxorps(dst, dst, src);
    2775             :   } else {
    2776           0 :     xorps(dst, src);
    2777             :   }
    2778          14 : }
    2779             : 
    2780         239 : void MacroAssembler::Roundss(XMMRegister dst, XMMRegister src,
    2781             :                              RoundingMode mode) {
    2782         239 :   if (CpuFeatures::IsSupported(AVX)) {
    2783             :     CpuFeatureScope scope(this, AVX);
    2784         239 :     vroundss(dst, dst, src, mode);
    2785             :   } else {
    2786           0 :     roundss(dst, src, mode);
    2787             :   }
    2788         239 : }
    2789             : 
    2790             : 
    2791       41881 : void MacroAssembler::Roundsd(XMMRegister dst, XMMRegister src,
    2792             :                              RoundingMode mode) {
    2793       41881 :   if (CpuFeatures::IsSupported(AVX)) {
    2794             :     CpuFeatureScope scope(this, AVX);
    2795       41881 :     vroundsd(dst, dst, src, mode);
    2796             :   } else {
    2797           0 :     roundsd(dst, src, mode);
    2798             :   }
    2799       41881 : }
    2800             : 
    2801             : 
    2802         359 : void MacroAssembler::Sqrtsd(XMMRegister dst, XMMRegister src) {
    2803         359 :   if (CpuFeatures::IsSupported(AVX)) {
    2804             :     CpuFeatureScope scope(this, AVX);
    2805         358 :     vsqrtsd(dst, dst, src);
    2806             :   } else {
    2807           1 :     sqrtsd(dst, src);
    2808             :   }
    2809         359 : }
    2810             : 
    2811             : 
    2812           3 : void MacroAssembler::Sqrtsd(XMMRegister dst, const Operand& src) {
    2813           3 :   if (CpuFeatures::IsSupported(AVX)) {
    2814             :     CpuFeatureScope scope(this, AVX);
    2815           3 :     vsqrtsd(dst, dst, src);
    2816             :   } else {
    2817           0 :     sqrtsd(dst, src);
    2818             :   }
    2819           3 : }
    2820             : 
    2821             : 
    2822          78 : void MacroAssembler::Ucomiss(XMMRegister src1, XMMRegister src2) {
    2823          78 :   if (CpuFeatures::IsSupported(AVX)) {
    2824             :     CpuFeatureScope scope(this, AVX);
    2825          78 :     vucomiss(src1, src2);
    2826             :   } else {
    2827           0 :     ucomiss(src1, src2);
    2828             :   }
    2829          78 : }
    2830             : 
    2831             : 
    2832          56 : void MacroAssembler::Ucomiss(XMMRegister src1, const Operand& src2) {
    2833          56 :   if (CpuFeatures::IsSupported(AVX)) {
    2834             :     CpuFeatureScope scope(this, AVX);
    2835          56 :     vucomiss(src1, src2);
    2836             :   } else {
    2837           0 :     ucomiss(src1, src2);
    2838             :   }
    2839          56 : }
    2840             : 
    2841             : 
    2842      108255 : void MacroAssembler::Ucomisd(XMMRegister src1, XMMRegister src2) {
    2843      108255 :   if (CpuFeatures::IsSupported(AVX)) {
    2844             :     CpuFeatureScope scope(this, AVX);
    2845      107607 :     vucomisd(src1, src2);
    2846             :   } else {
    2847         648 :     ucomisd(src1, src2);
    2848             :   }
    2849      108255 : }
    2850             : 
    2851             : 
    2852       69903 : void MacroAssembler::Ucomisd(XMMRegister src1, const Operand& src2) {
    2853       69903 :   if (CpuFeatures::IsSupported(AVX)) {
    2854             :     CpuFeatureScope scope(this, AVX);
    2855       69903 :     vucomisd(src1, src2);
    2856             :   } else {
    2857           0 :     ucomisd(src1, src2);
    2858             :   }
    2859       69903 : }
    2860             : 
    2861             : // ----------------------------------------------------------------------------
    2862             : 
    2863           7 : void MacroAssembler::Absps(XMMRegister dst) {
    2864             :   Andps(dst,
    2865           7 :         ExternalOperand(ExternalReference::address_of_float_abs_constant()));
    2866           7 : }
    2867             : 
    2868           7 : void MacroAssembler::Negps(XMMRegister dst) {
    2869             :   Xorps(dst,
    2870           7 :         ExternalOperand(ExternalReference::address_of_float_neg_constant()));
    2871           7 : }
    2872             : 
    2873           7 : void MacroAssembler::Abspd(XMMRegister dst) {
    2874             :   Andps(dst,
    2875           7 :         ExternalOperand(ExternalReference::address_of_double_abs_constant()));
    2876           7 : }
    2877             : 
    2878           7 : void MacroAssembler::Negpd(XMMRegister dst) {
    2879             :   Xorps(dst,
    2880           7 :         ExternalOperand(ExternalReference::address_of_double_neg_constant()));
    2881           7 : }
    2882             : 
    2883       85682 : void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
    2884             :   AllowDeferredHandleDereference smi_check;
    2885       85682 :   if (source->IsSmi()) {
    2886         484 :     Cmp(dst, Smi::cast(*source));
    2887             :   } else {
    2888             :     MoveHeapObject(kScratchRegister, source);
    2889       85198 :     cmpp(dst, kScratchRegister);
    2890             :   }
    2891       85682 : }
    2892             : 
    2893             : 
    2894      130826 : void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
    2895             :   AllowDeferredHandleDereference smi_check;
    2896      130826 :   if (source->IsSmi()) {
    2897           0 :     Cmp(dst, Smi::cast(*source));
    2898             :   } else {
    2899             :     MoveHeapObject(kScratchRegister, source);
    2900             :     cmpp(dst, kScratchRegister);
    2901             :   }
    2902      130826 : }
    2903             : 
    2904             : 
    2905     2886882 : void MacroAssembler::Push(Handle<Object> source) {
    2906             :   AllowDeferredHandleDereference smi_check;
    2907     2886882 :   if (source->IsSmi()) {
    2908      189525 :     Push(Smi::cast(*source));
    2909             :   } else {
    2910             :     MoveHeapObject(kScratchRegister, source);
    2911             :     Push(kScratchRegister);
    2912             :   }
    2913     2886887 : }
    2914             : 
    2915             : 
    2916           0 : void MacroAssembler::MoveHeapObject(Register result,
    2917             :                                     Handle<Object> object) {
    2918             :   DCHECK(object->IsHeapObject());
    2919             :   Move(result, object, RelocInfo::EMBEDDED_OBJECT);
    2920           0 : }
    2921             : 
    2922             : 
    2923       20500 : void MacroAssembler::GetWeakValue(Register value, Handle<WeakCell> cell) {
    2924             :   Move(value, cell, RelocInfo::EMBEDDED_OBJECT);
    2925             :   movp(value, FieldOperand(value, WeakCell::kValueOffset));
    2926       20500 : }
    2927             : 
    2928             : 
    2929       18341 : void MacroAssembler::LoadWeakValue(Register value, Handle<WeakCell> cell,
    2930             :                                    Label* miss) {
    2931       18341 :   GetWeakValue(value, cell);
    2932       18341 :   JumpIfSmi(value, miss);
    2933       18341 : }
    2934             : 
    2935             : 
    2936     1214399 : void MacroAssembler::Drop(int stack_elements) {
    2937     1214399 :   if (stack_elements > 0) {
    2938     2340736 :     addp(rsp, Immediate(stack_elements * kPointerSize));
    2939             :   }
    2940     1214403 : }
    2941             : 
    2942             : 
    2943         129 : void MacroAssembler::DropUnderReturnAddress(int stack_elements,
    2944             :                                             Register scratch) {
    2945             :   DCHECK(stack_elements > 0);
    2946         129 :   if (kPointerSize == kInt64Size && stack_elements == 1) {
    2947         129 :     popq(MemOperand(rsp, 0));
    2948         258 :     return;
    2949             :   }
    2950             : 
    2951             :   PopReturnAddressTo(scratch);
    2952           0 :   Drop(stack_elements);
    2953             :   PushReturnAddressFrom(scratch);
    2954             : }
    2955             : 
    2956             : 
    2957     6608177 : void MacroAssembler::Push(Register src) {
    2958             :   if (kPointerSize == kInt64Size) {
    2959    12001183 :     pushq(src);
    2960             :   } else {
    2961             :     // x32 uses 64-bit push for rbp in the prologue.
    2962             :     DCHECK(src.code() != rbp.code());
    2963             :     leal(rsp, Operand(rsp, -4));
    2964             :     movp(Operand(rsp, 0), src);
    2965             :   }
    2966     6608183 : }
    2967             : 
    2968             : 
    2969     3781720 : void MacroAssembler::Push(const Operand& src) {
    2970             :   if (kPointerSize == kInt64Size) {
    2971     4690259 :     pushq(src);
    2972             :   } else {
    2973             :     movp(kScratchRegister, src);
    2974             :     leal(rsp, Operand(rsp, -4));
    2975             :     movp(Operand(rsp, 0), kScratchRegister);
    2976             :   }
    2977     3781721 : }
    2978             : 
    2979             : 
    2980     1533150 : void MacroAssembler::PushQuad(const Operand& src) {
    2981             :   if (kPointerSize == kInt64Size) {
    2982     1533150 :     pushq(src);
    2983             :   } else {
    2984             :     movp(kScratchRegister, src);
    2985             :     pushq(kScratchRegister);
    2986             :   }
    2987     1533150 : }
    2988             : 
    2989             : 
    2990      189039 : void MacroAssembler::Push(Immediate value) {
    2991             :   if (kPointerSize == kInt64Size) {
    2992     2142752 :     pushq(value);
    2993             :   } else {
    2994             :     leal(rsp, Operand(rsp, -4));
    2995             :     movp(Operand(rsp, 0), value);
    2996             :   }
    2997      189039 : }
    2998             : 
    2999             : 
    3000           0 : void MacroAssembler::PushImm32(int32_t imm32) {
    3001             :   if (kPointerSize == kInt64Size) {
    3002           0 :     pushq_imm32(imm32);
    3003             :   } else {
    3004             :     leal(rsp, Operand(rsp, -4));
    3005             :     movp(Operand(rsp, 0), Immediate(imm32));
    3006             :   }
    3007           0 : }
    3008             : 
    3009             : 
    3010     4555697 : void MacroAssembler::Pop(Register dst) {
    3011             :   if (kPointerSize == kInt64Size) {
    3012     4643272 :     popq(dst);
    3013             :   } else {
    3014             :     // x32 uses 64-bit pop for rbp in the epilogue.
    3015             :     DCHECK(dst.code() != rbp.code());
    3016             :     movp(dst, Operand(rsp, 0));
    3017             :     leal(rsp, Operand(rsp, 4));
    3018             :   }
    3019     4555698 : }
    3020             : 
    3021             : 
    3022       85437 : void MacroAssembler::Pop(const Operand& dst) {
    3023             :   if (kPointerSize == kInt64Size) {
    3024       85523 :     popq(dst);
    3025             :   } else {
    3026             :     Register scratch = dst.AddressUsesRegister(kScratchRegister)
    3027             :         ? kRootRegister : kScratchRegister;
    3028             :     movp(scratch, Operand(rsp, 0));
    3029             :     movp(dst, scratch);
    3030             :     leal(rsp, Operand(rsp, 4));
    3031             :     if (scratch.is(kRootRegister)) {
    3032             :       // Restore kRootRegister.
    3033             :       InitializeRootRegister();
    3034             :     }
    3035             :   }
    3036       85437 : }
    3037             : 
    3038             : 
    3039     1362800 : void MacroAssembler::PopQuad(const Operand& dst) {
    3040             :   if (kPointerSize == kInt64Size) {
    3041     1362800 :     popq(dst);
    3042             :   } else {
    3043             :     popq(kScratchRegister);
    3044             :     movp(dst, kScratchRegister);
    3045             :   }
    3046     1362800 : }
    3047             : 
    3048             : 
    3049        5625 : void MacroAssembler::LoadSharedFunctionInfoSpecialField(Register dst,
    3050             :                                                         Register base,
    3051             :                                                         int offset) {
    3052             :   DCHECK(offset > SharedFunctionInfo::kLengthOffset &&
    3053             :          offset <= SharedFunctionInfo::kSize &&
    3054             :          (((offset - SharedFunctionInfo::kLengthOffset) / kIntSize) % 2 == 1));
    3055             :   if (kPointerSize == kInt64Size) {
    3056        5625 :     movsxlq(dst, FieldOperand(base, offset));
    3057             :   } else {
    3058             :     movp(dst, FieldOperand(base, offset));
    3059             :     SmiToInteger32(dst, dst);
    3060             :   }
    3061        5625 : }
    3062             : 
    3063             : 
    3064           0 : void MacroAssembler::Jump(ExternalReference ext) {
    3065           0 :   LoadAddress(kScratchRegister, ext);
    3066           0 :   jmp(kScratchRegister);
    3067           0 : }
    3068             : 
    3069             : 
    3070           9 : void MacroAssembler::Jump(const Operand& op) {
    3071             :   if (kPointerSize == kInt64Size) {
    3072           9 :     jmp(op);
    3073             :   } else {
    3074             :     movp(kScratchRegister, op);
    3075             :     jmp(kScratchRegister);
    3076             :   }
    3077           9 : }
    3078             : 
    3079             : 
    3080           0 : void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
    3081             :   Move(kScratchRegister, destination, rmode);
    3082           0 :   jmp(kScratchRegister);
    3083           0 : }
    3084             : 
    3085             : 
    3086        5784 : void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
    3087             :   // TODO(X64): Inline this
    3088       11431 :   jmp(code_object, rmode);
    3089        5784 : }
    3090             : 
    3091             : 
    3092           0 : int MacroAssembler::CallSize(ExternalReference ext) {
    3093             :   // Opcode for call kScratchRegister is: Rex.B FF D4 (three bytes).
    3094           0 :   return LoadAddressSize(ext) +
    3095           0 :          Assembler::kCallScratchRegisterInstructionLength;
    3096             : }
    3097             : 
    3098             : 
    3099           0 : void MacroAssembler::Call(ExternalReference ext) {
    3100             : #ifdef DEBUG
    3101             :   int end_position = pc_offset() + CallSize(ext);
    3102             : #endif
    3103           0 :   LoadAddress(kScratchRegister, ext);
    3104           0 :   call(kScratchRegister);
    3105             : #ifdef DEBUG
    3106             :   CHECK_EQ(end_position, pc_offset());
    3107             : #endif
    3108           0 : }
    3109             : 
    3110             : 
    3111      258289 : void MacroAssembler::Call(const Operand& op) {
    3112      258289 :   if (kPointerSize == kInt64Size && !CpuFeatures::IsSupported(ATOM)) {
    3113      258289 :     call(op);
    3114             :   } else {
    3115           0 :     movp(kScratchRegister, op);
    3116           0 :     call(kScratchRegister);
    3117             :   }
    3118      258289 : }
    3119             : 
    3120             : 
    3121     2915366 : void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
    3122             : #ifdef DEBUG
    3123             :   int end_position = pc_offset() + CallSize(destination);
    3124             : #endif
    3125             :   Move(kScratchRegister, destination, rmode);
    3126     2915366 :   call(kScratchRegister);
    3127             : #ifdef DEBUG
    3128             :   CHECK_EQ(pc_offset(), end_position);
    3129             : #endif
    3130     2915366 : }
    3131             : 
    3132             : 
    3133    10614603 : void MacroAssembler::Call(Handle<Code> code_object,
    3134             :                           RelocInfo::Mode rmode,
    3135             :                           TypeFeedbackId ast_id) {
    3136             : #ifdef DEBUG
    3137             :   int end_position = pc_offset() + CallSize(code_object);
    3138             : #endif
    3139             :   DCHECK(RelocInfo::IsCodeTarget(rmode) ||
    3140             :       rmode == RelocInfo::CODE_AGE_SEQUENCE);
    3141    13883267 :   call(code_object, rmode, ast_id);
    3142             : #ifdef DEBUG
    3143             :   CHECK_EQ(end_position, pc_offset());
    3144             : #endif
    3145    10614611 : }
    3146             : 
    3147             : 
    3148       12468 : void MacroAssembler::Pextrd(Register dst, XMMRegister src, int8_t imm8) {
    3149       12468 :   if (imm8 == 0) {
    3150         126 :     Movd(dst, src);
    3151         126 :     return;
    3152             :   }
    3153       12342 :   if (CpuFeatures::IsSupported(SSE4_1)) {
    3154             :     CpuFeatureScope sse_scope(this, SSE4_1);
    3155       12281 :     pextrd(dst, src, imm8);
    3156             :     return;
    3157             :   }
    3158             :   DCHECK_EQ(1, imm8);
    3159          61 :   movq(dst, src);
    3160             :   shrq(dst, Immediate(32));
    3161             : }
    3162             : 
    3163             : 
    3164          54 : void MacroAssembler::Pinsrd(XMMRegister dst, Register src, int8_t imm8) {
    3165          54 :   if (CpuFeatures::IsSupported(SSE4_1)) {
    3166             :     CpuFeatureScope sse_scope(this, SSE4_1);
    3167          54 :     pinsrd(dst, src, imm8);
    3168          54 :     return;
    3169             :   }
    3170           0 :   Movd(kScratchDoubleReg, src);
    3171           0 :   if (imm8 == 1) {
    3172           0 :     punpckldq(dst, kScratchDoubleReg);
    3173             :   } else {
    3174             :     DCHECK_EQ(0, imm8);
    3175           0 :     Movss(dst, kScratchDoubleReg);
    3176             :   }
    3177             : }
    3178             : 
    3179             : 
    3180           0 : void MacroAssembler::Pinsrd(XMMRegister dst, const Operand& src, int8_t imm8) {
    3181             :   DCHECK(imm8 == 0 || imm8 == 1);
    3182           0 :   if (CpuFeatures::IsSupported(SSE4_1)) {
    3183             :     CpuFeatureScope sse_scope(this, SSE4_1);
    3184           0 :     pinsrd(dst, src, imm8);
    3185           0 :     return;
    3186             :   }
    3187           0 :   Movd(kScratchDoubleReg, src);
    3188           0 :   if (imm8 == 1) {
    3189           0 :     punpckldq(dst, kScratchDoubleReg);
    3190             :   } else {
    3191             :     DCHECK_EQ(0, imm8);
    3192           0 :     Movss(dst, kScratchDoubleReg);
    3193             :   }
    3194             : }
    3195             : 
    3196             : 
    3197         844 : void MacroAssembler::Lzcntl(Register dst, Register src) {
    3198         844 :   if (CpuFeatures::IsSupported(LZCNT)) {
    3199             :     CpuFeatureScope scope(this, LZCNT);
    3200         843 :     lzcntl(dst, src);
    3201         844 :     return;
    3202             :   }
    3203             :   Label not_zero_src;
    3204           1 :   bsrl(dst, src);
    3205           1 :   j(not_zero, &not_zero_src, Label::kNear);
    3206           1 :   Set(dst, 63);  // 63^31 == 32
    3207           1 :   bind(&not_zero_src);
    3208           1 :   xorl(dst, Immediate(31));  // for x in [0..31], 31^x == 31 - x
    3209             : }
    3210             : 
    3211             : 
    3212          45 : void MacroAssembler::Lzcntl(Register dst, const Operand& src) {
    3213          45 :   if (CpuFeatures::IsSupported(LZCNT)) {
    3214             :     CpuFeatureScope scope(this, LZCNT);
    3215          45 :     lzcntl(dst, src);
    3216          45 :     return;
    3217             :   }
    3218             :   Label not_zero_src;
    3219           0 :   bsrl(dst, src);
    3220           0 :   j(not_zero, &not_zero_src, Label::kNear);
    3221           0 :   Set(dst, 63);  // 63^31 == 32
    3222           0 :   bind(&not_zero_src);
    3223           0 :   xorl(dst, Immediate(31));  // for x in [0..31], 31^x == 31 - x
    3224             : }
    3225             : 
    3226             : 
    3227          28 : void MacroAssembler::Lzcntq(Register dst, Register src) {
    3228          28 :   if (CpuFeatures::IsSupported(LZCNT)) {
    3229             :     CpuFeatureScope scope(this, LZCNT);
    3230          28 :     lzcntq(dst, src);
    3231          28 :     return;
    3232             :   }
    3233             :   Label not_zero_src;
    3234           0 :   bsrq(dst, src);
    3235           0 :   j(not_zero, &not_zero_src, Label::kNear);
    3236           0 :   Set(dst, 127);  // 127^63 == 64
    3237           0 :   bind(&not_zero_src);
    3238           0 :   xorl(dst, Immediate(63));  // for x in [0..63], 63^x == 63 - x
    3239             : }
    3240             : 
    3241             : 
    3242           0 : void MacroAssembler::Lzcntq(Register dst, const Operand& src) {
    3243           0 :   if (CpuFeatures::IsSupported(LZCNT)) {
    3244             :     CpuFeatureScope scope(this, LZCNT);
    3245           0 :     lzcntq(dst, src);
    3246           0 :     return;
    3247             :   }
    3248             :   Label not_zero_src;
    3249           0 :   bsrq(dst, src);
    3250           0 :   j(not_zero, &not_zero_src, Label::kNear);
    3251           0 :   Set(dst, 127);  // 127^63 == 64
    3252           0 :   bind(&not_zero_src);
    3253           0 :   xorl(dst, Immediate(63));  // for x in [0..63], 63^x == 63 - x
    3254             : }
    3255             : 
    3256             : 
    3257          13 : void MacroAssembler::Tzcntq(Register dst, Register src) {
    3258          13 :   if (CpuFeatures::IsSupported(BMI1)) {
    3259             :     CpuFeatureScope scope(this, BMI1);
    3260          13 :     tzcntq(dst, src);
    3261          13 :     return;
    3262             :   }
    3263             :   Label not_zero_src;
    3264           0 :   bsfq(dst, src);
    3265           0 :   j(not_zero, &not_zero_src, Label::kNear);
    3266             :   // Define the result of tzcnt(0) separately, because bsf(0) is undefined.
    3267           0 :   Set(dst, 64);
    3268           0 :   bind(&not_zero_src);
    3269             : }
    3270             : 
    3271             : 
    3272           0 : void MacroAssembler::Tzcntq(Register dst, const Operand& src) {
    3273           0 :   if (CpuFeatures::IsSupported(BMI1)) {
    3274             :     CpuFeatureScope scope(this, BMI1);
    3275           0 :     tzcntq(dst, src);
    3276           0 :     return;
    3277             :   }
    3278             :   Label not_zero_src;
    3279           0 :   bsfq(dst, src);
    3280           0 :   j(not_zero, &not_zero_src, Label::kNear);
    3281             :   // Define the result of tzcnt(0) separately, because bsf(0) is undefined.
    3282           0 :   Set(dst, 64);
    3283           0 :   bind(&not_zero_src);
    3284             : }
    3285             : 
    3286             : 
    3287         468 : void MacroAssembler::Tzcntl(Register dst, Register src) {
    3288         468 :   if (CpuFeatures::IsSupported(BMI1)) {
    3289             :     CpuFeatureScope scope(this, BMI1);
    3290         468 :     tzcntl(dst, src);
    3291         468 :     return;
    3292             :   }
    3293             :   Label not_zero_src;
    3294           0 :   bsfl(dst, src);
    3295           0 :   j(not_zero, &not_zero_src, Label::kNear);
    3296           0 :   Set(dst, 32);  // The result of tzcnt is 32 if src = 0.
    3297           0 :   bind(&not_zero_src);
    3298             : }
    3299             : 
    3300             : 
    3301           0 : void MacroAssembler::Tzcntl(Register dst, const Operand& src) {
    3302           0 :   if (CpuFeatures::IsSupported(BMI1)) {
    3303             :     CpuFeatureScope scope(this, BMI1);
    3304           0 :     tzcntl(dst, src);
    3305           0 :     return;
    3306             :   }
    3307             :   Label not_zero_src;
    3308           0 :   bsfl(dst, src);
    3309           0 :   j(not_zero, &not_zero_src, Label::kNear);
    3310           0 :   Set(dst, 32);  // The result of tzcnt is 32 if src = 0.
    3311           0 :   bind(&not_zero_src);
    3312             : }
    3313             : 
    3314             : 
    3315          98 : void MacroAssembler::Popcntl(Register dst, Register src) {
    3316          98 :   if (CpuFeatures::IsSupported(POPCNT)) {
    3317             :     CpuFeatureScope scope(this, POPCNT);
    3318          98 :     popcntl(dst, src);
    3319          98 :     return;
    3320             :   }
    3321           0 :   UNREACHABLE();
    3322             : }
    3323             : 
    3324             : 
    3325           0 : void MacroAssembler::Popcntl(Register dst, const Operand& src) {
    3326           0 :   if (CpuFeatures::IsSupported(POPCNT)) {
    3327             :     CpuFeatureScope scope(this, POPCNT);
    3328           0 :     popcntl(dst, src);
    3329           0 :     return;
    3330             :   }
    3331           0 :   UNREACHABLE();
    3332             : }
    3333             : 
    3334             : 
    3335          35 : void MacroAssembler::Popcntq(Register dst, Register src) {
    3336          35 :   if (CpuFeatures::IsSupported(POPCNT)) {
    3337             :     CpuFeatureScope scope(this, POPCNT);
    3338          35 :     popcntq(dst, src);
    3339          35 :     return;
    3340             :   }
    3341           0 :   UNREACHABLE();
    3342             : }
    3343             : 
    3344             : 
    3345           0 : void MacroAssembler::Popcntq(Register dst, const Operand& src) {
    3346           0 :   if (CpuFeatures::IsSupported(POPCNT)) {
    3347             :     CpuFeatureScope scope(this, POPCNT);
    3348           0 :     popcntq(dst, src);
    3349           0 :     return;
    3350             :   }
    3351           0 :   UNREACHABLE();
    3352             : }
    3353             : 
    3354             : 
    3355       85425 : void MacroAssembler::Pushad() {
    3356             :   Push(rax);
    3357             :   Push(rcx);
    3358             :   Push(rdx);
    3359             :   Push(rbx);
    3360             :   // Not pushing rsp or rbp.
    3361             :   Push(rsi);
    3362             :   Push(rdi);
    3363             :   Push(r8);
    3364             :   Push(r9);
    3365             :   // r10 is kScratchRegister.
    3366             :   Push(r11);
    3367             :   Push(r12);
    3368             :   // r13 is kRootRegister.
    3369             :   Push(r14);
    3370             :   Push(r15);
    3371             :   STATIC_ASSERT(12 == kNumSafepointSavedRegisters);
    3372             :   // Use lea for symmetry with Popad.
    3373             :   int sp_delta =
    3374             :       (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
    3375      170850 :   leap(rsp, Operand(rsp, -sp_delta));
    3376       85425 : }
    3377             : 
    3378             : 
    3379       85425 : void MacroAssembler::Popad() {
    3380             :   // Popad must not change the flags, so use lea instead of addq.
    3381             :   int sp_delta =
    3382             :       (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
    3383      170850 :   leap(rsp, Operand(rsp, sp_delta));
    3384             :   Pop(r15);
    3385             :   Pop(r14);
    3386             :   Pop(r12);
    3387             :   Pop(r11);
    3388             :   Pop(r9);
    3389             :   Pop(r8);
    3390             :   Pop(rdi);
    3391             :   Pop(rsi);
    3392             :   Pop(rbx);
    3393             :   Pop(rdx);
    3394             :   Pop(rcx);
    3395             :   Pop(rax);
    3396       85425 : }
    3397             : 
    3398             : 
    3399             : // Order general registers are pushed by Pushad:
    3400             : // rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r14, r15.
    3401             : const int
    3402             : MacroAssembler::kSafepointPushRegisterIndices[Register::kNumRegisters] = {
    3403             :     0,
    3404             :     1,
    3405             :     2,
    3406             :     3,
    3407             :     -1,
    3408             :     -1,
    3409             :     4,
    3410             :     5,
    3411             :     6,
    3412             :     7,
    3413             :     -1,
    3414             :     8,
    3415             :     9,
    3416             :     -1,
    3417             :     10,
    3418             :     11
    3419             : };
    3420             : 
    3421             : 
    3422           0 : void MacroAssembler::StoreToSafepointRegisterSlot(Register dst,
    3423             :                                                   const Immediate& imm) {
    3424           0 :   movp(SafepointRegisterSlot(dst), imm);
    3425           0 : }
    3426             : 
    3427             : 
    3428       25355 : void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
    3429       25355 :   movp(SafepointRegisterSlot(dst), src);
    3430       25355 : }
    3431             : 
    3432             : 
    3433          79 : void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
    3434          79 :   movp(dst, SafepointRegisterSlot(src));
    3435          79 : }
    3436             : 
    3437             : 
    3438       25434 : Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
    3439       25434 :   return Operand(rsp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
    3440             : }
    3441             : 
    3442             : 
    3443          86 : void MacroAssembler::PushStackHandler() {
    3444             :   // Adjust this code if not the case.
    3445             :   STATIC_ASSERT(StackHandlerConstants::kSize == 1 * kPointerSize);
    3446             :   STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
    3447             : 
    3448             :   // Link the current handler as the next handler.
    3449          86 :   ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
    3450         172 :   Push(ExternalOperand(handler_address));
    3451             : 
    3452             :   // Set this new handler as the current one.
    3453          86 :   movp(ExternalOperand(handler_address), rsp);
    3454          86 : }
    3455             : 
    3456             : 
    3457          86 : void MacroAssembler::PopStackHandler() {
    3458             :   STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
    3459          86 :   ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
    3460         172 :   Pop(ExternalOperand(handler_address));
    3461          86 :   addp(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
    3462          86 : }
    3463             : 
    3464             : 
    3465       31697 : void MacroAssembler::Ret() {
    3466       31697 :   ret(0);
    3467       31697 : }
    3468             : 
    3469             : 
    3470     2333374 : void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
    3471     2333374 :   if (is_uint16(bytes_dropped)) {
    3472     2333362 :     ret(bytes_dropped);
    3473             :   } else {
    3474             :     PopReturnAddressTo(scratch);
    3475          12 :     addp(rsp, Immediate(bytes_dropped));
    3476             :     PushReturnAddressFrom(scratch);
    3477          12 :     ret(0);
    3478             :   }
    3479     2333384 : }
    3480             : 
    3481             : 
    3482           0 : void MacroAssembler::FCmp() {
    3483           0 :   fucomip();
    3484           0 :   fstp(0);
    3485           0 : }
    3486             : 
    3487             : 
    3488      161624 : void MacroAssembler::CmpObjectType(Register heap_object,
    3489             :                                    InstanceType type,
    3490             :                                    Register map) {
    3491      161624 :   movp(map, FieldOperand(heap_object, HeapObject::kMapOffset));
    3492      161624 :   CmpInstanceType(map, type);
    3493      161624 : }
    3494             : 
    3495             : 
    3496      491323 : void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
    3497             :   cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
    3498     1473969 :        Immediate(static_cast<int8_t>(type)));
    3499      491323 : }
    3500             : 
    3501       82429 : void MacroAssembler::CompareMap(Register obj, Handle<Map> map) {
    3502       82429 :   Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
    3503       82429 : }
    3504             : 
    3505             : 
    3506          64 : void MacroAssembler::CheckMap(Register obj,
    3507             :                               Handle<Map> map,
    3508             :                               Label* fail,
    3509             :                               SmiCheckType smi_check_type) {
    3510          64 :   if (smi_check_type == DO_SMI_CHECK) {
    3511           0 :     JumpIfSmi(obj, fail);
    3512             :   }
    3513             : 
    3514          64 :   CompareMap(obj, map);
    3515          64 :   j(not_equal, fail);
    3516          64 : }
    3517             : 
    3518             : 
    3519         105 : void MacroAssembler::ClampUint8(Register reg) {
    3520             :   Label done;
    3521         105 :   testl(reg, Immediate(0xFFFFFF00));
    3522         105 :   j(zero, &done, Label::kNear);
    3523         105 :   setcc(negative, reg);  // 1 if negative, 0 if positive.
    3524         105 :   decb(reg);  // 0 if negative, 255 if positive.
    3525         105 :   bind(&done);
    3526         105 : }
    3527             : 
    3528             : 
    3529         244 : void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
    3530             :                                         XMMRegister temp_xmm_reg,
    3531             :                                         Register result_reg) {
    3532             :   Label done;
    3533             :   Label conv_failure;
    3534         244 :   Xorpd(temp_xmm_reg, temp_xmm_reg);
    3535         244 :   Cvtsd2si(result_reg, input_reg);
    3536         244 :   testl(result_reg, Immediate(0xFFFFFF00));
    3537         244 :   j(zero, &done, Label::kNear);
    3538         244 :   cmpl(result_reg, Immediate(1));
    3539         244 :   j(overflow, &conv_failure, Label::kNear);
    3540             :   movl(result_reg, Immediate(0));
    3541         244 :   setcc(sign, result_reg);
    3542         244 :   subl(result_reg, Immediate(1));
    3543         244 :   andl(result_reg, Immediate(255));
    3544         244 :   jmp(&done, Label::kNear);
    3545         244 :   bind(&conv_failure);
    3546         244 :   Set(result_reg, 0);
    3547         244 :   Ucomisd(input_reg, temp_xmm_reg);
    3548         244 :   j(below, &done, Label::kNear);
    3549         244 :   Set(result_reg, 255);
    3550         244 :   bind(&done);
    3551         244 : }
    3552             : 
    3553             : 
    3554        1426 : void MacroAssembler::LoadUint32(XMMRegister dst,
    3555             :                                 Register src) {
    3556        1426 :   if (FLAG_debug_code) {
    3557           0 :     cmpq(src, Immediate(0xffffffff));
    3558             :     Assert(below_equal, kInputGPRIsExpectedToHaveUpper32Cleared);
    3559             :   }
    3560        1426 :   Cvtqsi2sd(dst, src);
    3561        1426 : }
    3562             : 
    3563             : 
    3564       93526 : void MacroAssembler::SlowTruncateToI(Register result_reg,
    3565             :                                      Register input_reg,
    3566       93526 :                                      int offset) {
    3567             :   DoubleToIStub stub(isolate(), input_reg, result_reg, offset, true);
    3568       93526 :   call(stub.GetCode(), RelocInfo::CODE_TARGET);
    3569       93526 : }
    3570             : 
    3571             : 
    3572       16381 : void MacroAssembler::TruncateHeapNumberToI(Register result_reg,
    3573             :                                            Register input_reg) {
    3574             :   Label done;
    3575       16381 :   Movsd(kScratchDoubleReg, FieldOperand(input_reg, HeapNumber::kValueOffset));
    3576       16381 :   Cvttsd2siq(result_reg, kScratchDoubleReg);
    3577       16381 :   cmpq(result_reg, Immediate(1));
    3578       16381 :   j(no_overflow, &done, Label::kNear);
    3579             : 
    3580             :   // Slow case.
    3581       16381 :   if (input_reg.is(result_reg)) {
    3582       16381 :     subp(rsp, Immediate(kDoubleSize));
    3583       16381 :     Movsd(MemOperand(rsp, 0), kScratchDoubleReg);
    3584       16381 :     SlowTruncateToI(result_reg, rsp, 0);
    3585       16381 :     addp(rsp, Immediate(kDoubleSize));
    3586             :   } else {
    3587           0 :     SlowTruncateToI(result_reg, input_reg);
    3588             :   }
    3589             : 
    3590       16381 :   bind(&done);
    3591             :   // Keep our invariant that the upper 32 bits are zero.
    3592             :   movl(result_reg, result_reg);
    3593       16381 : }
    3594             : 
    3595             : 
    3596        6885 : void MacroAssembler::TruncateDoubleToI(Register result_reg,
    3597             :                                        XMMRegister input_reg) {
    3598             :   Label done;
    3599        6885 :   Cvttsd2siq(result_reg, input_reg);
    3600        6885 :   cmpq(result_reg, Immediate(1));
    3601        6885 :   j(no_overflow, &done, Label::kNear);
    3602             : 
    3603        6885 :   subp(rsp, Immediate(kDoubleSize));
    3604        6885 :   Movsd(MemOperand(rsp, 0), input_reg);
    3605        6885 :   SlowTruncateToI(result_reg, rsp, 0);
    3606        6885 :   addp(rsp, Immediate(kDoubleSize));
    3607             : 
    3608        6885 :   bind(&done);
    3609             :   // Keep our invariant that the upper 32 bits are zero.
    3610             :   movl(result_reg, result_reg);
    3611        6885 : }
    3612             : 
    3613             : 
    3614        2074 : void MacroAssembler::DoubleToI(Register result_reg, XMMRegister input_reg,
    3615             :                                XMMRegister scratch,
    3616             :                                MinusZeroMode minus_zero_mode,
    3617             :                                Label* lost_precision, Label* is_nan,
    3618             :                                Label* minus_zero, Label::Distance dst) {
    3619        2074 :   Cvttsd2si(result_reg, input_reg);
    3620        2074 :   Cvtlsi2sd(kScratchDoubleReg, result_reg);
    3621        2074 :   Ucomisd(kScratchDoubleReg, input_reg);
    3622        2074 :   j(not_equal, lost_precision, dst);
    3623        2074 :   j(parity_even, is_nan, dst);  // NaN.
    3624        2074 :   if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
    3625             :     Label done;
    3626             :     // The integer converted back is equal to the original. We
    3627             :     // only have to test if we got -0 as an input.
    3628             :     testl(result_reg, result_reg);
    3629          64 :     j(not_zero, &done, Label::kNear);
    3630          64 :     Movmskpd(result_reg, input_reg);
    3631             :     // Bit 0 contains the sign of the double in input_reg.
    3632             :     // If input was positive, we are ok and return 0, otherwise
    3633             :     // jump to minus_zero.
    3634          64 :     andl(result_reg, Immediate(1));
    3635          64 :     j(not_zero, minus_zero, dst);
    3636          64 :     bind(&done);
    3637             :   }
    3638        2074 : }
    3639             : 
    3640             : 
    3641       22711 : void MacroAssembler::LoadInstanceDescriptors(Register map,
    3642             :                                              Register descriptors) {
    3643       22711 :   movp(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
    3644       22711 : }
    3645             : 
    3646             : 
    3647           0 : void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
    3648           0 :   movl(dst, FieldOperand(map, Map::kBitField3Offset));
    3649           0 :   DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
    3650           0 : }
    3651             : 
    3652             : 
    3653       10948 : void MacroAssembler::EnumLength(Register dst, Register map) {
    3654             :   STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
    3655       10948 :   movl(dst, FieldOperand(map, Map::kBitField3Offset));
    3656       10948 :   andl(dst, Immediate(Map::EnumLengthBits::kMask));
    3657       10948 :   Integer32ToSmi(dst, dst);
    3658       10948 : }
    3659             : 
    3660             : 
    3661       18411 : void MacroAssembler::LoadAccessor(Register dst, Register holder,
    3662             :                                   int accessor_index,
    3663             :                                   AccessorComponent accessor) {
    3664       18411 :   movp(dst, FieldOperand(holder, HeapObject::kMapOffset));
    3665       18411 :   LoadInstanceDescriptors(dst, dst);
    3666             :   movp(dst, FieldOperand(dst, DescriptorArray::GetValueOffset(accessor_index)));
    3667             :   int offset = accessor == ACCESSOR_GETTER ? AccessorPair::kGetterOffset
    3668       18411 :                                            : AccessorPair::kSetterOffset;
    3669             :   movp(dst, FieldOperand(dst, offset));
    3670       18411 : }
    3671             : 
    3672             : 
    3673      919662 : void MacroAssembler::AssertNotSmi(Register object) {
    3674      919662 :   if (emit_debug_code()) {
    3675             :     Condition is_smi = CheckSmi(object);
    3676         142 :     Check(NegateCondition(is_smi), kOperandIsASmi);
    3677             :   }
    3678      919662 : }
    3679             : 
    3680             : 
    3681       74700 : void MacroAssembler::AssertSmi(Register object) {
    3682       74700 :   if (emit_debug_code()) {
    3683             :     Condition is_smi = CheckSmi(object);
    3684          25 :     Check(is_smi, kOperandIsNotASmi);
    3685             :   }
    3686       74700 : }
    3687             : 
    3688             : 
    3689       54005 : void MacroAssembler::AssertSmi(const Operand& object) {
    3690       54005 :   if (emit_debug_code()) {
    3691             :     Condition is_smi = CheckSmi(object);
    3692          21 :     Check(is_smi, kOperandIsNotASmi);
    3693             :   }
    3694       54005 : }
    3695             : 
    3696             : 
    3697      643768 : void MacroAssembler::AssertZeroExtended(Register int32_register) {
    3698      643768 :   if (emit_debug_code()) {
    3699             :     DCHECK(!int32_register.is(kScratchRegister));
    3700          19 :     movq(kScratchRegister, V8_INT64_C(0x0000000100000000));
    3701          19 :     cmpq(kScratchRegister, int32_register);
    3702          19 :     Check(above_equal, k32BitValueInRegisterIsNotZeroExtended);
    3703             :   }
    3704      643768 : }
    3705             : 
    3706             : 
    3707       10148 : void MacroAssembler::AssertFunction(Register object) {
    3708       10148 :   if (emit_debug_code()) {
    3709           0 :     testb(object, Immediate(kSmiTagMask));
    3710           0 :     Check(not_equal, kOperandIsASmiAndNotAFunction);
    3711             :     Push(object);
    3712           0 :     CmpObjectType(object, JS_FUNCTION_TYPE, object);
    3713             :     Pop(object);
    3714           0 :     Check(equal, kOperandIsNotAFunction);
    3715             :   }
    3716       10148 : }
    3717             : 
    3718             : 
    3719         129 : void MacroAssembler::AssertBoundFunction(Register object) {
    3720         129 :   if (emit_debug_code()) {
    3721           0 :     testb(object, Immediate(kSmiTagMask));
    3722           0 :     Check(not_equal, kOperandIsASmiAndNotABoundFunction);
    3723             :     Push(object);
    3724           0 :     CmpObjectType(object, JS_BOUND_FUNCTION_TYPE, object);
    3725             :     Pop(object);
    3726           0 :     Check(equal, kOperandIsNotABoundFunction);
    3727             :   }
    3728         129 : }
    3729             : 
    3730          43 : void MacroAssembler::AssertGeneratorObject(Register object, Register flags) {
    3731             :   // `flags` should be an untagged integer. See `SuspendFlags` in src/globals.h
    3732          86 :   if (!emit_debug_code()) return;
    3733           0 :   testb(object, Immediate(kSmiTagMask));
    3734           0 :   Check(not_equal, kOperandIsASmiAndNotAGeneratorObject);
    3735             : 
    3736             :   // Load map
    3737           0 :   Register map = object;
    3738             :   Push(object);
    3739             :   movp(map, FieldOperand(object, HeapObject::kMapOffset));
    3740             : 
    3741             :   Label async, do_check;
    3742           0 :   testb(flags, Immediate(static_cast<int>(SuspendFlags::kGeneratorTypeMask)));
    3743           0 :   j(not_zero, &async);
    3744             : 
    3745             :   // Check if JSGeneratorObject
    3746           0 :   CmpInstanceType(map, JS_GENERATOR_OBJECT_TYPE);
    3747           0 :   jmp(&do_check);
    3748             : 
    3749           0 :   bind(&async);
    3750             :   // Check if JSAsyncGeneratorObject
    3751           0 :   CmpInstanceType(map, JS_ASYNC_GENERATOR_OBJECT_TYPE);
    3752             : 
    3753           0 :   bind(&do_check);
    3754             :   // Restore generator object to register and perform assertion
    3755             :   Pop(object);
    3756           0 :   Check(equal, kOperandIsNotAGeneratorObject);
    3757             : }
    3758             : 
    3759        4761 : void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
    3760        4739 :   if (emit_debug_code()) {
    3761             :     Label done_checking;
    3762          11 :     AssertNotSmi(object);
    3763          11 :     Cmp(object, isolate()->factory()->undefined_value());
    3764          11 :     j(equal, &done_checking);
    3765          11 :     Cmp(FieldOperand(object, 0), isolate()->factory()->allocation_site_map());
    3766             :     Assert(equal, kExpectedUndefinedOrCell);
    3767          11 :     bind(&done_checking);
    3768             :   }
    3769        4739 : }
    3770             : 
    3771             : 
    3772         426 : Condition MacroAssembler::IsObjectStringType(Register heap_object,
    3773             :                                              Register map,
    3774             :                                              Register instance_type) {
    3775         426 :   movp(map, FieldOperand(heap_object, HeapObject::kMapOffset));
    3776             :   movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
    3777             :   STATIC_ASSERT(kNotStringTag != 0);
    3778         426 :   testb(instance_type, Immediate(kIsNotStringMask));
    3779         426 :   return zero;
    3780             : }
    3781             : 
    3782       36481 : void MacroAssembler::GetMapConstructor(Register result, Register map,
    3783             :                                        Register temp) {
    3784             :   Label done, loop;
    3785       36481 :   movp(result, FieldOperand(map, Map::kConstructorOrBackPointerOffset));
    3786       36481 :   bind(&loop);
    3787       36481 :   JumpIfSmi(result, &done, Label::kNear);
    3788       36481 :   CmpObjectType(result, MAP_TYPE, temp);
    3789       36481 :   j(not_equal, &done, Label::kNear);
    3790             :   movp(result, FieldOperand(result, Map::kConstructorOrBackPointerOffset));
    3791       36481 :   jmp(&loop);
    3792       36481 :   bind(&done);
    3793       36481 : }
    3794             : 
    3795           0 : void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
    3796           0 :   if (FLAG_native_code_counters && counter->Enabled()) {
    3797           0 :     Operand counter_operand = ExternalOperand(ExternalReference(counter));
    3798           0 :     movl(counter_operand, Immediate(value));
    3799             :   }
    3800           0 : }
    3801             : 
    3802             : 
    3803      734393 : void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
    3804             :   DCHECK(value > 0);
    3805      734393 :   if (FLAG_native_code_counters && counter->Enabled()) {
    3806           0 :     Operand counter_operand = ExternalOperand(ExternalReference(counter));
    3807           0 :     if (value == 1) {
    3808           0 :       incl(counter_operand);
    3809             :     } else {
    3810           0 :       addl(counter_operand, Immediate(value));
    3811             :     }
    3812             :   }
    3813      734393 : }
    3814             : 
    3815             : 
    3816        1275 : void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
    3817             :   DCHECK(value > 0);
    3818        1275 :   if (FLAG_native_code_counters && counter->Enabled()) {
    3819           0 :     Operand counter_operand = ExternalOperand(ExternalReference(counter));
    3820           0 :     if (value == 1) {
    3821           0 :       decl(counter_operand);
    3822             :     } else {
    3823           0 :       subl(counter_operand, Immediate(value));
    3824             :     }
    3825             :   }
    3826        1275 : }
    3827             : 
    3828         258 : void MacroAssembler::MaybeDropFrames() {
    3829             :   // Check whether we need to drop frames to restart a function on the stack.
    3830             :   ExternalReference restart_fp =
    3831         129 :       ExternalReference::debug_restart_fp_address(isolate());
    3832         129 :   Load(rbx, restart_fp);
    3833         129 :   testp(rbx, rbx);
    3834             :   j(not_zero, isolate()->builtins()->FrameDropperTrampoline(),
    3835         129 :     RelocInfo::CODE_TARGET);
    3836         129 : }
    3837             : 
    3838        1450 : void MacroAssembler::PrepareForTailCall(const ParameterCount& callee_args_count,
    3839             :                                         Register caller_args_count_reg,
    3840             :                                         Register scratch0, Register scratch1,
    3841             :                                         ReturnAddressState ra_state) {
    3842             : #if DEBUG
    3843             :   if (callee_args_count.is_reg()) {
    3844             :     DCHECK(!AreAliased(callee_args_count.reg(), caller_args_count_reg, scratch0,
    3845             :                        scratch1));
    3846             :   } else {
    3847             :     DCHECK(!AreAliased(caller_args_count_reg, scratch0, scratch1));
    3848             :   }
    3849             : #endif
    3850             : 
    3851             :   // Calculate the destination address where we will put the return address
    3852             :   // after we drop current frame.
    3853        1378 :   Register new_sp_reg = scratch0;
    3854        1378 :   if (callee_args_count.is_reg()) {
    3855        1342 :     subp(caller_args_count_reg, callee_args_count.reg());
    3856             :     leap(new_sp_reg, Operand(rbp, caller_args_count_reg, times_pointer_size,
    3857        2684 :                              StandardFrameConstants::kCallerPCOffset));
    3858             :   } else {
    3859             :     leap(new_sp_reg, Operand(rbp, caller_args_count_reg, times_pointer_size,
    3860             :                              StandardFrameConstants::kCallerPCOffset -
    3861          72 :                                  callee_args_count.immediate() * kPointerSize));
    3862             :   }
    3863             : 
    3864        1378 :   if (FLAG_debug_code) {
    3865           0 :     cmpp(rsp, new_sp_reg);
    3866           0 :     Check(below, kStackAccessBelowStackPointer);
    3867             :   }
    3868             : 
    3869             :   // Copy return address from caller's frame to current frame's return address
    3870             :   // to avoid its trashing and let the following loop copy it to the right
    3871             :   // place.
    3872        1378 :   Register tmp_reg = scratch1;
    3873        1378 :   if (ra_state == ReturnAddressState::kOnStack) {
    3874        2684 :     movp(tmp_reg, Operand(rbp, StandardFrameConstants::kCallerPCOffset));
    3875        2684 :     movp(Operand(rsp, 0), tmp_reg);
    3876             :   } else {
    3877             :     DCHECK(ReturnAddressState::kNotOnStack == ra_state);
    3878          72 :     Push(Operand(rbp, StandardFrameConstants::kCallerPCOffset));
    3879             :   }
    3880             : 
    3881             :   // Restore caller's frame pointer now as it could be overwritten by
    3882             :   // the copying loop.
    3883        2756 :   movp(rbp, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
    3884             : 
    3885             :   // +2 here is to copy both receiver and return address.
    3886        1378 :   Register count_reg = caller_args_count_reg;
    3887        1378 :   if (callee_args_count.is_reg()) {
    3888        2684 :     leap(count_reg, Operand(callee_args_count.reg(), 2));
    3889             :   } else {
    3890          36 :     movp(count_reg, Immediate(callee_args_count.immediate() + 2));
    3891             :     // TODO(ishell): Unroll copying loop for small immediate values.
    3892             :   }
    3893             : 
    3894             :   // Now copy callee arguments to the caller frame going backwards to avoid
    3895             :   // callee arguments corruption (source and destination areas could overlap).
    3896             :   Label loop, entry;
    3897        1378 :   jmp(&entry, Label::kNear);
    3898        1378 :   bind(&loop);
    3899             :   decp(count_reg);
    3900        2756 :   movp(tmp_reg, Operand(rsp, count_reg, times_pointer_size, 0));
    3901        2756 :   movp(Operand(new_sp_reg, count_reg, times_pointer_size, 0), tmp_reg);
    3902        1378 :   bind(&entry);
    3903        1378 :   cmpp(count_reg, Immediate(0));
    3904        1378 :   j(not_equal, &loop, Label::kNear);
    3905             : 
    3906             :   // Leave current frame.
    3907             :   movp(rsp, new_sp_reg);
    3908        1378 : }
    3909             : 
    3910        4851 : void MacroAssembler::InvokeFunction(Register function,
    3911             :                                     Register new_target,
    3912             :                                     const ParameterCount& actual,
    3913             :                                     InvokeFlag flag,
    3914             :                                     const CallWrapper& call_wrapper) {
    3915        4851 :   movp(rbx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
    3916             :   LoadSharedFunctionInfoSpecialField(
    3917        4851 :       rbx, rbx, SharedFunctionInfo::kFormalParameterCountOffset);
    3918             : 
    3919             :   ParameterCount expected(rbx);
    3920        4851 :   InvokeFunction(function, new_target, expected, actual, flag, call_wrapper);
    3921        4851 : }
    3922             : 
    3923             : 
    3924           0 : void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
    3925             :                                     const ParameterCount& expected,
    3926             :                                     const ParameterCount& actual,
    3927             :                                     InvokeFlag flag,
    3928             :                                     const CallWrapper& call_wrapper) {
    3929           0 :   Move(rdi, function);
    3930           0 :   InvokeFunction(rdi, no_reg, expected, actual, flag, call_wrapper);
    3931           0 : }
    3932             : 
    3933             : 
    3934       18165 : void MacroAssembler::InvokeFunction(Register function,
    3935             :                                     Register new_target,
    3936             :                                     const ParameterCount& expected,
    3937             :                                     const ParameterCount& actual,
    3938             :                                     InvokeFlag flag,
    3939             :                                     const CallWrapper& call_wrapper) {
    3940             :   DCHECK(function.is(rdi));
    3941       18165 :   movp(rsi, FieldOperand(function, JSFunction::kContextOffset));
    3942       18165 :   InvokeFunctionCode(rdi, new_target, expected, actual, flag, call_wrapper);
    3943       18165 : }
    3944             : 
    3945             : 
    3946       18423 : void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
    3947             :                                         const ParameterCount& expected,
    3948             :                                         const ParameterCount& actual,
    3949             :                                         InvokeFlag flag,
    3950             :                                         const CallWrapper& call_wrapper) {
    3951             :   // You can't call a function without a valid frame.
    3952             :   DCHECK(flag == JUMP_FUNCTION || has_frame());
    3953             :   DCHECK(function.is(rdi));
    3954             :   DCHECK_IMPLIES(new_target.is_valid(), new_target.is(rdx));
    3955             : 
    3956       18423 :   if (call_wrapper.NeedsDebugHookCheck()) {
    3957         516 :     CheckDebugHook(function, new_target, expected, actual);
    3958             :   }
    3959             : 
    3960             :   // Clear the new.target register if not given.
    3961       18423 :   if (!new_target.is_valid()) {
    3962       18208 :     LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
    3963             :   }
    3964             : 
    3965             :   Label done;
    3966       18423 :   bool definitely_mismatches = false;
    3967             :   InvokePrologue(expected,
    3968             :                  actual,
    3969             :                  &done,
    3970             :                  &definitely_mismatches,
    3971             :                  flag,
    3972             :                  Label::kNear,
    3973       18423 :                  call_wrapper);
    3974       18423 :   if (!definitely_mismatches) {
    3975             :     // We call indirectly through the code field in the function to
    3976             :     // allow recompilation to take effect without changing any of the
    3977             :     // call sites.
    3978        5152 :     Operand code = FieldOperand(function, JSFunction::kCodeEntryOffset);
    3979        5152 :     if (flag == CALL_FUNCTION) {
    3980        9702 :       call_wrapper.BeforeCall(CallSize(code));
    3981        4851 :       call(code);
    3982        4851 :       call_wrapper.AfterCall();
    3983             :     } else {
    3984             :       DCHECK(flag == JUMP_FUNCTION);
    3985         301 :       jmp(code);
    3986             :     }
    3987        5152 :     bind(&done);
    3988             :   }
    3989       18423 : }
    3990             : 
    3991             : 
    3992       31694 : void MacroAssembler::InvokePrologue(const ParameterCount& expected,
    3993       35542 :                                     const ParameterCount& actual,
    3994             :                                     Label* done,
    3995             :                                     bool* definitely_mismatches,
    3996             :                                     InvokeFlag flag,
    3997             :                                     Label::Distance near_jump,
    3998       18380 :                                     const CallWrapper& call_wrapper) {
    3999             :   bool definitely_matches = false;
    4000       18423 :   *definitely_mismatches = false;
    4001             :   Label invoke;
    4002       18423 :   if (expected.is_immediate()) {
    4003             :     DCHECK(actual.is_immediate());
    4004       13271 :     Set(rax, actual.immediate());
    4005       13271 :     if (expected.immediate() == actual.immediate()) {
    4006             :       definitely_matches = true;
    4007             :     } else {
    4008       13271 :       if (expected.immediate() ==
    4009             :               SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
    4010             :         // Don't worry about adapting arguments for built-ins that
    4011             :         // don't want that done. Skip adaption code by making it look
    4012             :         // like we have a match between expected and actual number of
    4013             :         // arguments.
    4014             :         definitely_matches = true;
    4015             :       } else {
    4016       13271 :         *definitely_mismatches = true;
    4017       13271 :         Set(rbx, expected.immediate());
    4018             :       }
    4019             :     }
    4020             :   } else {
    4021        5152 :     if (actual.is_immediate()) {
    4022             :       // Expected is in register, actual is immediate. This is the
    4023             :       // case when we invoke function values without going through the
    4024             :       // IC mechanism.
    4025        4500 :       Set(rax, actual.immediate());
    4026        4500 :       cmpp(expected.reg(), Immediate(actual.immediate()));
    4027        4500 :       j(equal, &invoke, Label::kNear);
    4028             :       DCHECK(expected.reg().is(rbx));
    4029         652 :     } else if (!expected.reg().is(actual.reg())) {
    4030             :       // Both expected and actual are in (different) registers. This
    4031             :       // is the case when we invoke functions using call and apply.
    4032         609 :       cmpp(expected.reg(), actual.reg());
    4033         609 :       j(equal, &invoke, Label::kNear);
    4034             :       DCHECK(actual.reg().is(rax));
    4035             :       DCHECK(expected.reg().is(rbx));
    4036             :     } else {
    4037             :       definitely_matches = true;
    4038          43 :       Move(rax, actual.reg());
    4039             :     }
    4040             :   }
    4041             : 
    4042       18423 :   if (!definitely_matches) {
    4043       18380 :     Handle<Code> adaptor = isolate()->builtins()->ArgumentsAdaptorTrampoline();
    4044       18380 :     if (flag == CALL_FUNCTION) {
    4045       18108 :       call_wrapper.BeforeCall(CallSize(adaptor));
    4046             :       Call(adaptor, RelocInfo::CODE_TARGET);
    4047       18108 :       call_wrapper.AfterCall();
    4048       18108 :       if (!*definitely_mismatches) {
    4049        4851 :         jmp(done, near_jump);
    4050             :       }
    4051             :     } else {
    4052             :       Jump(adaptor, RelocInfo::CODE_TARGET);
    4053             :     }
    4054       18380 :     bind(&invoke);
    4055             :   }
    4056       18423 : }
    4057             : 
    4058         516 : void MacroAssembler::CheckDebugHook(Register fun, Register new_target,
    4059             :                                     const ParameterCount& expected,
    4060        1032 :                                     const ParameterCount& actual) {
    4061             :   Label skip_hook;
    4062             :   ExternalReference debug_hook_active =
    4063         516 :       ExternalReference::debug_hook_on_function_call_address(isolate());
    4064         516 :   Operand debug_hook_active_operand = ExternalOperand(debug_hook_active);
    4065         516 :   cmpb(debug_hook_active_operand, Immediate(0));
    4066         516 :   j(equal, &skip_hook);
    4067             :   {
    4068             :     FrameScope frame(this,
    4069         516 :                      has_frame() ? StackFrame::NONE : StackFrame::INTERNAL);
    4070         516 :     if (expected.is_reg()) {
    4071         516 :       Integer32ToSmi(expected.reg(), expected.reg());
    4072             :       Push(expected.reg());
    4073             :     }
    4074         516 :     if (actual.is_reg()) {
    4075         516 :       Integer32ToSmi(actual.reg(), actual.reg());
    4076             :       Push(actual.reg());
    4077             :     }
    4078         516 :     if (new_target.is_valid()) {
    4079             :       Push(new_target);
    4080             :     }
    4081             :     Push(fun);
    4082             :     Push(fun);
    4083         516 :     CallRuntime(Runtime::kDebugOnFunctionCall);
    4084             :     Pop(fun);
    4085         516 :     if (new_target.is_valid()) {
    4086             :       Pop(new_target);
    4087             :     }
    4088         516 :     if (actual.is_reg()) {
    4089             :       Pop(actual.reg());
    4090         516 :       SmiToInteger64(actual.reg(), actual.reg());
    4091             :     }
    4092         516 :     if (expected.is_reg()) {
    4093             :       Pop(expected.reg());
    4094         516 :       SmiToInteger64(expected.reg(), expected.reg());
    4095         516 :     }
    4096             :   }
    4097         516 :   bind(&skip_hook);
    4098         516 : }
    4099             : 
    4100      198317 : void MacroAssembler::StubPrologue(StackFrame::Type type) {
    4101      198317 :   pushq(rbp);  // Caller's frame pointer.
    4102             :   movp(rbp, rsp);
    4103             :   Push(Immediate(StackFrame::TypeToMarker(type)));
    4104      198317 : }
    4105             : 
    4106     1765177 : void MacroAssembler::Prologue(bool code_pre_aging) {
    4107             :   PredictableCodeSizeScope predictible_code_size_scope(this,
    4108     1765163 :       kNoCodeAgeSequenceLength);
    4109     1765164 :   if (code_pre_aging) {
    4110             :       // Pre-age the code.
    4111             :     Call(isolate()->builtins()->MarkCodeAsExecutedOnce(),
    4112          14 :          RelocInfo::CODE_AGE_SEQUENCE);
    4113          14 :     Nop(kNoCodeAgeSequenceLength - Assembler::kShortCallInstructionLength);
    4114             :   } else {
    4115     1765150 :     pushq(rbp);  // Caller's frame pointer.
    4116             :     movp(rbp, rsp);
    4117             :     Push(rsi);  // Callee's context.
    4118             :     Push(rdi);  // Callee's JS function.
    4119     1765166 :   }
    4120     1765169 : }
    4121             : 
    4122     1509529 : void MacroAssembler::EmitLoadFeedbackVector(Register vector) {
    4123     3019061 :   movp(vector, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    4124             :   movp(vector, FieldOperand(vector, JSFunction::kFeedbackVectorOffset));
    4125             :   movp(vector, FieldOperand(vector, Cell::kValueOffset));
    4126     1509534 : }
    4127             : 
    4128             : 
    4129           0 : void MacroAssembler::EnterFrame(StackFrame::Type type,
    4130             :                                 bool load_constant_pool_pointer_reg) {
    4131             :   // Out-of-line constant pool not implemented on x64.
    4132           0 :   UNREACHABLE();
    4133             : }
    4134             : 
    4135             : 
    4136      131714 : void MacroAssembler::EnterFrame(StackFrame::Type type) {
    4137      131632 :   pushq(rbp);
    4138             :   movp(rbp, rsp);
    4139             :   Push(Immediate(StackFrame::TypeToMarker(type)));
    4140      131632 :   if (type == StackFrame::INTERNAL) {
    4141             :     Move(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
    4142             :     Push(kScratchRegister);
    4143             :   }
    4144      131632 :   if (emit_debug_code()) {
    4145             :     Move(kScratchRegister,
    4146             :          isolate()->factory()->undefined_value(),
    4147             :          RelocInfo::EMBEDDED_OBJECT);
    4148         164 :     cmpp(Operand(rsp, 0), kScratchRegister);
    4149          82 :     Check(not_equal, kCodeObjectNotProperlyPatched);
    4150             :   }
    4151      131632 : }
    4152             : 
    4153             : 
    4154      125923 : void MacroAssembler::LeaveFrame(StackFrame::Type type) {
    4155      125923 :   if (emit_debug_code()) {
    4156             :     cmpp(Operand(rbp, CommonFrameConstants::kContextOrFrameTypeOffset),
    4157         164 :          Immediate(StackFrame::TypeToMarker(type)));
    4158          82 :     Check(equal, kStackFrameTypesMustMatch);
    4159             :   }
    4160      125923 :   movp(rsp, rbp);
    4161      125923 :   popq(rbp);
    4162      125923 : }
    4163             : 
    4164         258 : void MacroAssembler::EnterBuiltinFrame(Register context, Register target,
    4165             :                                        Register argc) {
    4166             :   Push(rbp);
    4167         258 :   Move(rbp, rsp);
    4168             :   Push(context);
    4169             :   Push(target);
    4170             :   Push(argc);
    4171         258 : }
    4172             : 
    4173         258 : void MacroAssembler::LeaveBuiltinFrame(Register context, Register target,
    4174             :                                        Register argc) {
    4175             :   Pop(argc);
    4176             :   Pop(target);
    4177             :   Pop(context);
    4178         258 :   leave();
    4179         258 : }
    4180             : 
    4181       21831 : void MacroAssembler::EnterExitFramePrologue(bool save_rax,
    4182       65493 :                                             StackFrame::Type frame_type) {
    4183             :   DCHECK(frame_type == StackFrame::EXIT ||
    4184             :          frame_type == StackFrame::BUILTIN_EXIT);
    4185             : 
    4186             :   // Set up the frame structure on the stack.
    4187             :   // All constants are relative to the frame pointer of the exit frame.
    4188             :   DCHECK_EQ(kFPOnStackSize + kPCOnStackSize,
    4189             :             ExitFrameConstants::kCallerSPDisplacement);
    4190             :   DCHECK_EQ(kFPOnStackSize, ExitFrameConstants::kCallerPCOffset);
    4191             :   DCHECK_EQ(0 * kPointerSize, ExitFrameConstants::kCallerFPOffset);
    4192       21831 :   pushq(rbp);
    4193             :   movp(rbp, rsp);
    4194             : 
    4195             :   // Reserve room for entry stack pointer and push the code object.
    4196             :   Push(Immediate(StackFrame::TypeToMarker(frame_type)));
    4197             :   DCHECK_EQ(-2 * kPointerSize, ExitFrameConstants::kSPOffset);
    4198             :   Push(Immediate(0));  // Saved entry sp, patched before call.
    4199             :   Move(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
    4200             :   Push(kScratchRegister);  // Accessed from ExitFrame::code_slot.
    4201             : 
    4202             :   // Save the frame pointer and the context in top.
    4203       21831 :   if (save_rax) {
    4204             :     movp(r14, rax);  // Backup rax in callee-save register.
    4205             :   }
    4206             : 
    4207       21831 :   Store(ExternalReference(Isolate::kCEntryFPAddress, isolate()), rbp);
    4208       21831 :   Store(ExternalReference(Isolate::kContextAddress, isolate()), rsi);
    4209       21831 :   Store(ExternalReference(Isolate::kCFunctionAddress, isolate()), rbx);
    4210       21831 : }
    4211             : 
    4212             : 
    4213       21831 : void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
    4214             :                                             bool save_doubles) {
    4215             : #ifdef _WIN64
    4216             :   const int kShadowSpace = 4;
    4217             :   arg_stack_space += kShadowSpace;
    4218             : #endif
    4219             :   // Optionally save all XMM registers.
    4220       21831 :   if (save_doubles) {
    4221          49 :     int space = XMMRegister::kMaxNumRegisters * kDoubleSize +
    4222          49 :                 arg_stack_space * kRegisterSize;
    4223          49 :     subp(rsp, Immediate(space));
    4224             :     int offset = -ExitFrameConstants::kFixedFrameSizeFromFp;
    4225        1568 :     const RegisterConfiguration* config = RegisterConfiguration::Crankshaft();
    4226        1568 :     for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
    4227             :       DoubleRegister reg =
    4228         735 :           DoubleRegister::from_code(config->GetAllocatableDoubleCode(i));
    4229         735 :       Movsd(Operand(rbp, offset - ((i + 1) * kDoubleSize)), reg);
    4230             :     }
    4231       21782 :   } else if (arg_stack_space > 0) {
    4232       12434 :     subp(rsp, Immediate(arg_stack_space * kRegisterSize));
    4233             :   }
    4234             : 
    4235             :   // Get the required frame alignment for the OS.
    4236       21831 :   const int kFrameAlignment = base::OS::ActivationFrameAlignment();
    4237       21831 :   if (kFrameAlignment > 0) {
    4238             :     DCHECK(base::bits::IsPowerOfTwo32(kFrameAlignment));
    4239             :     DCHECK(is_int8(kFrameAlignment));
    4240       43662 :     andp(rsp, Immediate(-kFrameAlignment));
    4241             :   }
    4242             : 
    4243             :   // Patch the saved entry sp.
    4244       43662 :   movp(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
    4245       21831 : }
    4246             : 
    4247       15571 : void MacroAssembler::EnterExitFrame(int arg_stack_space, bool save_doubles,
    4248             :                                     StackFrame::Type frame_type) {
    4249       15571 :   EnterExitFramePrologue(true, frame_type);
    4250             : 
    4251             :   // Set up argv in callee-saved register r15. It is reused in LeaveExitFrame,
    4252             :   // so it must be retained across the C-call.
    4253             :   int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
    4254       31142 :   leap(r15, Operand(rbp, r14, times_pointer_size, offset));
    4255             : 
    4256       15571 :   EnterExitFrameEpilogue(arg_stack_space, save_doubles);
    4257       15571 : }
    4258             : 
    4259             : 
    4260        6260 : void MacroAssembler::EnterApiExitFrame(int arg_stack_space) {
    4261        6260 :   EnterExitFramePrologue(false, StackFrame::EXIT);
    4262        6260 :   EnterExitFrameEpilogue(arg_stack_space, false);
    4263        6260 : }
    4264             : 
    4265             : 
    4266       15657 : void MacroAssembler::LeaveExitFrame(bool save_doubles, bool pop_arguments) {
    4267             :   // Registers:
    4268             :   // r15 : argv
    4269       15657 :   if (save_doubles) {
    4270             :     int offset = -ExitFrameConstants::kFixedFrameSizeFromFp;
    4271        1568 :     const RegisterConfiguration* config = RegisterConfiguration::Crankshaft();
    4272        1568 :     for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
    4273             :       DoubleRegister reg =
    4274         735 :           DoubleRegister::from_code(config->GetAllocatableDoubleCode(i));
    4275         735 :       Movsd(reg, Operand(rbp, offset - ((i + 1) * kDoubleSize)));
    4276             :     }
    4277             :   }
    4278             : 
    4279       15657 :   if (pop_arguments) {
    4280             :     // Get the return address from the stack and restore the frame pointer.
    4281       31142 :     movp(rcx, Operand(rbp, kFPOnStackSize));
    4282       31142 :     movp(rbp, Operand(rbp, 0 * kPointerSize));
    4283             : 
    4284             :     // Drop everything up to and including the arguments and the receiver
    4285             :     // from the caller stack.
    4286       31142 :     leap(rsp, Operand(r15, 1 * kPointerSize));
    4287             : 
    4288             :     PushReturnAddressFrom(rcx);
    4289             :   } else {
    4290             :     // Otherwise just leave the exit frame.
    4291          86 :     leave();
    4292             :   }
    4293             : 
    4294       15657 :   LeaveExitFrameEpilogue(true);
    4295       15657 : }
    4296             : 
    4297             : 
    4298        6174 : void MacroAssembler::LeaveApiExitFrame(bool restore_context) {
    4299        6174 :   movp(rsp, rbp);
    4300        6174 :   popq(rbp);
    4301             : 
    4302        6174 :   LeaveExitFrameEpilogue(restore_context);
    4303        6174 : }
    4304             : 
    4305             : 
    4306       43662 : void MacroAssembler::LeaveExitFrameEpilogue(bool restore_context) {
    4307             :   // Restore current context from top and clear it in debug mode.
    4308       21831 :   ExternalReference context_address(Isolate::kContextAddress, isolate());
    4309       21831 :   Operand context_operand = ExternalOperand(context_address);
    4310       21831 :   if (restore_context) {
    4311       15700 :     movp(rsi, context_operand);
    4312             :   }
    4313             : #ifdef DEBUG
    4314             :   movp(context_operand, Immediate(0));
    4315             : #endif
    4316             : 
    4317             :   // Clear the top frame.
    4318             :   ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
    4319       21831 :                                        isolate());
    4320       21831 :   Operand c_entry_fp_operand = ExternalOperand(c_entry_fp_address);
    4321       21831 :   movp(c_entry_fp_operand, Immediate(0));
    4322       21831 : }
    4323             : 
    4324             : 
    4325             : // Compute the hash code from the untagged key.  This must be kept in sync with
    4326             : // ComputeIntegerHash in utils.h and KeyedLoadGenericStub in
    4327             : // code-stub-hydrogen.cc
    4328         217 : void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
    4329             :   // First of all we assign the hash seed to scratch.
    4330         217 :   LoadRoot(scratch, Heap::kHashSeedRootIndex);
    4331         217 :   SmiToInteger32(scratch, scratch);
    4332             : 
    4333             :   // Xor original key with a seed.
    4334         217 :   xorl(r0, scratch);
    4335             : 
    4336             :   // Compute the hash code from the untagged key.  This must be kept in sync
    4337             :   // with ComputeIntegerHash in utils.h.
    4338             :   //
    4339             :   // hash = ~hash + (hash << 15);
    4340             :   movl(scratch, r0);
    4341             :   notl(r0);
    4342             :   shll(scratch, Immediate(15));
    4343         217 :   addl(r0, scratch);
    4344             :   // hash = hash ^ (hash >> 12);
    4345             :   movl(scratch, r0);
    4346             :   shrl(scratch, Immediate(12));
    4347             :   xorl(r0, scratch);
    4348             :   // hash = hash + (hash << 2);
    4349         434 :   leal(r0, Operand(r0, r0, times_4, 0));
    4350             :   // hash = hash ^ (hash >> 4);
    4351             :   movl(scratch, r0);
    4352             :   shrl(scratch, Immediate(4));
    4353             :   xorl(r0, scratch);
    4354             :   // hash = hash * 2057;
    4355             :   imull(r0, r0, Immediate(2057));
    4356             :   // hash = hash ^ (hash >> 16);
    4357             :   movl(scratch, r0);
    4358             :   shrl(scratch, Immediate(16));
    4359             :   xorl(r0, scratch);
    4360         217 :   andl(r0, Immediate(0x3fffffff));
    4361         217 : }
    4362             : 
    4363       52774 : void MacroAssembler::LoadAllocationTopHelper(Register result,
    4364             :                                              Register scratch,
    4365       52774 :                                              AllocationFlags flags) {
    4366             :   ExternalReference allocation_top =
    4367       52774 :       AllocationUtils::GetAllocationTopReference(isolate(), flags);
    4368             : 
    4369             :   // Just return if allocation top is already known.
    4370       52774 :   if ((flags & RESULT_CONTAINS_TOP) != 0) {
    4371             :     // No use of scratch if allocation top is provided.
    4372             :     DCHECK(!scratch.is_valid());
    4373             : #ifdef DEBUG
    4374             :     // Assert that result actually contains top on entry.
    4375             :     Operand top_operand = ExternalOperand(allocation_top);
    4376             :     cmpp(result, top_operand);
    4377             :     Check(equal, kUnexpectedAllocationTop);
    4378             : #endif
    4379           0 :     return;
    4380             :   }
    4381             : 
    4382             :   // Move address of new object to result. Use scratch register if available,
    4383             :   // and keep address in scratch until call to UpdateAllocationTopHelper.
    4384       52774 :   if (scratch.is_valid()) {
    4385          45 :     LoadAddress(scratch, allocation_top);
    4386          90 :     movp(result, Operand(scratch, 0));
    4387             :   } else {
    4388       52729 :     Load(result, allocation_top);
    4389             :   }
    4390             : }
    4391             : 
    4392             : 
    4393         878 : void MacroAssembler::MakeSureDoubleAlignedHelper(Register result,
    4394             :                                                  Register scratch,
    4395             :                                                  Label* gc_required,
    4396             :                                                  AllocationFlags flags) {
    4397             :   if (kPointerSize == kDoubleSize) {
    4398         878 :     if (FLAG_debug_code) {
    4399           0 :       testl(result, Immediate(kDoubleAlignmentMask));
    4400           0 :       Check(zero, kAllocationIsNotDoubleAligned);
    4401             :     }
    4402             :   } else {
    4403             :     // Align the next allocation. Storing the filler map without checking top
    4404             :     // is safe in new-space because the limit of the heap is aligned there.
    4405             :     DCHECK(kPointerSize * 2 == kDoubleSize);
    4406             :     DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    4407             :     // Make sure scratch is not clobbered by this function as it might be
    4408             :     // used in UpdateAllocationTopHelper later.
    4409             :     DCHECK(!scratch.is(kScratchRegister));
    4410             :     Label aligned;
    4411             :     testl(result, Immediate(kDoubleAlignmentMask));
    4412             :     j(zero, &aligned, Label::kNear);
    4413             :     if (((flags & ALLOCATION_FOLDED) == 0) && ((flags & PRETENURE) != 0)) {
    4414             :       ExternalReference allocation_limit =
    4415             :           AllocationUtils::GetAllocationLimitReference(isolate(), flags);
    4416             :       cmpp(result, ExternalOperand(allocation_limit));
    4417             :       j(above_equal, gc_required);
    4418             :     }
    4419             :     LoadRoot(kScratchRegister, Heap::kOnePointerFillerMapRootIndex);
    4420             :     movp(Operand(result, 0), kScratchRegister);
    4421             :     addp(result, Immediate(kDoubleSize / 2));
    4422             :     bind(&aligned);
    4423             :   }
    4424         878 : }
    4425             : 
    4426             : 
    4427       50739 : void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
    4428             :                                                Register scratch,
    4429       50739 :                                                AllocationFlags flags) {
    4430       50739 :   if (emit_debug_code()) {
    4431           1 :     testp(result_end, Immediate(kObjectAlignmentMask));
    4432           1 :     Check(zero, kUnalignedAllocationInNewSpace);
    4433             :   }
    4434             : 
    4435             :   ExternalReference allocation_top =
    4436       50739 :       AllocationUtils::GetAllocationTopReference(isolate(), flags);
    4437             : 
    4438             :   // Update new top.
    4439       50739 :   if (scratch.is_valid()) {
    4440             :     // Scratch already contains address of allocation top.
    4441          90 :     movp(Operand(scratch, 0), result_end);
    4442             :   } else {
    4443       50694 :     Store(allocation_top, result_end);
    4444             :   }
    4445       50739 : }
    4446             : 
    4447             : 
    4448       40313 : void MacroAssembler::Allocate(int object_size,
    4449             :                               Register result,
    4450             :                               Register result_end,
    4451             :                               Register scratch,
    4452             :                               Label* gc_required,
    4453       40263 :                               AllocationFlags flags) {
    4454             :   DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
    4455             :   DCHECK(object_size <= kMaxRegularHeapObjectSize);
    4456             :   DCHECK((flags & ALLOCATION_FOLDED) == 0);
    4457       40313 :   if (!FLAG_inline_new) {
    4458          50 :     if (emit_debug_code()) {
    4459             :       // Trash the registers to simulate an allocation failure.
    4460           0 :       movl(result, Immediate(0x7091));
    4461           0 :       if (result_end.is_valid()) {
    4462             :         movl(result_end, Immediate(0x7191));
    4463             :       }
    4464           0 :       if (scratch.is_valid()) {
    4465             :         movl(scratch, Immediate(0x7291));
    4466             :       }
    4467             :     }
    4468          50 :     jmp(gc_required);
    4469       40363 :     return;
    4470             :   }
    4471             :   DCHECK(!result.is(result_end));
    4472             : 
    4473             :   // Load address of new object into result.
    4474       40263 :   LoadAllocationTopHelper(result, scratch, flags);
    4475             : 
    4476       40263 :   if ((flags & DOUBLE_ALIGNMENT) != 0) {
    4477         394 :     MakeSureDoubleAlignedHelper(result, scratch, gc_required, flags);
    4478             :   }
    4479             : 
    4480             :   // Calculate new top and bail out if new space is exhausted.
    4481             :   ExternalReference allocation_limit =
    4482       40263 :       AllocationUtils::GetAllocationLimitReference(isolate(), flags);
    4483             : 
    4484       40263 :   Register top_reg = result_end.is_valid() ? result_end : result;
    4485             : 
    4486       40263 :   if (!top_reg.is(result)) {
    4487       40263 :     movp(top_reg, result);
    4488             :   }
    4489       40263 :   addp(top_reg, Immediate(object_size));
    4490       40263 :   Operand limit_operand = ExternalOperand(allocation_limit);
    4491             :   cmpp(top_reg, limit_operand);
    4492       40263 :   j(above, gc_required);
    4493             : 
    4494       40263 :   if ((flags & ALLOCATION_FOLDING_DOMINATOR) == 0) {
    4495             :     // The top pointer is not updated for allocation folding dominators.
    4496       38228 :     UpdateAllocationTopHelper(top_reg, scratch, flags);
    4497             :   }
    4498             : 
    4499       40263 :   if (top_reg.is(result)) {
    4500           0 :     subp(result, Immediate(object_size - kHeapObjectTag));
    4501             :   } else {
    4502             :     // Tag the result.
    4503             :     DCHECK(kHeapObjectTag == 1);
    4504             :     incp(result);
    4505             :   }
    4506             : }
    4507             : 
    4508             : 
    4509           0 : void MacroAssembler::Allocate(int header_size,
    4510             :                               ScaleFactor element_size,
    4511             :                               Register element_count,
    4512             :                               Register result,
    4513             :                               Register result_end,
    4514             :                               Register scratch,
    4515             :                               Label* gc_required,
    4516             :                               AllocationFlags flags) {
    4517             :   DCHECK((flags & SIZE_IN_WORDS) == 0);
    4518             :   DCHECK((flags & ALLOCATION_FOLDING_DOMINATOR) == 0);
    4519             :   DCHECK((flags & ALLOCATION_FOLDED) == 0);
    4520           0 :   leap(result_end, Operand(element_count, element_size, header_size));
    4521           0 :   Allocate(result_end, result, result_end, scratch, gc_required, flags);
    4522           0 : }
    4523             : 
    4524             : 
    4525        7690 : void MacroAssembler::Allocate(Register object_size,
    4526             :                               Register result,
    4527             :                               Register result_end,
    4528             :                               Register scratch,
    4529             :                               Label* gc_required,
    4530        7690 :                               AllocationFlags flags) {
    4531             :   DCHECK((flags & SIZE_IN_WORDS) == 0);
    4532             :   DCHECK((flags & ALLOCATION_FOLDED) == 0);
    4533        7690 :   if (!FLAG_inline_new) {
    4534           0 :     if (emit_debug_code()) {
    4535             :       // Trash the registers to simulate an allocation failure.
    4536           0 :       movl(result, Immediate(0x7091));
    4537             :       movl(result_end, Immediate(0x7191));
    4538           0 :       if (scratch.is_valid()) {
    4539             :         movl(scratch, Immediate(0x7291));
    4540             :       }
    4541             :       // object_size is left unchanged by this function.
    4542             :     }
    4543           0 :     jmp(gc_required);
    4544        7690 :     return;
    4545             :   }
    4546             :   DCHECK(!result.is(result_end));
    4547             : 
    4548             :   // Load address of new object into result.
    4549        7690 :   LoadAllocationTopHelper(result, scratch, flags);
    4550             : 
    4551        7690 :   if ((flags & DOUBLE_ALIGNMENT) != 0) {
    4552          77 :     MakeSureDoubleAlignedHelper(result, scratch, gc_required, flags);
    4553             :   }
    4554             : 
    4555             :   ExternalReference allocation_limit =
    4556        7690 :       AllocationUtils::GetAllocationLimitReference(isolate(), flags);
    4557        7690 :   if (!object_size.is(result_end)) {
    4558        7690 :     movp(result_end, object_size);
    4559             :   }
    4560        7690 :   addp(result_end, result);
    4561        7690 :   Operand limit_operand = ExternalOperand(allocation_limit);
    4562             :   cmpp(result_end, limit_operand);
    4563        7690 :   j(above, gc_required);
    4564             : 
    4565        7690 :   if ((flags & ALLOCATION_FOLDING_DOMINATOR) == 0) {
    4566             :     // The top pointer is not updated for allocation folding dominators.
    4567        7690 :     UpdateAllocationTopHelper(result_end, scratch, flags);
    4568             :   }
    4569             : 
    4570             :   // Tag the result.
    4571        7690 :   addp(result, Immediate(kHeapObjectTag));
    4572             : }
    4573             : 
    4574        4821 : void MacroAssembler::FastAllocate(int object_size, Register result,
    4575             :                                   Register result_end, AllocationFlags flags) {
    4576             :   DCHECK(!result.is(result_end));
    4577             :   // Load address of new object into result.
    4578        4821 :   LoadAllocationTopHelper(result, no_reg, flags);
    4579             : 
    4580        4821 :   if ((flags & DOUBLE_ALIGNMENT) != 0) {
    4581         407 :     MakeSureDoubleAlignedHelper(result, no_reg, NULL, flags);
    4582             :   }
    4583             : 
    4584        9642 :   leap(result_end, Operand(result, object_size));
    4585             : 
    4586        4821 :   UpdateAllocationTopHelper(result_end, no_reg, flags);
    4587             : 
    4588        4821 :   addp(result, Immediate(kHeapObjectTag));
    4589        4821 : }
    4590             : 
    4591           0 : void MacroAssembler::FastAllocate(Register object_size, Register result,
    4592             :                                   Register result_end, AllocationFlags flags) {
    4593             :   DCHECK(!result.is(result_end));
    4594             :   // Load address of new object into result.
    4595           0 :   LoadAllocationTopHelper(result, no_reg, flags);
    4596             : 
    4597           0 :   if ((flags & DOUBLE_ALIGNMENT) != 0) {
    4598           0 :     MakeSureDoubleAlignedHelper(result, no_reg, NULL, flags);
    4599             :   }
    4600             : 
    4601           0 :   leap(result_end, Operand(result, object_size, times_1, 0));
    4602             : 
    4603           0 :   UpdateAllocationTopHelper(result_end, no_reg, flags);
    4604             : 
    4605           0 :   addp(result, Immediate(kHeapObjectTag));
    4606           0 : }
    4607             : 
    4608       27344 : void MacroAssembler::AllocateHeapNumber(Register result,
    4609             :                                         Register scratch,
    4610             :                                         Label* gc_required,
    4611             :                                         MutableMode mode) {
    4612             :   // Allocate heap number in new space.
    4613             :   Allocate(HeapNumber::kSize, result, scratch, no_reg, gc_required,
    4614       27344 :            NO_ALLOCATION_FLAGS);
    4615             : 
    4616             :   Heap::RootListIndex map_index = mode == MUTABLE
    4617             :       ? Heap::kMutableHeapNumberMapRootIndex
    4618       27344 :       : Heap::kHeapNumberMapRootIndex;
    4619             : 
    4620             :   // Set the map.
    4621       27344 :   LoadRoot(kScratchRegister, map_index);
    4622       27344 :   movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
    4623       27344 : }
    4624             : 
    4625          86 : void MacroAssembler::AllocateJSValue(Register result, Register constructor,
    4626             :                                      Register value, Register scratch,
    4627             :                                      Label* gc_required) {
    4628             :   DCHECK(!result.is(constructor));
    4629             :   DCHECK(!result.is(scratch));
    4630             :   DCHECK(!result.is(value));
    4631             : 
    4632             :   // Allocate JSValue in new space.
    4633             :   Allocate(JSValue::kSize, result, scratch, no_reg, gc_required,
    4634          86 :            NO_ALLOCATION_FLAGS);
    4635             : 
    4636             :   // Initialize the JSValue.
    4637          86 :   LoadGlobalFunctionInitialMap(constructor, scratch);
    4638          86 :   movp(FieldOperand(result, HeapObject::kMapOffset), scratch);
    4639          86 :   LoadRoot(scratch, Heap::kEmptyFixedArrayRootIndex);
    4640             :   movp(FieldOperand(result, JSObject::kPropertiesOffset), scratch);
    4641             :   movp(FieldOperand(result, JSObject::kElementsOffset), scratch);
    4642             :   movp(FieldOperand(result, JSValue::kValueOffset), value);
    4643             :   STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
    4644          86 : }
    4645             : 
    4646           0 : void MacroAssembler::InitializeFieldsWithFiller(Register current_address,
    4647             :                                                 Register end_address,
    4648             :                                                 Register filler) {
    4649             :   Label loop, entry;
    4650           0 :   jmp(&entry, Label::kNear);
    4651           0 :   bind(&loop);
    4652           0 :   movp(Operand(current_address, 0), filler);
    4653           0 :   addp(current_address, Immediate(kPointerSize));
    4654           0 :   bind(&entry);
    4655           0 :   cmpp(current_address, end_address);
    4656           0 :   j(below, &loop, Label::kNear);
    4657           0 : }
    4658             : 
    4659             : 
    4660     2092941 : void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
    4661     2092941 :   if (context_chain_length > 0) {
    4662             :     // Move up the chain of contexts to the context containing the slot.
    4663       50588 :     movp(dst, Operand(rsi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    4664       27316 :     for (int i = 1; i < context_chain_length; i++) {
    4665        4044 :       movp(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    4666             :     }
    4667             :   } else {
    4668             :     // Slot is in the current function context.  Move it into the
    4669             :     // destination register in case we store into it (the write barrier
    4670             :     // cannot be allowed to destroy the context in rsi).
    4671     2067647 :     movp(dst, rsi);
    4672             :   }
    4673             : 
    4674             :   // We should not have found a with context by walking the context
    4675             :   // chain (i.e., the static scope chain and runtime context chain do
    4676             :   // not agree).  A variable occurring in such a scope should have
    4677             :   // slot type LOOKUP and not CONTEXT.
    4678     2092941 :   if (emit_debug_code()) {
    4679             :     CompareRoot(FieldOperand(dst, HeapObject::kMapOffset),
    4680         105 :                 Heap::kWithContextMapRootIndex);
    4681         105 :     Check(not_equal, kVariableResolvedToWithContext);
    4682             :   }
    4683     2092941 : }
    4684             : 
    4685             : #ifdef _WIN64
    4686             : static const int kRegisterPassedArguments = 4;
    4687             : #else
    4688             : static const int kRegisterPassedArguments = 6;
    4689             : #endif
    4690             : 
    4691             : 
    4692       90683 : void MacroAssembler::LoadNativeContextSlot(int index, Register dst) {
    4693       90683 :   movp(dst, NativeContextOperand());
    4694       90683 :   movp(dst, ContextOperand(dst, index));
    4695       90683 : }
    4696             : 
    4697             : 
    4698          86 : void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
    4699           0 :                                                   Register map) {
    4700             :   // Load the initial map.  The global functions all have initial maps.
    4701          86 :   movp(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
    4702          86 :   if (emit_debug_code()) {
    4703             :     Label ok, fail;
    4704           0 :     CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
    4705           0 :     jmp(&ok);
    4706           0 :     bind(&fail);
    4707           0 :     Abort(kGlobalFunctionsMustHaveInitialMap);
    4708           0 :     bind(&ok);
    4709             :   }
    4710          86 : }
    4711             : 
    4712             : 
    4713           0 : int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
    4714             :   // On Windows 64 stack slots are reserved by the caller for all arguments
    4715             :   // including the ones passed in registers, and space is always allocated for
    4716             :   // the four register arguments even if the function takes fewer than four
    4717             :   // arguments.
    4718             :   // On AMD64 ABI (Linux/Mac) the first six arguments are passed in registers
    4719             :   // and the caller does not reserve stack slots for them.
    4720             :   DCHECK(num_arguments >= 0);
    4721             : #ifdef _WIN64
    4722             :   const int kMinimumStackSlots = kRegisterPassedArguments;
    4723             :   if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
    4724             :   return num_arguments;
    4725             : #else
    4726     1119242 :   if (num_arguments < kRegisterPassedArguments) return 0;
    4727      171950 :   return num_arguments - kRegisterPassedArguments;
    4728             : #endif
    4729             : }
    4730             : 
    4731             : 
    4732           0 : void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
    4733             :                                                Register index,
    4734             :                                                Register value,
    4735             :                                                uint32_t encoding_mask) {
    4736             :   Label is_object;
    4737           0 :   JumpIfNotSmi(string, &is_object);
    4738           0 :   Abort(kNonObject);
    4739           0 :   bind(&is_object);
    4740             : 
    4741             :   Push(value);
    4742             :   movp(value, FieldOperand(string, HeapObject::kMapOffset));
    4743             :   movzxbp(value, FieldOperand(value, Map::kInstanceTypeOffset));
    4744             : 
    4745             :   andb(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
    4746           0 :   cmpp(value, Immediate(encoding_mask));
    4747             :   Pop(value);
    4748           0 :   Check(equal, kUnexpectedStringType);
    4749             : 
    4750             :   // The index is assumed to be untagged coming in, tag it to compare with the
    4751             :   // string length without using a temp register, it is restored at the end of
    4752             :   // this function.
    4753           0 :   Integer32ToSmi(index, index);
    4754           0 :   SmiCompare(index, FieldOperand(string, String::kLengthOffset));
    4755           0 :   Check(less, kIndexIsTooLarge);
    4756             : 
    4757             :   SmiCompare(index, Smi::kZero);
    4758           0 :   Check(greater_equal, kIndexIsNegative);
    4759             : 
    4760             :   // Restore the index
    4761           0 :   SmiToInteger32(index, index);
    4762           0 : }
    4763             : 
    4764             : 
    4765      559621 : void MacroAssembler::PrepareCallCFunction(int num_arguments) {
    4766      559621 :   int frame_alignment = base::OS::ActivationFrameAlignment();
    4767             :   DCHECK(frame_alignment != 0);
    4768             :   DCHECK(num_arguments >= 0);
    4769             : 
    4770             :   // Make stack end at alignment and allocate space for arguments and old rsp.
    4771      559621 :   movp(kScratchRegister, rsp);
    4772             :   DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    4773             :   int argument_slots_on_stack =
    4774             :       ArgumentStackSlotsForCFunctionCall(num_arguments);
    4775     1119242 :   subp(rsp, Immediate((argument_slots_on_stack + 1) * kRegisterSize));
    4776     1119242 :   andp(rsp, Immediate(-frame_alignment));
    4777     1119242 :   movp(Operand(rsp, argument_slots_on_stack * kRegisterSize), kScratchRegister);
    4778      559621 : }
    4779             : 
    4780             : 
    4781      559161 : void MacroAssembler::CallCFunction(ExternalReference function,
    4782             :                                    int num_arguments) {
    4783      559161 :   LoadAddress(rax, function);
    4784      559161 :   CallCFunction(rax, num_arguments);
    4785      559161 : }
    4786             : 
    4787             : 
    4788      559621 : void MacroAssembler::CallCFunction(Register function, int num_arguments) {
    4789             :   DCHECK_LE(num_arguments, kMaxCParameters);
    4790             :   DCHECK(has_frame());
    4791             :   // Check stack alignment.
    4792      559621 :   if (emit_debug_code()) {
    4793          35 :     CheckStackAlignment();
    4794             :   }
    4795             : 
    4796      559621 :   call(function);
    4797             :   DCHECK(base::OS::ActivationFrameAlignment() != 0);
    4798             :   DCHECK(num_arguments >= 0);
    4799             :   int argument_slots_on_stack =
    4800             :       ArgumentStackSlotsForCFunctionCall(num_arguments);
    4801     1119242 :   movp(rsp, Operand(rsp, argument_slots_on_stack * kRegisterSize));
    4802      559621 : }
    4803             : 
    4804             : 
    4805             : #ifdef DEBUG
    4806             : bool AreAliased(Register reg1,
    4807             :                 Register reg2,
    4808             :                 Register reg3,
    4809             :                 Register reg4,
    4810             :                 Register reg5,
    4811             :                 Register reg6,
    4812             :                 Register reg7,
    4813             :                 Register reg8) {
    4814             :   int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() +
    4815             :       reg3.is_valid() + reg4.is_valid() + reg5.is_valid() + reg6.is_valid() +
    4816             :       reg7.is_valid() + reg8.is_valid();
    4817             : 
    4818             :   RegList regs = 0;
    4819             :   if (reg1.is_valid()) regs |= reg1.bit();
    4820             :   if (reg2.is_valid()) regs |= reg2.bit();
    4821             :   if (reg3.is_valid()) regs |= reg3.bit();
    4822             :   if (reg4.is_valid()) regs |= reg4.bit();
    4823             :   if (reg5.is_valid()) regs |= reg5.bit();
    4824             :   if (reg6.is_valid()) regs |= reg6.bit();
    4825             :   if (reg7.is_valid()) regs |= reg7.bit();
    4826             :   if (reg8.is_valid()) regs |= reg8.bit();
    4827             :   int n_of_non_aliasing_regs = NumRegs(regs);
    4828             : 
    4829             :   return n_of_valid_regs != n_of_non_aliasing_regs;
    4830             : }
    4831             : #endif
    4832             : 
    4833             : 
    4834    10818621 : CodePatcher::CodePatcher(Isolate* isolate, byte* address, int size)
    4835             :     : address_(address),
    4836             :       size_(size),
    4837    10818621 :       masm_(isolate, address, size + Assembler::kGap, CodeObjectRequired::kNo) {
    4838             :   // Create a new macro assembler pointing to the address of the code to patch.
    4839             :   // The size is adjusted with kGap on order for the assembler to generate size
    4840             :   // bytes of instructions without failing with buffer size constraints.
    4841             :   DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
    4842    10818621 : }
    4843             : 
    4844             : 
    4845    10818621 : CodePatcher::~CodePatcher() {
    4846             :   // Indicate that code has changed.
    4847    10818621 :   Assembler::FlushICache(masm_.isolate(), address_, size_);
    4848             : 
    4849             :   // Check that the code was patched as expected.
    4850             :   DCHECK(masm_.pc_ == address_ + size_);
    4851             :   DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
    4852    10818621 : }
    4853             : 
    4854             : 
    4855     2305172 : void MacroAssembler::CheckPageFlag(
    4856             :     Register object,
    4857             :     Register scratch,
    4858             :     int mask,
    4859             :     Condition cc,
    4860             :     Label* condition_met,
    4861             :     Label::Distance condition_met_distance) {
    4862             :   DCHECK(cc == zero || cc == not_zero);
    4863     2305172 :   if (scratch.is(object)) {
    4864      769070 :     andp(scratch, Immediate(~Page::kPageAlignmentMask));
    4865             :   } else {
    4866     1536102 :     movp(scratch, Immediate(~Page::kPageAlignmentMask));
    4867     1536102 :     andp(scratch, object);
    4868             :   }
    4869     2305172 :   if (mask < (1 << kBitsPerByte)) {
    4870             :     testb(Operand(scratch, MemoryChunk::kFlagsOffset),
    4871     4610344 :           Immediate(static_cast<uint8_t>(mask)));
    4872             :   } else {
    4873           0 :     testl(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
    4874             :   }
    4875     2305172 :   j(cc, condition_met, condition_met_distance);
    4876     2305172 : }
    4877             : 
    4878             : 
    4879       83620 : void MacroAssembler::JumpIfBlack(Register object,
    4880             :                                  Register bitmap_scratch,
    4881             :                                  Register mask_scratch,
    4882             :                                  Label* on_black,
    4883             :                                  Label::Distance on_black_distance) {
    4884             :   DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, rcx));
    4885             : 
    4886       83620 :   GetMarkBits(object, bitmap_scratch, mask_scratch);
    4887             : 
    4888             :   DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
    4889             :   // The mask_scratch register contains a 1 at the position of the first bit
    4890             :   // and a 1 at a position of the second bit. All other positions are zero.
    4891       83620 :   movp(rcx, mask_scratch);
    4892      167240 :   andp(rcx, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
    4893       83620 :   cmpp(mask_scratch, rcx);
    4894       83620 :   j(equal, on_black, on_black_distance);
    4895       83620 : }
    4896             : 
    4897             : 
    4898      167240 : void MacroAssembler::GetMarkBits(Register addr_reg,
    4899             :                                  Register bitmap_reg,
    4900             :                                  Register mask_reg) {
    4901             :   DCHECK(!AreAliased(addr_reg, bitmap_reg, mask_reg, rcx));
    4902      167240 :   movp(bitmap_reg, addr_reg);
    4903             :   // Sign extended 32 bit immediate.
    4904      167240 :   andp(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
    4905             :   movp(rcx, addr_reg);
    4906             :   int shift =
    4907             :       Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
    4908             :   shrl(rcx, Immediate(shift));
    4909             :   andp(rcx,
    4910             :        Immediate((Page::kPageAlignmentMask >> shift) &
    4911      167240 :                  ~(Bitmap::kBytesPerCell - 1)));
    4912             : 
    4913      167240 :   addp(bitmap_reg, rcx);
    4914             :   movp(rcx, addr_reg);
    4915             :   shrl(rcx, Immediate(kPointerSizeLog2));
    4916      167240 :   andp(rcx, Immediate((1 << Bitmap::kBitsPerCellLog2) - 1));
    4917             :   movl(mask_reg, Immediate(3));
    4918             :   shlp_cl(mask_reg);
    4919      167240 : }
    4920             : 
    4921             : 
    4922       83620 : void MacroAssembler::JumpIfWhite(Register value, Register bitmap_scratch,
    4923             :                                  Register mask_scratch, Label* value_is_white,
    4924             :                                  Label::Distance distance) {
    4925             :   DCHECK(!AreAliased(value, bitmap_scratch, mask_scratch, rcx));
    4926       83620 :   GetMarkBits(value, bitmap_scratch, mask_scratch);
    4927             : 
    4928             :   // If the value is black or grey we don't need to do anything.
    4929             :   DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
    4930             :   DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
    4931             :   DCHECK(strcmp(Marking::kGreyBitPattern, "10") == 0);
    4932             :   DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);
    4933             : 
    4934             :   // Since both black and grey have a 1 in the first position and white does
    4935             :   // not have a 1 there we only need to check one bit.
    4936      167240 :   testp(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);
    4937       83620 :   j(zero, value_is_white, distance);
    4938       83620 : }
    4939             : 
    4940             : 
    4941        3324 : void MacroAssembler::CheckEnumCache(Label* call_runtime) {
    4942             :   Label next, start;
    4943             :   Register empty_fixed_array_value = r8;
    4944        3324 :   LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
    4945        3324 :   movp(rcx, rax);
    4946             : 
    4947             :   // Check if the enum length field is properly initialized, indicating that
    4948             :   // there is an enum cache.
    4949             :   movp(rbx, FieldOperand(rcx, HeapObject::kMapOffset));
    4950             : 
    4951        3324 :   EnumLength(rdx, rbx);
    4952        3324 :   Cmp(rdx, Smi::FromInt(kInvalidEnumCacheSentinel));
    4953        3324 :   j(equal, call_runtime);
    4954             : 
    4955        3324 :   jmp(&start);
    4956             : 
    4957        3324 :   bind(&next);
    4958             : 
    4959             :   movp(rbx, FieldOperand(rcx, HeapObject::kMapOffset));
    4960             : 
    4961             :   // For all objects but the receiver, check that the cache is empty.
    4962        3324 :   EnumLength(rdx, rbx);
    4963        3324 :   Cmp(rdx, Smi::kZero);
    4964        3324 :   j(not_equal, call_runtime);
    4965             : 
    4966        3324 :   bind(&start);
    4967             : 
    4968             :   // Check that there are no elements. Register rcx contains the current JS
    4969             :   // object we've reached through the prototype chain.
    4970             :   Label no_elements;
    4971             :   cmpp(empty_fixed_array_value,
    4972             :        FieldOperand(rcx, JSObject::kElementsOffset));
    4973        3324 :   j(equal, &no_elements);
    4974             : 
    4975             :   // Second chance, the object may be using the empty slow element dictionary.
    4976        3324 :   LoadRoot(kScratchRegister, Heap::kEmptySlowElementDictionaryRootIndex);
    4977             :   cmpp(kScratchRegister, FieldOperand(rcx, JSObject::kElementsOffset));
    4978        3324 :   j(not_equal, call_runtime);
    4979             : 
    4980        3324 :   bind(&no_elements);
    4981             :   movp(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
    4982        3324 :   CompareRoot(rcx, Heap::kNullValueRootIndex);
    4983        3324 :   j(not_equal, &next);
    4984        3324 : }
    4985             : 
    4986             : 
    4987          37 : void MacroAssembler::TestJSArrayForAllocationMemento(
    4988             :     Register receiver_reg,
    4989             :     Register scratch_reg,
    4990          37 :     Label* no_memento_found) {
    4991             :   Label map_check;
    4992             :   Label top_check;
    4993             :   ExternalReference new_space_allocation_top =
    4994          37 :       ExternalReference::new_space_allocation_top_address(isolate());
    4995             :   const int kMementoMapOffset = JSArray::kSize - kHeapObjectTag;
    4996             :   const int kMementoLastWordOffset =
    4997             :       kMementoMapOffset + AllocationMemento::kSize - kPointerSize;
    4998             : 
    4999             :   // Bail out if the object is not in new space.
    5000          37 :   JumpIfNotInNewSpace(receiver_reg, scratch_reg, no_memento_found);
    5001             :   // If the object is in new space, we need to check whether it is on the same
    5002             :   // page as the current top.
    5003          74 :   leap(scratch_reg, Operand(receiver_reg, kMementoLastWordOffset));
    5004          37 :   xorp(scratch_reg, ExternalOperand(new_space_allocation_top));
    5005             :   testp(scratch_reg, Immediate(~Page::kPageAlignmentMask));
    5006          37 :   j(zero, &top_check);
    5007             :   // The object is on a different page than allocation top. Bail out if the
    5008             :   // object sits on the page boundary as no memento can follow and we cannot
    5009             :   // touch the memory following it.
    5010          74 :   leap(scratch_reg, Operand(receiver_reg, kMementoLastWordOffset));
    5011             :   xorp(scratch_reg, receiver_reg);
    5012             :   testp(scratch_reg, Immediate(~Page::kPageAlignmentMask));
    5013          37 :   j(not_zero, no_memento_found);
    5014             :   // Continue with the actual map check.
    5015          37 :   jmp(&map_check);
    5016             :   // If top is on the same page as the current object, we need to check whether
    5017             :   // we are below top.
    5018          37 :   bind(&top_check);
    5019          74 :   leap(scratch_reg, Operand(receiver_reg, kMementoLastWordOffset));
    5020          37 :   cmpp(scratch_reg, ExternalOperand(new_space_allocation_top));
    5021          37 :   j(greater_equal, no_memento_found);
    5022             :   // Memento map check.
    5023          37 :   bind(&map_check);
    5024             :   CompareRoot(MemOperand(receiver_reg, kMementoMapOffset),
    5025          37 :               Heap::kAllocationMementoMapRootIndex);
    5026          37 : }
    5027             : 
    5028        6857 : void MacroAssembler::TruncatingDiv(Register dividend, int32_t divisor) {
    5029             :   DCHECK(!dividend.is(rax));
    5030             :   DCHECK(!dividend.is(rdx));
    5031             :   base::MagicNumbersForDivision<uint32_t> mag =
    5032        6857 :       base::SignedDivisionByConstant(static_cast<uint32_t>(divisor));
    5033        6857 :   movl(rax, Immediate(mag.multiplier));
    5034             :   imull(dividend);
    5035        6857 :   bool neg = (mag.multiplier & (static_cast<uint32_t>(1) << 31)) != 0;
    5036        6857 :   if (divisor > 0 && neg) addl(rdx, dividend);
    5037        6857 :   if (divisor < 0 && !neg && mag.multiplier > 0) subl(rdx, dividend);
    5038        6857 :   if (mag.shift > 0) sarl(rdx, Immediate(mag.shift));
    5039             :   movl(rax, dividend);
    5040             :   shrl(rax, Immediate(31));
    5041        6857 :   addl(rdx, rax);
    5042        6857 : }
    5043             : 
    5044             : 
    5045             : }  // namespace internal
    5046             : }  // namespace v8
    5047             : 
    5048             : #endif  // V8_TARGET_ARCH_X64

Generated by: LCOV version 1.10