LCOV - code coverage report
Current view: top level - src/compiler/backend/x64 - instruction-selector-x64.cc (source / functions) Hit Total Coverage
Test: app.info Lines: 1107 1162 95.3 %
Date: 2019-02-19 Functions: 322 331 97.3 %

          Line data    Source code
       1             : // Copyright 2014 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #include <algorithm>
       6             : 
       7             : #include "src/base/adapters.h"
       8             : #include "src/base/overflowing-math.h"
       9             : #include "src/compiler/backend/instruction-selector-impl.h"
      10             : #include "src/compiler/node-matchers.h"
      11             : #include "src/compiler/node-properties.h"
      12             : #include "src/roots-inl.h"
      13             : 
      14             : namespace v8 {
      15             : namespace internal {
      16             : namespace compiler {
      17             : 
      18             : // Adds X64-specific methods for generating operands.
      19             : class X64OperandGenerator final : public OperandGenerator {
      20             :  public:
      21             :   explicit X64OperandGenerator(InstructionSelector* selector)
      22             :       : OperandGenerator(selector) {}
      23             : 
      24    30009202 :   bool CanBeImmediate(Node* node) {
      25    30009202 :     switch (node->opcode()) {
      26             :       case IrOpcode::kInt32Constant:
      27             :       case IrOpcode::kRelocatableInt32Constant:
      28             :         return true;
      29             :       case IrOpcode::kInt64Constant: {
      30    13578737 :         const int64_t value = OpParameter<int64_t>(node->op());
      31    13578737 :         return std::numeric_limits<int32_t>::min() < value &&
      32             :                value <= std::numeric_limits<int32_t>::max();
      33             :       }
      34             :       case IrOpcode::kNumberConstant: {
      35       25606 :         const double value = OpParameter<double>(node->op());
      36       25606 :         return bit_cast<int64_t>(value) == 0;
      37             :       }
      38             :       default:
      39             :         return false;
      40             :     }
      41             :   }
      42             : 
      43      202593 :   int32_t GetImmediateIntegerValue(Node* node) {
      44             :     DCHECK(CanBeImmediate(node));
      45      202593 :     if (node->opcode() == IrOpcode::kInt32Constant) {
      46      150776 :       return OpParameter<int32_t>(node->op());
      47             :     }
      48             :     DCHECK_EQ(IrOpcode::kInt64Constant, node->opcode());
      49       51817 :     return static_cast<int32_t>(OpParameter<int64_t>(node->op()));
      50             :   }
      51             : 
      52    12775696 :   bool CanBeMemoryOperand(InstructionCode opcode, Node* node, Node* input,
      53             :                           int effect_level) {
      54    13389488 :     if (input->opcode() != IrOpcode::kLoad ||
      55     2192491 :         !selector()->CanCover(node, input)) {
      56             :       return false;
      57             :     }
      58     1578767 :     if (effect_level != selector()->GetEffectLevel(input)) {
      59             :       return false;
      60             :     }
      61             :     MachineRepresentation rep =
      62     1578727 :         LoadRepresentationOf(input->op()).representation();
      63     1578729 :     switch (opcode) {
      64             :       case kX64And:
      65             :       case kX64Or:
      66             :       case kX64Xor:
      67             :       case kX64Add:
      68             :       case kX64Sub:
      69             :       case kX64Push:
      70             :       case kX64Cmp:
      71             :       case kX64Test:
      72             :         // When pointer compression is enabled 64-bit memory operands can't be
      73             :         // used for tagged values.
      74     1569523 :         return rep == MachineRepresentation::kWord64 ||
      75     1054600 :                (!COMPRESS_POINTERS_BOOL && IsAnyTagged(rep));
      76             :       case kX64And32:
      77             :       case kX64Or32:
      78             :       case kX64Xor32:
      79             :       case kX64Add32:
      80             :       case kX64Sub32:
      81             :       case kX64Cmp32:
      82             :       case kX64Test32:
      83             :         // When pointer compression is enabled 32-bit memory operands can be
      84             :         // used for tagged values.
      85             :         return rep == MachineRepresentation::kWord32 ||
      86      136239 :                (COMPRESS_POINTERS_BOOL && IsAnyTagged(rep));
      87             :       case kX64Cmp16:
      88             :       case kX64Test16:
      89      275839 :         return rep == MachineRepresentation::kWord16;
      90             :       case kX64Cmp8:
      91             :       case kX64Test8:
      92      112015 :         return rep == MachineRepresentation::kWord8;
      93             :       default:
      94             :         break;
      95             :     }
      96             :     return false;
      97             :   }
      98             : 
      99    14216316 :   AddressingMode GenerateMemoryOperandInputs(Node* index, int scale_exponent,
     100    12268728 :                                              Node* base, Node* displacement,
     101             :                                              DisplacementMode displacement_mode,
     102             :                                              InstructionOperand inputs[],
     103             :                                              size_t* input_count) {
     104             :     AddressingMode mode = kMode_MRI;
     105    14216316 :     if (base != nullptr && (index != nullptr || displacement != nullptr)) {
     106    12332756 :       if (base->opcode() == IrOpcode::kInt32Constant &&
     107       64028 :           OpParameter<int32_t>(base->op()) == 0) {
     108             :         base = nullptr;
     109    12398874 :       } else if (base->opcode() == IrOpcode::kInt64Constant &&
     110      179292 :                  OpParameter<int64_t>(base->op()) == 0) {
     111             :         base = nullptr;
     112             :       }
     113             :     }
     114    14216316 :     if (base != nullptr) {
     115    13922121 :       inputs[(*input_count)++] = UseRegister(base);
     116    13922179 :       if (index != nullptr) {
     117             :         DCHECK(scale_exponent >= 0 && scale_exponent <= 3);
     118     1434204 :         inputs[(*input_count)++] = UseRegister(index);
     119     1434196 :         if (displacement != nullptr) {
     120             :           inputs[(*input_count)++] = displacement_mode == kNegativeDisplacement
     121             :                                          ? UseNegatedImmediate(displacement)
     122      538345 :                                          : UseImmediate(displacement);
     123             :           static const AddressingMode kMRnI_modes[] = {kMode_MR1I, kMode_MR2I,
     124             :                                                        kMode_MR4I, kMode_MR8I};
     125      538343 :           mode = kMRnI_modes[scale_exponent];
     126             :         } else {
     127             :           static const AddressingMode kMRn_modes[] = {kMode_MR1, kMode_MR2,
     128             :                                                       kMode_MR4, kMode_MR8};
     129      895851 :           mode = kMRn_modes[scale_exponent];
     130             :         }
     131             :       } else {
     132    12487975 :         if (displacement == nullptr) {
     133             :           mode = kMode_MR;
     134             :         } else {
     135             :           inputs[(*input_count)++] = displacement_mode == kNegativeDisplacement
     136             :                                          ? UseNegatedImmediate(displacement)
     137    10738444 :                                          : UseImmediate(displacement);
     138             :           mode = kMode_MRI;
     139             :         }
     140             :       }
     141             :     } else {
     142             :       DCHECK(scale_exponent >= 0 && scale_exponent <= 3);
     143      294195 :       if (displacement != nullptr) {
     144      242510 :         if (index == nullptr) {
     145       45088 :           inputs[(*input_count)++] = UseRegister(displacement);
     146             :           mode = kMode_MR;
     147             :         } else {
     148      197422 :           inputs[(*input_count)++] = UseRegister(index);
     149             :           inputs[(*input_count)++] = displacement_mode == kNegativeDisplacement
     150             :                                          ? UseNegatedImmediate(displacement)
     151      197422 :                                          : UseImmediate(displacement);
     152             :           static const AddressingMode kMnI_modes[] = {kMode_MRI, kMode_M2I,
     153             :                                                       kMode_M4I, kMode_M8I};
     154      197422 :           mode = kMnI_modes[scale_exponent];
     155             :         }
     156             :       } else {
     157       51685 :         inputs[(*input_count)++] = UseRegister(index);
     158             :         static const AddressingMode kMn_modes[] = {kMode_MR, kMode_MR1,
     159             :                                                    kMode_M4, kMode_M8};
     160       51686 :         mode = kMn_modes[scale_exponent];
     161       51686 :         if (mode == kMode_MR1) {
     162             :           // [%r1 + %r1*1] has a smaller encoding than [%r1*2+0]
     163       11424 :           inputs[(*input_count)++] = UseRegister(index);
     164             :         }
     165             :       }
     166             :     }
     167    14216390 :     return mode;
     168             :   }
     169             : 
     170    12288048 :   AddressingMode GetEffectiveAddressMemoryOperand(Node* operand,
     171             :                                                   InstructionOperand inputs[],
     172             :                                                   size_t* input_count) {
     173    12288048 :     if (selector()->CanAddressRelativeToRootsRegister()) {
     174     4480188 :       LoadMatcher<ExternalReferenceMatcher> m(operand);
     175     4480191 :       if (m.index().HasValue() && m.object().HasValue()) {
     176             :         ptrdiff_t const delta =
     177      487629 :             m.index().Value() +
     178             :             TurboAssemblerBase::RootRegisterOffsetForExternalReference(
     179      487629 :                 selector()->isolate(), m.object().Value());
     180      487626 :         if (is_int32(delta)) {
     181      487628 :           inputs[(*input_count)++] = TempImmediate(static_cast<int32_t>(delta));
     182      487628 :           return kMode_Root;
     183             :         }
     184             :       }
     185             :     }
     186             :     BaseWithIndexAndDisplacement64Matcher m(operand, AddressOption::kAllowAll);
     187             :     DCHECK(m.matches());
     188    11800486 :     if (m.displacement() == nullptr || CanBeImmediate(m.displacement())) {
     189             :       return GenerateMemoryOperandInputs(
     190             :           m.index(), m.scale(), m.base(), m.displacement(),
     191    11797411 :           m.displacement_mode(), inputs, input_count);
     192        4458 :     } else if (m.base() == nullptr &&
     193        1402 :                m.displacement_mode() == kPositiveDisplacement) {
     194             :       // The displacement cannot be an immediate, but we can use the
     195             :       // displacement as base instead and still benefit from addressing
     196             :       // modes for the scale.
     197             :       return GenerateMemoryOperandInputs(m.index(), m.scale(), m.displacement(),
     198             :                                          nullptr, m.displacement_mode(), inputs,
     199        1402 :                                          input_count);
     200             :     } else {
     201        3308 :       inputs[(*input_count)++] = UseRegister(operand->InputAt(0));
     202        3336 :       inputs[(*input_count)++] = UseRegister(operand->InputAt(1));
     203        1678 :       return kMode_MR1;
     204             :     }
     205             :   }
     206             : 
     207      360362 :   InstructionOperand GetEffectiveIndexOperand(Node* index,
     208             :                                               AddressingMode* mode) {
     209      360362 :     if (CanBeImmediate(index)) {
     210      301137 :       *mode = kMode_MRI;
     211      301137 :       return UseImmediate(index);
     212             :     } else {
     213       59231 :       *mode = kMode_MR1;
     214       59231 :       return UseUniqueRegister(index);
     215             :     }
     216             :   }
     217             : 
     218             :   bool CanBeBetterLeftOperand(Node* node) const {
     219      916009 :     return !selector()->IsLive(node);
     220             :   }
     221             : };
     222             : 
     223             : namespace {
     224     6097110 : ArchOpcode GetLoadOpcode(LoadRepresentation load_rep) {
     225             :   ArchOpcode opcode = kArchNop;
     226     6097110 :   switch (load_rep.representation()) {
     227             :     case MachineRepresentation::kFloat32:
     228             :       opcode = kX64Movss;
     229       16066 :       break;
     230             :     case MachineRepresentation::kFloat64:
     231             :       opcode = kX64Movsd;
     232      423865 :       break;
     233             :     case MachineRepresentation::kBit:  // Fall through.
     234             :     case MachineRepresentation::kWord8:
     235      221854 :       opcode = load_rep.IsSigned() ? kX64Movsxbl : kX64Movzxbl;
     236      221854 :       break;
     237             :     case MachineRepresentation::kWord16:
     238      164516 :       opcode = load_rep.IsSigned() ? kX64Movsxwl : kX64Movzxwl;
     239      164516 :       break;
     240             :     case MachineRepresentation::kWord32:
     241             :       opcode = kX64Movl;
     242      391331 :       break;
     243             : #ifdef V8_COMPRESS_POINTERS
     244             :     case MachineRepresentation::kTaggedSigned:
     245             :       return kX64MovqDecompressTaggedSigned;
     246             :     case MachineRepresentation::kTaggedPointer:
     247             :       return kX64MovqDecompressTaggedPointer;
     248             :     case MachineRepresentation::kTagged:
     249             :       return kX64MovqDecompressAnyTagged;
     250             : #else
     251             :     case MachineRepresentation::kTaggedSigned:   // Fall through.
     252             :     case MachineRepresentation::kTaggedPointer:  // Fall through.
     253             :     case MachineRepresentation::kTagged:         // Fall through.
     254             : #endif
     255             :     case MachineRepresentation::kWord64:
     256             :       opcode = kX64Movq;
     257     4873664 :       break;
     258             :     case MachineRepresentation::kSimd128:  // Fall through.
     259             :       opcode = kX64Movdqu;
     260        5792 :       break;
     261             :     case MachineRepresentation::kNone:
     262           0 :       UNREACHABLE();
     263             :       break;
     264             :   }
     265     6097110 :   return opcode;
     266             : }
     267             : 
     268     4658619 : ArchOpcode GetStoreOpcode(StoreRepresentation store_rep) {
     269     4658619 :   switch (store_rep.representation()) {
     270             :     case MachineRepresentation::kFloat32:
     271             :       return kX64Movss;
     272             :       break;
     273             :     case MachineRepresentation::kFloat64:
     274             :       return kX64Movsd;
     275             :       break;
     276             :     case MachineRepresentation::kBit:  // Fall through.
     277             :     case MachineRepresentation::kWord8:
     278             :       return kX64Movb;
     279             :       break;
     280             :     case MachineRepresentation::kWord16:
     281             :       return kX64Movw;
     282             :       break;
     283             :     case MachineRepresentation::kWord32:
     284             :       return kX64Movl;
     285             :       break;
     286             :     case MachineRepresentation::kTaggedSigned:   // Fall through.
     287             :     case MachineRepresentation::kTaggedPointer:  // Fall through.
     288             :     case MachineRepresentation::kTagged:         // Fall through.
     289             : #ifdef V8_COMPRESS_POINTERS
     290             :       return kX64MovqCompressTagged;
     291             : #endif
     292             :     case MachineRepresentation::kWord64:
     293             :       return kX64Movq;
     294             :       break;
     295             :     case MachineRepresentation::kSimd128:  // Fall through.
     296             :       return kX64Movdqu;
     297             :       break;
     298             :     case MachineRepresentation::kNone:
     299           0 :       UNREACHABLE();
     300             :   }
     301           0 :   UNREACHABLE();
     302             : }
     303             : 
     304             : }  // namespace
     305             : 
     306        4918 : void InstructionSelector::VisitStackSlot(Node* node) {
     307        2459 :   StackSlotRepresentation rep = StackSlotRepresentationOf(node->op());
     308        2459 :   int slot = frame_->AllocateSpillSlot(rep.size());
     309             :   OperandGenerator g(this);
     310             : 
     311             :   Emit(kArchStackSlot, g.DefineAsRegister(node),
     312        4918 :        sequence()->AddImmediate(Constant(slot)), 0, nullptr);
     313        2459 : }
     314             : 
     315         152 : void InstructionSelector::VisitDebugAbort(Node* node) {
     316             :   X64OperandGenerator g(this);
     317         152 :   Emit(kArchDebugAbort, g.NoOutput(), g.UseFixed(node->InputAt(0), rdx));
     318         152 : }
     319             : 
     320           1 : void InstructionSelector::VisitSpeculationFence(Node* node) {
     321             :   X64OperandGenerator g(this);
     322           1 :   Emit(kLFence, g.NoOutput());
     323           1 : }
     324             : 
     325    12194263 : void InstructionSelector::VisitLoad(Node* node) {
     326     6097086 :   LoadRepresentation load_rep = LoadRepresentationOf(node->op());
     327             :   X64OperandGenerator g(this);
     328             : 
     329     6097115 :   ArchOpcode opcode = GetLoadOpcode(load_rep);
     330             :   size_t temp_count = 0;
     331    12194162 :   InstructionOperand temps[1];
     332             :   if (COMPRESS_POINTERS_BOOL && opcode == kX64MovqDecompressAnyTagged) {
     333             :     temps[temp_count++] = g.TempRegister();
     334             :   }
     335             :   DCHECK_LE(temp_count, arraysize(temps));
     336     6097081 :   InstructionOperand outputs[] = {g.DefineAsRegister(node)};
     337    24388504 :   InstructionOperand inputs[3];
     338     6097159 :   size_t input_count = 0;
     339             :   AddressingMode mode =
     340     6097159 :       g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count);
     341     6097177 :   InstructionCode code = opcode | AddressingModeField::encode(mode);
     342     6097177 :   if (node->opcode() == IrOpcode::kProtectedLoad) {
     343       94148 :     code |= MiscField::encode(kMemoryAccessProtected);
     344     6003029 :   } else if (node->opcode() == IrOpcode::kPoisonedLoad) {
     345           0 :     CHECK_NE(poisoning_level_, PoisoningMitigationLevel::kDontPoison);
     346           0 :     code |= MiscField::encode(kMemoryAccessPoisoned);
     347             :   }
     348     6097177 :   Emit(code, 1, outputs, input_count, inputs, temp_count, temps);
     349     6097166 : }
     350             : 
     351           0 : void InstructionSelector::VisitPoisonedLoad(Node* node) { VisitLoad(node); }
     352             : 
     353       94146 : void InstructionSelector::VisitProtectedLoad(Node* node) { VisitLoad(node); }
     354             : 
     355     9680270 : void InstructionSelector::VisitStore(Node* node) {
     356             :   X64OperandGenerator g(this);
     357             :   Node* base = node->InputAt(0);
     358             :   Node* index = node->InputAt(1);
     359     1545467 :   Node* value = node->InputAt(2);
     360             : 
     361     4840135 :   StoreRepresentation store_rep = StoreRepresentationOf(node->op());
     362             :   WriteBarrierKind write_barrier_kind = store_rep.write_barrier_kind();
     363             : 
     364     4840135 :   if (write_barrier_kind != kNoWriteBarrier) {
     365             :     DCHECK(CanBeTaggedPointer(store_rep.representation()));
     366             :     AddressingMode addressing_mode;
     367             :     InstructionOperand inputs[] = {
     368             :         g.UseUniqueRegister(base),
     369             :         g.GetEffectiveIndexOperand(index, &addressing_mode),
     370      325633 :         g.UseUniqueRegister(value)};
     371             :     RecordWriteMode record_write_mode = RecordWriteMode::kValueIsAny;
     372      325633 :     switch (write_barrier_kind) {
     373             :       case kNoWriteBarrier:
     374           0 :         UNREACHABLE();
     375             :         break;
     376             :       case kMapWriteBarrier:
     377             :         record_write_mode = RecordWriteMode::kValueIsMap;
     378       37596 :         break;
     379             :       case kPointerWriteBarrier:
     380             :         record_write_mode = RecordWriteMode::kValueIsPointer;
     381       31253 :         break;
     382             :       case kFullWriteBarrier:
     383             :         record_write_mode = RecordWriteMode::kValueIsAny;
     384             :         break;
     385             :     }
     386      651266 :     InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()};
     387             :     InstructionCode code = kArchStoreWithWriteBarrier;
     388      651266 :     code |= AddressingModeField::encode(addressing_mode);
     389      325633 :     code |= MiscField::encode(static_cast<int>(record_write_mode));
     390      325633 :     Emit(code, 0, nullptr, arraysize(inputs), inputs, arraysize(temps), temps);
     391             :   } else {
     392     4514502 :     ArchOpcode opcode = GetStoreOpcode(store_rep);
     393    22572496 :     InstructionOperand inputs[4];
     394     4514500 :     size_t input_count = 0;
     395             :     AddressingMode addressing_mode =
     396     4514500 :         g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count);
     397             :     InstructionCode code =
     398     4514503 :         opcode | AddressingModeField::encode(addressing_mode);
     399     9029002 :     if ((ElementSizeLog2Of(store_rep.representation()) <
     400     1545467 :          kSystemPointerSizeLog2) &&
     401     4635099 :         (value->opcode() == IrOpcode::kTruncateInt64ToInt32) &&
     402      120600 :         CanCover(node, value)) {
     403             :       value = value->InputAt(0);
     404             :     }
     405             :     InstructionOperand value_operand =
     406     4514499 :         g.CanBeImmediate(value) ? g.UseImmediate(value) : g.UseRegister(value);
     407     4514506 :     inputs[input_count++] = value_operand;
     408             :     Emit(code, 0, static_cast<InstructionOperand*>(nullptr), input_count,
     409     4514506 :          inputs);
     410             :   }
     411     4840141 : }
     412             : 
     413      288238 : void InstructionSelector::VisitProtectedStore(Node* node) {
     414             :   X64OperandGenerator g(this);
     415             :   Node* value = node->InputAt(2);
     416             : 
     417      144119 :   StoreRepresentation store_rep = StoreRepresentationOf(node->op());
     418             : 
     419      144120 :   ArchOpcode opcode = GetStoreOpcode(store_rep);
     420      720637 :   InstructionOperand inputs[4];
     421      144129 :   size_t input_count = 0;
     422             :   AddressingMode addressing_mode =
     423      144129 :       g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count);
     424      144158 :   InstructionCode code = opcode | AddressingModeField::encode(addressing_mode) |
     425      144158 :                          MiscField::encode(kMemoryAccessProtected);
     426             :   InstructionOperand value_operand =
     427      144158 :       g.CanBeImmediate(value) ? g.UseImmediate(value) : g.UseRegister(value);
     428      144160 :   inputs[input_count++] = value_operand;
     429      144160 :   Emit(code, 0, static_cast<InstructionOperand*>(nullptr), input_count, inputs);
     430      144156 : }
     431             : 
     432             : // Architecture supports unaligned access, therefore VisitLoad is used instead
     433           0 : void InstructionSelector::VisitUnalignedLoad(Node* node) { UNREACHABLE(); }
     434             : 
     435             : // Architecture supports unaligned access, therefore VisitStore is used instead
     436           0 : void InstructionSelector::VisitUnalignedStore(Node* node) { UNREACHABLE(); }
     437             : 
     438             : // Shared routine for multiple binary operations.
     439     1307647 : static void VisitBinop(InstructionSelector* selector, Node* node,
     440     1594052 :                        InstructionCode opcode, FlagsContinuation* cont) {
     441             :   X64OperandGenerator g(selector);
     442      906074 :   Int32BinopMatcher m(node);
     443             :   Node* left = m.left().node();
     444             :   Node* right = m.right().node();
     445     8155010 :   InstructionOperand inputs[8];
     446      906122 :   size_t input_count = 0;
     447     1812235 :   InstructionOperand outputs[1];
     448             :   size_t output_count = 0;
     449             : 
     450             :   // TODO(turbofan): match complex addressing modes.
     451      906122 :   if (left == right) {
     452             :     // If both inputs refer to the same operand, enforce allocating a register
     453             :     // for both of them to ensure that we don't end up generating code like
     454             :     // this:
     455             :     //
     456             :     //   mov rax, [rbp-0x10]
     457             :     //   add rax, [rbp-0x10]
     458             :     //   jo label
     459        1337 :     InstructionOperand const input = g.UseRegister(left);
     460        1337 :     inputs[input_count++] = input;
     461        1337 :     inputs[input_count++] = input;
     462      904785 :   } else if (g.CanBeImmediate(right)) {
     463      503229 :     inputs[input_count++] = g.UseRegister(left);
     464      503232 :     inputs[input_count++] = g.UseImmediate(right);
     465             :   } else {
     466      401536 :     int effect_level = selector->GetEffectLevel(node);
     467      401518 :     if (cont->IsBranch()) {
     468             :       effect_level = selector->GetEffectLevel(
     469       36872 :           cont->true_block()->PredecessorAt(0)->control_input());
     470             :     }
     471      737971 :     if (node->op()->HasProperty(Operator::kCommutative) &&
     472      665807 :         g.CanBeBetterLeftOperand(right) &&
     473      251596 :         (!g.CanBeBetterLeftOperand(left) ||
     474      251651 :          !g.CanBeMemoryOperand(opcode, node, right, effect_level))) {
     475             :       std::swap(left, right);
     476             :     }
     477      401541 :     if (g.CanBeMemoryOperand(opcode, node, right, effect_level)) {
     478        2638 :       inputs[input_count++] = g.UseRegister(left);
     479             :       AddressingMode addressing_mode =
     480        2655 :           g.GetEffectiveAddressMemoryOperand(right, inputs, &input_count);
     481        2650 :       opcode |= AddressingModeField::encode(addressing_mode);
     482             :     } else {
     483      398901 :       inputs[input_count++] = g.UseRegister(left);
     484      398898 :       inputs[input_count++] = g.Use(right);
     485             :     }
     486             :   }
     487             : 
     488      906102 :   if (cont->IsBranch()) {
     489      249562 :     inputs[input_count++] = g.Label(cont->true_block());
     490      249558 :     inputs[input_count++] = g.Label(cont->false_block());
     491             :   }
     492             : 
     493      906103 :   outputs[output_count++] = g.DefineSameAsFirst(node);
     494             : 
     495             :   DCHECK_NE(0u, input_count);
     496             :   DCHECK_EQ(1u, output_count);
     497             :   DCHECK_GE(arraysize(inputs), input_count);
     498             :   DCHECK_GE(arraysize(outputs), output_count);
     499             : 
     500             :   selector->EmitWithContinuation(opcode, output_count, outputs, input_count,
     501      906120 :                                  inputs, cont);
     502      906121 : }
     503             : 
     504             : // Shared routine for multiple binary operations.
     505      616915 : static void VisitBinop(InstructionSelector* selector, Node* node,
     506             :                        InstructionCode opcode) {
     507             :   FlagsContinuation cont;
     508      616912 :   VisitBinop(selector, node, opcode, &cont);
     509      616949 : }
     510             : 
     511      180052 : void InstructionSelector::VisitWord32And(Node* node) {
     512             :   X64OperandGenerator g(this);
     513      180052 :   Uint32BinopMatcher m(node);
     514      180052 :   if (m.right().Is(0xFF)) {
     515        2326 :     Emit(kX64Movzxbl, g.DefineAsRegister(node), g.Use(m.left().node()));
     516      177726 :   } else if (m.right().Is(0xFFFF)) {
     517        5685 :     Emit(kX64Movzxwl, g.DefineAsRegister(node), g.Use(m.left().node()));
     518             :   } else {
     519      172041 :     VisitBinop(this, node, kX64And32);
     520             :   }
     521      180054 : }
     522             : 
     523      185139 : void InstructionSelector::VisitWord64And(Node* node) {
     524      185139 :   VisitBinop(this, node, kX64And);
     525      185140 : }
     526             : 
     527       61325 : void InstructionSelector::VisitWord32Or(Node* node) {
     528       61325 :   VisitBinop(this, node, kX64Or32);
     529       61326 : }
     530             : 
     531       92249 : void InstructionSelector::VisitWord64Or(Node* node) {
     532       92249 :   VisitBinop(this, node, kX64Or);
     533       92263 : }
     534             : 
     535       22123 : void InstructionSelector::VisitWord32Xor(Node* node) {
     536             :   X64OperandGenerator g(this);
     537       22123 :   Uint32BinopMatcher m(node);
     538       22123 :   if (m.right().Is(-1)) {
     539        2781 :     Emit(kX64Not32, g.DefineSameAsFirst(node), g.UseRegister(m.left().node()));
     540             :   } else {
     541       19342 :     VisitBinop(this, node, kX64Xor32);
     542             :   }
     543       22123 : }
     544             : 
     545         305 : void InstructionSelector::VisitWord64Xor(Node* node) {
     546             :   X64OperandGenerator g(this);
     547         305 :   Uint64BinopMatcher m(node);
     548         305 :   if (m.right().Is(-1)) {
     549          44 :     Emit(kX64Not, g.DefineSameAsFirst(node), g.UseRegister(m.left().node()));
     550             :   } else {
     551         261 :     VisitBinop(this, node, kX64Xor);
     552             :   }
     553         305 : }
     554             : 
     555             : namespace {
     556             : 
     557          20 : bool TryMergeTruncateInt64ToInt32IntoLoad(InstructionSelector* selector,
     558          40 :                                           Node* node, Node* load) {
     559          20 :   if (load->opcode() == IrOpcode::kLoad && selector->CanCover(node, load)) {
     560          20 :     LoadRepresentation load_rep = LoadRepresentationOf(load->op());
     561          20 :     MachineRepresentation rep = load_rep.representation();
     562             :     InstructionCode opcode = kArchNop;
     563             :     switch (rep) {
     564             :       case MachineRepresentation::kBit:  // Fall through.
     565             :       case MachineRepresentation::kWord8:
     566           0 :         opcode = load_rep.IsSigned() ? kX64Movsxbl : kX64Movzxbl;
     567           0 :         break;
     568             :       case MachineRepresentation::kWord16:
     569           0 :         opcode = load_rep.IsSigned() ? kX64Movsxwl : kX64Movzxwl;
     570           0 :         break;
     571             :       case MachineRepresentation::kWord32:
     572             :       case MachineRepresentation::kWord64:
     573             :       case MachineRepresentation::kTaggedSigned:
     574             :       case MachineRepresentation::kTagged:
     575             :         opcode = kX64Movl;
     576             :         break;
     577             :       default:
     578           0 :         UNREACHABLE();
     579             :         return false;
     580             :     }
     581             :     X64OperandGenerator g(selector);
     582          20 :     InstructionOperand outputs[] = {g.DefineAsRegister(node)};
     583          20 :     size_t input_count = 0;
     584          80 :     InstructionOperand inputs[3];
     585             :     AddressingMode mode = g.GetEffectiveAddressMemoryOperand(
     586          20 :         node->InputAt(0), inputs, &input_count);
     587          20 :     opcode |= AddressingModeField::encode(mode);
     588          20 :     selector->Emit(opcode, 1, outputs, input_count, inputs);
     589             :     return true;
     590             :   }
     591             :   return false;
     592             : }
     593             : 
     594             : // Shared routine for multiple 32-bit shift operations.
     595             : // TODO(bmeurer): Merge this with VisitWord64Shift using template magic?
     596      178916 : void VisitWord32Shift(InstructionSelector* selector, Node* node,
     597             :                       ArchOpcode opcode) {
     598             :   X64OperandGenerator g(selector);
     599      178916 :   Int32BinopMatcher m(node);
     600      178916 :   Node* left = m.left().node();
     601             :   Node* right = m.right().node();
     602             : 
     603      185528 :   if (left->opcode() == IrOpcode::kTruncateInt64ToInt32 &&
     604        6612 :       selector->CanCover(node, left)) {
     605             :     left = left->InputAt(0);
     606             :   }
     607             : 
     608      178916 :   if (g.CanBeImmediate(right)) {
     609             :     selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
     610      170546 :                    g.UseImmediate(right));
     611             :   } else {
     612             :     selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
     613        8369 :                    g.UseFixed(right, rcx));
     614             :   }
     615      178916 : }
     616             : 
     617             : // Shared routine for multiple 64-bit shift operations.
     618             : // TODO(bmeurer): Merge this with VisitWord32Shift using template magic?
     619      327676 : void VisitWord64Shift(InstructionSelector* selector, Node* node,
     620             :                       ArchOpcode opcode) {
     621             :   X64OperandGenerator g(selector);
     622      327676 :   Int64BinopMatcher m(node);
     623             :   Node* left = m.left().node();
     624             :   Node* right = m.right().node();
     625             : 
     626      327675 :   if (g.CanBeImmediate(right)) {
     627             :     selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
     628      325354 :                    g.UseImmediate(right));
     629             :   } else {
     630        2324 :     if (m.right().IsWord64And()) {
     631         112 :       Int64BinopMatcher mright(right);
     632         112 :       if (mright.right().Is(0x3F)) {
     633             :         right = mright.left().node();
     634             :       }
     635             :     }
     636             :     selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
     637        2324 :                    g.UseFixed(right, rcx));
     638             :   }
     639      327677 : }
     640             : 
     641             : // Shared routine for multiple shift operations with continuation.
     642             : template <typename BinopMatcher, int Bits>
     643       26967 : bool TryVisitWordShift(InstructionSelector* selector, Node* node,
     644             :                        ArchOpcode opcode, FlagsContinuation* cont) {
     645             :   X64OperandGenerator g(selector);
     646       26967 :   BinopMatcher m(node);
     647             :   Node* left = m.left().node();
     648             :   Node* right = m.right().node();
     649             : 
     650             :   // If the shift count is 0, the flags are not affected.
     651       53934 :   if (!g.CanBeImmediate(right) ||
     652             :       (g.GetImmediateIntegerValue(right) & (Bits - 1)) == 0) {
     653             :     return false;
     654             :   }
     655       26959 :   InstructionOperand output = g.DefineSameAsFirst(node);
     656       80877 :   InstructionOperand inputs[2];
     657       26959 :   inputs[0] = g.UseRegister(left);
     658       26959 :   inputs[1] = g.UseImmediate(right);
     659       26959 :   selector->EmitWithContinuation(opcode, 1, &output, 2, inputs, cont);
     660       26959 :   return true;
     661             : }
     662             : 
     663     2417515 : void EmitLea(InstructionSelector* selector, InstructionCode opcode,
     664             :              Node* result, Node* index, int scale, Node* base,
     665             :              Node* displacement, DisplacementMode displacement_mode) {
     666             :   X64OperandGenerator g(selector);
     667             : 
     668    12087547 :   InstructionOperand inputs[4];
     669     2417515 :   size_t input_count = 0;
     670             :   AddressingMode mode =
     671             :       g.GenerateMemoryOperandInputs(index, scale, base, displacement,
     672     2417515 :                                     displacement_mode, inputs, &input_count);
     673             : 
     674             :   DCHECK_NE(0u, input_count);
     675             :   DCHECK_GE(arraysize(inputs), input_count);
     676             : 
     677     4835096 :   InstructionOperand outputs[1];
     678     2417550 :   outputs[0] = g.DefineAsRegister(result);
     679             : 
     680     2417550 :   opcode = AddressingModeField::encode(mode) | opcode;
     681             : 
     682     2417550 :   selector->Emit(opcode, 1, outputs, input_count, inputs);
     683     2417546 : }
     684             : 
     685             : }  // namespace
     686             : 
     687       41558 : void InstructionSelector::VisitWord32Shl(Node* node) {
     688       41558 :   Int32ScaleMatcher m(node, true);
     689       41562 :   if (m.matches()) {
     690             :     Node* index = node->InputAt(0);
     691       11175 :     Node* base = m.power_of_two_plus_one() ? index : nullptr;
     692             :     EmitLea(this, kX64Lea32, node, index, m.scale(), base, nullptr,
     693       11175 :             kPositiveDisplacement);
     694       52733 :     return;
     695             :   }
     696       30387 :   VisitWord32Shift(this, node, kX64Shl32);
     697             : }
     698             : 
     699      388381 : void InstructionSelector::VisitWord64Shl(Node* node) {
     700             :   X64OperandGenerator g(this);
     701      388381 :   Int64ScaleMatcher m(node, true);
     702      388387 :   if (m.matches()) {
     703             :     Node* index = node->InputAt(0);
     704       38959 :     Node* base = m.power_of_two_plus_one() ? index : nullptr;
     705             :     EmitLea(this, kX64Lea, node, index, m.scale(), base, nullptr,
     706       38959 :             kPositiveDisplacement);
     707       38958 :     return;
     708             :   } else {
     709      349428 :     Int64BinopMatcher m(node);
     710      484332 :     if ((m.left().IsChangeInt32ToInt64() ||
     711      606356 :          m.left().IsChangeUint32ToUint64()) &&
     712             :         m.right().IsInRange(32, 63)) {
     713             :       // There's no need to sign/zero-extend to 64-bit if we shift out the upper
     714             :       // 32 bits anyway.
     715             :       Emit(kX64Shl, g.DefineSameAsFirst(node),
     716             :            g.UseRegister(m.left().node()->InputAt(0)),
     717      513804 :            g.UseImmediate(m.right().node()));
     718      256887 :       return;
     719             :     }
     720             :   }
     721       92546 :   VisitWord64Shift(this, node, kX64Shl);
     722             : }
     723             : 
     724       96229 : void InstructionSelector::VisitWord32Shr(Node* node) {
     725       96229 :   VisitWord32Shift(this, node, kX64Shr32);
     726       96229 : }
     727             : 
     728             : namespace {
     729             : 
     730          19 : inline AddressingMode AddDisplacementToAddressingMode(AddressingMode mode) {
     731          19 :   switch (mode) {
     732             :     case kMode_MR:
     733             :       return kMode_MRI;
     734             :       break;
     735             :     case kMode_MR1:
     736           0 :       return kMode_MR1I;
     737             :       break;
     738             :     case kMode_MR2:
     739           0 :       return kMode_MR2I;
     740             :       break;
     741             :     case kMode_MR4:
     742           0 :       return kMode_MR4I;
     743             :       break;
     744             :     case kMode_MR8:
     745           0 :       return kMode_MR8I;
     746             :       break;
     747             :     case kMode_M1:
     748           0 :       return kMode_M1I;
     749             :       break;
     750             :     case kMode_M2:
     751           0 :       return kMode_M2I;
     752             :       break;
     753             :     case kMode_M4:
     754           0 :       return kMode_M4I;
     755             :       break;
     756             :     case kMode_M8:
     757           0 :       return kMode_M8I;
     758             :       break;
     759             :     case kMode_None:
     760             :     case kMode_MRI:
     761             :     case kMode_MR1I:
     762             :     case kMode_MR2I:
     763             :     case kMode_MR4I:
     764             :     case kMode_MR8I:
     765             :     case kMode_M1I:
     766             :     case kMode_M2I:
     767             :     case kMode_M4I:
     768             :     case kMode_M8I:
     769             :     case kMode_Root:
     770           0 :       UNREACHABLE();
     771             :   }
     772           0 :   UNREACHABLE();
     773             : }
     774             : 
     775      518948 : bool TryMatchLoadWord64AndShiftRight(InstructionSelector* selector, Node* node,
     776             :                                      InstructionCode opcode) {
     777             :   DCHECK(IrOpcode::kWord64Sar == node->opcode() ||
     778             :          IrOpcode::kWord64Shr == node->opcode());
     779             :   X64OperandGenerator g(selector);
     780      518948 :   Int64BinopMatcher m(node);
     781     1167668 :   if (selector->CanCover(m.node(), m.left().node()) && m.left().IsLoad() &&
     782             :       m.right().Is(32)) {
     783             :     DCHECK_EQ(selector->GetEffectLevel(node),
     784             :               selector->GetEffectLevel(m.left().node()));
     785             :     // Just load and sign-extend the interesting 4 bytes instead. This happens,
     786             :     // for example, when we're loading and untagging SMIs.
     787             :     BaseWithIndexAndDisplacement64Matcher mleft(m.left().node(),
     788             :                                                 AddressOption::kAllowAll);
     789      351269 :     if (mleft.matches() && (mleft.displacement() == nullptr ||
     790      175625 :                             g.CanBeImmediate(mleft.displacement()))) {
     791      175644 :       size_t input_count = 0;
     792      702567 :       InstructionOperand inputs[3];
     793             :       AddressingMode mode = g.GetEffectiveAddressMemoryOperand(
     794      175644 :           m.left().node(), inputs, &input_count);
     795      175644 :       if (mleft.displacement() == nullptr) {
     796             :         // Make sure that the addressing mode indicates the presence of an
     797             :         // immediate displacement. It seems that we never use M1 and M2, but we
     798             :         // handle them here anyways.
     799          19 :         mode = AddDisplacementToAddressingMode(mode);
     800          19 :         inputs[input_count++] = ImmediateOperand(ImmediateOperand::INLINE, 4);
     801             :       } else {
     802             :         // In the case that the base address was zero, the displacement will be
     803             :         // in a register and replacing it with an immediate is not allowed. This
     804             :         // usually only happens in dead code anyway.
     805      351250 :         if (!inputs[input_count - 1].IsImmediate()) return false;
     806             :         int32_t displacement = g.GetImmediateIntegerValue(mleft.displacement());
     807             :         inputs[input_count - 1] =
     808      351236 :             ImmediateOperand(ImmediateOperand::INLINE, displacement + 4);
     809             :       }
     810      175637 :       InstructionOperand outputs[] = {g.DefineAsRegister(node)};
     811      175637 :       InstructionCode code = opcode | AddressingModeField::encode(mode);
     812      175637 :       selector->Emit(code, 1, outputs, input_count, inputs);
     813      175637 :       return true;
     814             :     }
     815             :   }
     816             :   return false;
     817             : }
     818             : 
     819             : }  // namespace
     820             : 
     821       47377 : void InstructionSelector::VisitWord64Shr(Node* node) {
     822       94754 :   if (TryMatchLoadWord64AndShiftRight(this, node, kX64Movl)) return;
     823       47376 :   VisitWord64Shift(this, node, kX64Shr);
     824             : }
     825             : 
     826       25676 : void InstructionSelector::VisitWord32Sar(Node* node) {
     827             :   X64OperandGenerator g(this);
     828       25676 :   Int32BinopMatcher m(node);
     829       45414 :   if (CanCover(m.node(), m.left().node()) && m.left().IsWord32Shl()) {
     830        1096 :     Int32BinopMatcher mleft(m.left().node());
     831        1574 :     if (mleft.right().Is(16) && m.right().Is(16)) {
     832         478 :       Emit(kX64Movsxwl, g.DefineAsRegister(node), g.Use(mleft.left().node()));
     833         478 :       return;
     834        1225 :     } else if (mleft.right().Is(24) && m.right().Is(24)) {
     835         607 :       Emit(kX64Movsxbl, g.DefineAsRegister(node), g.Use(mleft.left().node()));
     836         607 :       return;
     837             :     }
     838             :   }
     839       24591 :   VisitWord32Shift(this, node, kX64Sar32);
     840             : }
     841             : 
     842      203101 : void InstructionSelector::VisitWord64Sar(Node* node) {
     843      406202 :   if (TryMatchLoadWord64AndShiftRight(this, node, kX64Movsxlq)) return;
     844      187645 :   VisitWord64Shift(this, node, kX64Sar);
     845             : }
     846             : 
     847       27709 : void InstructionSelector::VisitWord32Ror(Node* node) {
     848       27709 :   VisitWord32Shift(this, node, kX64Ror32);
     849       27709 : }
     850             : 
     851         112 : void InstructionSelector::VisitWord64Ror(Node* node) {
     852         112 :   VisitWord64Shift(this, node, kX64Ror);
     853         112 : }
     854             : 
     855           0 : void InstructionSelector::VisitWord32ReverseBits(Node* node) { UNREACHABLE(); }
     856             : 
     857           0 : void InstructionSelector::VisitWord64ReverseBits(Node* node) { UNREACHABLE(); }
     858             : 
     859          12 : void InstructionSelector::VisitWord64ReverseBytes(Node* node) {
     860             :   X64OperandGenerator g(this);
     861          12 :   Emit(kX64Bswap, g.DefineSameAsFirst(node), g.UseRegister(node->InputAt(0)));
     862          12 : }
     863             : 
     864          44 : void InstructionSelector::VisitWord32ReverseBytes(Node* node) {
     865             :   X64OperandGenerator g(this);
     866          44 :   Emit(kX64Bswap32, g.DefineSameAsFirst(node), g.UseRegister(node->InputAt(0)));
     867          44 : }
     868             : 
     869      266966 : void InstructionSelector::VisitInt32Add(Node* node) {
     870             :   X64OperandGenerator g(this);
     871             : 
     872             :   // Try to match the Add to a leal pattern
     873      266966 :   BaseWithIndexAndDisplacement32Matcher m(node);
     874      800901 :   if (m.matches() &&
     875      494511 :       (m.displacement() == nullptr || g.CanBeImmediate(m.displacement()))) {
     876             :     EmitLea(this, kX64Lea32, node, m.index(), m.scale(), m.base(),
     877      266967 :             m.displacement(), m.displacement_mode());
     878      533950 :     return;
     879             :   }
     880             : 
     881             :   // No leal pattern match, use addl
     882           0 :   VisitBinop(this, node, kX64Add32);
     883             : }
     884             : 
     885     2122377 : void InstructionSelector::VisitInt64Add(Node* node) {
     886             :   X64OperandGenerator g(this);
     887             : 
     888             :   // Try to match the Add to a leaq pattern
     889     2122377 :   BaseWithIndexAndDisplacement64Matcher m(node);
     890     6367248 :   if (m.matches() &&
     891     3936402 :       (m.displacement() == nullptr || g.CanBeImmediate(m.displacement()))) {
     892             :     EmitLea(this, kX64Lea, node, m.index(), m.scale(), m.base(),
     893     2077670 :             m.displacement(), m.displacement_mode());
     894     4200052 :     return;
     895             :   }
     896             : 
     897             :   // No leal pattern match, use addq
     898       44745 :   VisitBinop(this, node, kX64Add);
     899             : }
     900             : 
     901       26896 : void InstructionSelector::VisitInt64AddWithOverflow(Node* node) {
     902       26896 :   if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
     903             :     FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
     904       53792 :     return VisitBinop(this, node, kX64Add, &cont);
     905             :   }
     906             :   FlagsContinuation cont;
     907           0 :   VisitBinop(this, node, kX64Add, &cont);
     908             : }
     909             : 
     910       49712 : void InstructionSelector::VisitInt32Sub(Node* node) {
     911             :   X64OperandGenerator g(this);
     912             :   DCHECK_EQ(node->InputCount(), 2);
     913       49712 :   Node* input1 = node->InputAt(0);
     914             :   Node* input2 = node->InputAt(1);
     915       52488 :   if (input1->opcode() == IrOpcode::kTruncateInt64ToInt32 &&
     916        2776 :       g.CanBeImmediate(input2)) {
     917             :     int32_t imm = g.GetImmediateIntegerValue(input2);
     918           8 :     InstructionOperand int64_input = g.UseRegister(input1->InputAt(0));
     919           8 :     if (imm == 0) {
     920             :       // Emit "movl" for subtraction of 0.
     921           8 :       Emit(kX64Movl, g.DefineAsRegister(node), int64_input);
     922             :     } else {
     923             :       // Omit truncation and turn subtractions of constant values into immediate
     924             :       // "leal" instructions by negating the value.
     925             :       Emit(kX64Lea32 | AddressingModeField::encode(kMode_MRI),
     926           0 :            g.DefineAsRegister(node), int64_input, g.TempImmediate(-imm));
     927             :     }
     928       49713 :     return;
     929             :   }
     930             : 
     931       49704 :   Int32BinopMatcher m(node);
     932       49705 :   if (m.left().Is(0)) {
     933        5930 :     Emit(kX64Neg32, g.DefineSameAsFirst(node), g.UseRegister(m.right().node()));
     934       43775 :   } else if (m.right().Is(0)) {
     935             :     // TODO(jarin): We should be able to use {EmitIdentity} here
     936             :     // (https://crbug.com/v8/7947).
     937         480 :     Emit(kArchNop, g.DefineSameAsFirst(node), g.Use(m.left().node()));
     938       66149 :   } else if (m.right().HasValue() && g.CanBeImmediate(m.right().node())) {
     939             :     // Turn subtractions of constant values into immediate "leal" instructions
     940             :     // by negating the value.
     941             :     Emit(kX64Lea32 | AddressingModeField::encode(kMode_MRI),
     942             :          g.DefineAsRegister(node), g.UseRegister(m.left().node()),
     943       68562 :          g.TempImmediate(base::NegateWithWraparound(m.right().Value())));
     944             :   } else {
     945       20441 :     VisitBinop(this, node, kX64Sub32);
     946             :   }
     947             : }
     948             : 
     949       33268 : void InstructionSelector::VisitInt64Sub(Node* node) {
     950             :   X64OperandGenerator g(this);
     951       33268 :   Int64BinopMatcher m(node);
     952       33269 :   if (m.left().Is(0)) {
     953        9724 :     Emit(kX64Neg, g.DefineSameAsFirst(node), g.UseRegister(m.right().node()));
     954             :   } else {
     955       25722 :     if (m.right().HasValue() && g.CanBeImmediate(m.right().node())) {
     956             :       // Turn subtractions of constant values into immediate "leaq" instructions
     957             :       // by negating the value.
     958             :       Emit(kX64Lea | AddressingModeField::encode(kMode_MRI),
     959             :            g.DefineAsRegister(node), g.UseRegister(m.left().node()),
     960        4292 :            g.TempImmediate(-static_cast<int32_t>(m.right().Value())));
     961       35414 :       return;
     962             :     }
     963       21400 :     VisitBinop(this, node, kX64Sub);
     964             :   }
     965             : }
     966             : 
     967       26896 : void InstructionSelector::VisitInt64SubWithOverflow(Node* node) {
     968       26896 :   if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
     969             :     FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
     970       53792 :     return VisitBinop(this, node, kX64Sub, &cont);
     971             :   }
     972             :   FlagsContinuation cont;
     973           0 :   VisitBinop(this, node, kX64Sub, &cont);
     974             : }
     975             : 
     976             : namespace {
     977             : 
     978       56273 : void VisitMul(InstructionSelector* selector, Node* node, ArchOpcode opcode) {
     979             :   X64OperandGenerator g(selector);
     980       56273 :   Int32BinopMatcher m(node);
     981             :   Node* left = m.left().node();
     982             :   Node* right = m.right().node();
     983       56273 :   if (g.CanBeImmediate(right)) {
     984             :     selector->Emit(opcode, g.DefineAsRegister(node), g.Use(left),
     985       49688 :                    g.UseImmediate(right));
     986             :   } else {
     987        6585 :     if (g.CanBeBetterLeftOperand(right)) {
     988             :       std::swap(left, right);
     989             :     }
     990             :     selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
     991        6585 :                    g.Use(right));
     992             :   }
     993       56274 : }
     994             : 
     995        5202 : void VisitMulHigh(InstructionSelector* selector, Node* node,
     996             :                   ArchOpcode opcode) {
     997             :   X64OperandGenerator g(selector);
     998             :   Node* left = node->InputAt(0);
     999             :   Node* right = node->InputAt(1);
    1000        5202 :   if (selector->IsLive(left) && !selector->IsLive(right)) {
    1001             :     std::swap(left, right);
    1002             :   }
    1003        5202 :   InstructionOperand temps[] = {g.TempRegister(rax)};
    1004             :   // TODO(turbofan): We use UseUniqueRegister here to improve register
    1005             :   // allocation.
    1006             :   selector->Emit(opcode, g.DefineAsFixed(node, rdx), g.UseFixed(left, rax),
    1007        5202 :                  g.UseUniqueRegister(right), arraysize(temps), temps);
    1008        5202 : }
    1009             : 
    1010       32912 : void VisitDiv(InstructionSelector* selector, Node* node, ArchOpcode opcode) {
    1011             :   X64OperandGenerator g(selector);
    1012       32912 :   InstructionOperand temps[] = {g.TempRegister(rdx)};
    1013             :   selector->Emit(
    1014             :       opcode, g.DefineAsFixed(node, rax), g.UseFixed(node->InputAt(0), rax),
    1015       65825 :       g.UseUniqueRegister(node->InputAt(1)), arraysize(temps), temps);
    1016       32912 : }
    1017             : 
    1018       32690 : void VisitMod(InstructionSelector* selector, Node* node, ArchOpcode opcode) {
    1019             :   X64OperandGenerator g(selector);
    1020       32690 :   InstructionOperand temps[] = {g.TempRegister(rax)};
    1021             :   selector->Emit(
    1022             :       opcode, g.DefineAsFixed(node, rdx), g.UseFixed(node->InputAt(0), rax),
    1023       65380 :       g.UseUniqueRegister(node->InputAt(1)), arraysize(temps), temps);
    1024       32689 : }
    1025             : 
    1026             : }  // namespace
    1027             : 
    1028       57696 : void InstructionSelector::VisitInt32Mul(Node* node) {
    1029       57696 :   Int32ScaleMatcher m(node, true);
    1030       57696 :   if (m.matches()) {
    1031             :     Node* index = node->InputAt(0);
    1032       22765 :     Node* base = m.power_of_two_plus_one() ? index : nullptr;
    1033             :     EmitLea(this, kX64Lea32, node, index, m.scale(), base, nullptr,
    1034       22765 :             kPositiveDisplacement);
    1035       80461 :     return;
    1036             :   }
    1037       34931 :   VisitMul(this, node, kX64Imul32);
    1038             : }
    1039             : 
    1040       13924 : void InstructionSelector::VisitInt32MulWithOverflow(Node* node) {
    1041             :   // TODO(mvstanton): Use Int32ScaleMatcher somehow.
    1042       13924 :   if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    1043             :     FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    1044       27848 :     return VisitBinop(this, node, kX64Imul32, &cont);
    1045             :   }
    1046             :   FlagsContinuation cont;
    1047           0 :   VisitBinop(this, node, kX64Imul32, &cont);
    1048             : }
    1049             : 
    1050       21342 : void InstructionSelector::VisitInt64Mul(Node* node) {
    1051       21342 :   VisitMul(this, node, kX64Imul);
    1052       21343 : }
    1053             : 
    1054        3764 : void InstructionSelector::VisitInt32MulHigh(Node* node) {
    1055        3764 :   VisitMulHigh(this, node, kX64ImulHigh32);
    1056        3764 : }
    1057             : 
    1058       15712 : void InstructionSelector::VisitInt32Div(Node* node) {
    1059       15712 :   VisitDiv(this, node, kX64Idiv32);
    1060       15712 : }
    1061             : 
    1062        1852 : void InstructionSelector::VisitInt64Div(Node* node) {
    1063        1852 :   VisitDiv(this, node, kX64Idiv);
    1064        1852 : }
    1065             : 
    1066       14453 : void InstructionSelector::VisitUint32Div(Node* node) {
    1067       14453 :   VisitDiv(this, node, kX64Udiv32);
    1068       14453 : }
    1069             : 
    1070         896 : void InstructionSelector::VisitUint64Div(Node* node) {
    1071         896 :   VisitDiv(this, node, kX64Udiv);
    1072         896 : }
    1073             : 
    1074       16288 : void InstructionSelector::VisitInt32Mod(Node* node) {
    1075       16288 :   VisitMod(this, node, kX64Idiv32);
    1076       16288 : }
    1077             : 
    1078         880 : void InstructionSelector::VisitInt64Mod(Node* node) {
    1079         880 :   VisitMod(this, node, kX64Idiv);
    1080         880 : }
    1081             : 
    1082       14650 : void InstructionSelector::VisitUint32Mod(Node* node) {
    1083       14650 :   VisitMod(this, node, kX64Udiv32);
    1084       14650 : }
    1085             : 
    1086         872 : void InstructionSelector::VisitUint64Mod(Node* node) {
    1087         872 :   VisitMod(this, node, kX64Udiv);
    1088         872 : }
    1089             : 
    1090        1438 : void InstructionSelector::VisitUint32MulHigh(Node* node) {
    1091        1438 :   VisitMulHigh(this, node, kX64UmulHigh32);
    1092        1438 : }
    1093             : 
    1094          52 : void InstructionSelector::VisitTryTruncateFloat32ToInt64(Node* node) {
    1095             :   X64OperandGenerator g(this);
    1096          52 :   InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
    1097         156 :   InstructionOperand outputs[2];
    1098             :   size_t output_count = 0;
    1099          52 :   outputs[output_count++] = g.DefineAsRegister(node);
    1100             : 
    1101          52 :   Node* success_output = NodeProperties::FindProjection(node, 1);
    1102          52 :   if (success_output) {
    1103          48 :     outputs[output_count++] = g.DefineAsRegister(success_output);
    1104             :   }
    1105             : 
    1106          52 :   Emit(kSSEFloat32ToInt64, output_count, outputs, 1, inputs);
    1107          52 : }
    1108             : 
    1109         608 : void InstructionSelector::VisitTryTruncateFloat64ToInt64(Node* node) {
    1110             :   X64OperandGenerator g(this);
    1111         608 :   InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
    1112        1827 :   InstructionOperand outputs[2];
    1113             :   size_t output_count = 0;
    1114         609 :   outputs[output_count++] = g.DefineAsRegister(node);
    1115             : 
    1116         612 :   Node* success_output = NodeProperties::FindProjection(node, 1);
    1117         611 :   if (success_output) {
    1118         608 :     outputs[output_count++] = g.DefineAsRegister(success_output);
    1119             :   }
    1120             : 
    1121         611 :   Emit(kSSEFloat64ToInt64, output_count, outputs, 1, inputs);
    1122         612 : }
    1123             : 
    1124          52 : void InstructionSelector::VisitTryTruncateFloat32ToUint64(Node* node) {
    1125             :   X64OperandGenerator g(this);
    1126          52 :   InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
    1127         156 :   InstructionOperand outputs[2];
    1128             :   size_t output_count = 0;
    1129          52 :   outputs[output_count++] = g.DefineAsRegister(node);
    1130             : 
    1131          52 :   Node* success_output = NodeProperties::FindProjection(node, 1);
    1132          52 :   if (success_output) {
    1133          48 :     outputs[output_count++] = g.DefineAsRegister(success_output);
    1134             :   }
    1135             : 
    1136          52 :   Emit(kSSEFloat32ToUint64, output_count, outputs, 1, inputs);
    1137          52 : }
    1138             : 
    1139          60 : void InstructionSelector::VisitTryTruncateFloat64ToUint64(Node* node) {
    1140             :   X64OperandGenerator g(this);
    1141          60 :   InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
    1142         180 :   InstructionOperand outputs[2];
    1143             :   size_t output_count = 0;
    1144          60 :   outputs[output_count++] = g.DefineAsRegister(node);
    1145             : 
    1146          60 :   Node* success_output = NodeProperties::FindProjection(node, 1);
    1147          60 :   if (success_output) {
    1148          56 :     outputs[output_count++] = g.DefineAsRegister(success_output);
    1149             :   }
    1150             : 
    1151          60 :   Emit(kSSEFloat64ToUint64, output_count, outputs, 1, inputs);
    1152          60 : }
    1153             : 
    1154      355844 : void InstructionSelector::VisitChangeInt32ToInt64(Node* node) {
    1155             :   X64OperandGenerator g(this);
    1156      627899 :   Node* const value = node->InputAt(0);
    1157      355844 :   if (value->opcode() == IrOpcode::kLoad && CanCover(node, value)) {
    1158      272055 :     LoadRepresentation load_rep = LoadRepresentationOf(value->op());
    1159      272055 :     MachineRepresentation rep = load_rep.representation();
    1160             :     InstructionCode opcode = kArchNop;
    1161      272055 :     switch (rep) {
    1162             :       case MachineRepresentation::kBit:  // Fall through.
    1163             :       case MachineRepresentation::kWord8:
    1164       27357 :         opcode = load_rep.IsSigned() ? kX64Movsxbq : kX64Movzxbq;
    1165       27357 :         break;
    1166             :       case MachineRepresentation::kWord16:
    1167        9641 :         opcode = load_rep.IsSigned() ? kX64Movsxwq : kX64Movzxwq;
    1168        9641 :         break;
    1169             :       case MachineRepresentation::kWord32:
    1170      235057 :         opcode = load_rep.IsSigned() ? kX64Movsxlq : kX64Movl;
    1171      235057 :         break;
    1172             :       default:
    1173           0 :         UNREACHABLE();
    1174             :         return;
    1175             :     }
    1176      272055 :     InstructionOperand outputs[] = {g.DefineAsRegister(node)};
    1177      272055 :     size_t input_count = 0;
    1178     1088220 :     InstructionOperand inputs[3];
    1179             :     AddressingMode mode = g.GetEffectiveAddressMemoryOperand(
    1180      272055 :         node->InputAt(0), inputs, &input_count);
    1181      272055 :     opcode |= AddressingModeField::encode(mode);
    1182      272055 :     Emit(opcode, 1, outputs, input_count, inputs);
    1183             :   } else {
    1184       83789 :     Emit(kX64Movsxlq, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
    1185             :   }
    1186      355844 : }
    1187             : 
    1188             : namespace {
    1189             : 
    1190      281246 : bool ZeroExtendsWord32ToWord64(Node* node) {
    1191      281246 :   switch (node->opcode()) {
    1192             :     case IrOpcode::kWord32And:
    1193             :     case IrOpcode::kWord32Or:
    1194             :     case IrOpcode::kWord32Xor:
    1195             :     case IrOpcode::kWord32Shl:
    1196             :     case IrOpcode::kWord32Shr:
    1197             :     case IrOpcode::kWord32Sar:
    1198             :     case IrOpcode::kWord32Ror:
    1199             :     case IrOpcode::kWord32Equal:
    1200             :     case IrOpcode::kInt32Add:
    1201             :     case IrOpcode::kInt32Sub:
    1202             :     case IrOpcode::kInt32Mul:
    1203             :     case IrOpcode::kInt32MulHigh:
    1204             :     case IrOpcode::kInt32Div:
    1205             :     case IrOpcode::kInt32LessThan:
    1206             :     case IrOpcode::kInt32LessThanOrEqual:
    1207             :     case IrOpcode::kInt32Mod:
    1208             :     case IrOpcode::kUint32Div:
    1209             :     case IrOpcode::kUint32LessThan:
    1210             :     case IrOpcode::kUint32LessThanOrEqual:
    1211             :     case IrOpcode::kUint32Mod:
    1212             :     case IrOpcode::kUint32MulHigh:
    1213             :     case IrOpcode::kTruncateInt64ToInt32:
    1214             :       // These 32-bit operations implicitly zero-extend to 64-bit on x64, so the
    1215             :       // zero-extension is a no-op.
    1216             :       return true;
    1217             :     case IrOpcode::kProjection: {
    1218         511 :       Node* const value = node->InputAt(0);
    1219         511 :       switch (value->opcode()) {
    1220             :         case IrOpcode::kInt32AddWithOverflow:
    1221             :         case IrOpcode::kInt32SubWithOverflow:
    1222             :         case IrOpcode::kInt32MulWithOverflow:
    1223             :           return true;
    1224             :         default:
    1225           0 :           return false;
    1226             :       }
    1227             :     }
    1228             :     case IrOpcode::kLoad:
    1229             :     case IrOpcode::kProtectedLoad:
    1230             :     case IrOpcode::kPoisonedLoad: {
    1231             :       // The movzxbl/movsxbl/movzxwl/movsxwl/movl operations implicitly
    1232             :       // zero-extend to 64-bit on x64, so the zero-extension is a no-op.
    1233      160275 :       LoadRepresentation load_rep = LoadRepresentationOf(node->op());
    1234      160276 :       switch (load_rep.representation()) {
    1235             :         case MachineRepresentation::kWord8:
    1236             :         case MachineRepresentation::kWord16:
    1237             :         case MachineRepresentation::kWord32:
    1238             :           return true;
    1239             :         default:
    1240         112 :           return false;
    1241             :       }
    1242             :     }
    1243             :     default:
    1244       34051 :       return false;
    1245             :   }
    1246             : }
    1247             : 
    1248             : }  // namespace
    1249             : 
    1250      281246 : void InstructionSelector::VisitChangeUint32ToUint64(Node* node) {
    1251             :   X64OperandGenerator g(this);
    1252             :   Node* value = node->InputAt(0);
    1253      281246 :   if (ZeroExtendsWord32ToWord64(value)) {
    1254             :     // These 32-bit operations implicitly zero-extend to 64-bit on x64, so the
    1255             :     // zero-extension is a no-op.
    1256      528445 :     return EmitIdentity(node);
    1257             :   }
    1258       34181 :   Emit(kX64Movl, g.DefineAsRegister(node), g.Use(value));
    1259             : }
    1260             : 
    1261             : namespace {
    1262             : 
    1263      657327 : void VisitRO(InstructionSelector* selector, Node* node,
    1264             :              InstructionCode opcode) {
    1265             :   X64OperandGenerator g(selector);
    1266      657327 :   selector->Emit(opcode, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
    1267      657342 : }
    1268             : 
    1269       95755 : void VisitRR(InstructionSelector* selector, Node* node,
    1270             :              InstructionCode opcode) {
    1271             :   X64OperandGenerator g(selector);
    1272             :   selector->Emit(opcode, g.DefineAsRegister(node),
    1273       95755 :                  g.UseRegister(node->InputAt(0)));
    1274       95758 : }
    1275             : 
    1276         716 : void VisitRRO(InstructionSelector* selector, Node* node,
    1277             :               InstructionCode opcode) {
    1278             :   X64OperandGenerator g(selector);
    1279             :   selector->Emit(opcode, g.DefineSameAsFirst(node),
    1280        1433 :                  g.UseRegister(node->InputAt(0)), g.Use(node->InputAt(1)));
    1281         717 : }
    1282             : 
    1283      125329 : void VisitFloatBinop(InstructionSelector* selector, Node* node,
    1284             :                      ArchOpcode avx_opcode, ArchOpcode sse_opcode) {
    1285             :   X64OperandGenerator g(selector);
    1286      125329 :   InstructionOperand operand0 = g.UseRegister(node->InputAt(0));
    1287      125339 :   InstructionOperand operand1 = g.Use(node->InputAt(1));
    1288      125330 :   if (selector->IsSupported(AVX)) {
    1289      124716 :     selector->Emit(avx_opcode, g.DefineAsRegister(node), operand0, operand1);
    1290             :   } else {
    1291         614 :     selector->Emit(sse_opcode, g.DefineSameAsFirst(node), operand0, operand1);
    1292             :   }
    1293      125325 : }
    1294             : 
    1295       10592 : void VisitFloatUnop(InstructionSelector* selector, Node* node, Node* input,
    1296             :                     ArchOpcode avx_opcode, ArchOpcode sse_opcode) {
    1297             :   X64OperandGenerator g(selector);
    1298       10592 :   if (selector->IsSupported(AVX)) {
    1299       10508 :     selector->Emit(avx_opcode, g.DefineAsRegister(node), g.Use(input));
    1300             :   } else {
    1301          84 :     selector->Emit(sse_opcode, g.DefineSameAsFirst(node), g.UseRegister(input));
    1302             :   }
    1303       10592 : }
    1304             : 
    1305             : }  // namespace
    1306             : 
    1307             : #define RO_OP_LIST(V)                                                    \
    1308             :   V(Word64Clz, kX64Lzcnt)                                                \
    1309             :   V(Word32Clz, kX64Lzcnt32)                                              \
    1310             :   V(Word64Ctz, kX64Tzcnt)                                                \
    1311             :   V(Word32Ctz, kX64Tzcnt32)                                              \
    1312             :   V(Word64Popcnt, kX64Popcnt)                                            \
    1313             :   V(Word32Popcnt, kX64Popcnt32)                                          \
    1314             :   V(Float64Sqrt, kSSEFloat64Sqrt)                                        \
    1315             :   V(Float32Sqrt, kSSEFloat32Sqrt)                                        \
    1316             :   V(ChangeFloat64ToInt32, kSSEFloat64ToInt32)                            \
    1317             :   V(ChangeFloat64ToInt64, kSSEFloat64ToInt64)                            \
    1318             :   V(ChangeFloat64ToUint32, kSSEFloat64ToUint32 | MiscField::encode(1))   \
    1319             :   V(TruncateFloat64ToInt64, kSSEFloat64ToInt64)                          \
    1320             :   V(TruncateFloat64ToUint32, kSSEFloat64ToUint32 | MiscField::encode(0)) \
    1321             :   V(ChangeFloat64ToUint64, kSSEFloat64ToUint64)                          \
    1322             :   V(TruncateFloat64ToFloat32, kSSEFloat64ToFloat32)                      \
    1323             :   V(ChangeFloat32ToFloat64, kSSEFloat32ToFloat64)                        \
    1324             :   V(TruncateFloat32ToInt32, kSSEFloat32ToInt32)                          \
    1325             :   V(TruncateFloat32ToUint32, kSSEFloat32ToUint32)                        \
    1326             :   V(ChangeInt32ToFloat64, kSSEInt32ToFloat64)                            \
    1327             :   V(ChangeInt64ToFloat64, kSSEInt64ToFloat64)                            \
    1328             :   V(ChangeUint32ToFloat64, kSSEUint32ToFloat64)                          \
    1329             :   V(RoundFloat64ToInt32, kSSEFloat64ToInt32)                             \
    1330             :   V(RoundInt32ToFloat32, kSSEInt32ToFloat32)                             \
    1331             :   V(RoundInt64ToFloat32, kSSEInt64ToFloat32)                             \
    1332             :   V(RoundUint64ToFloat32, kSSEUint64ToFloat32)                           \
    1333             :   V(RoundInt64ToFloat64, kSSEInt64ToFloat64)                             \
    1334             :   V(RoundUint64ToFloat64, kSSEUint64ToFloat64)                           \
    1335             :   V(RoundUint32ToFloat32, kSSEUint32ToFloat32)                           \
    1336             :   V(BitcastFloat32ToInt32, kX64BitcastFI)                                \
    1337             :   V(BitcastFloat64ToInt64, kX64BitcastDL)                                \
    1338             :   V(BitcastInt32ToFloat32, kX64BitcastIF)                                \
    1339             :   V(BitcastInt64ToFloat64, kX64BitcastLD)                                \
    1340             :   V(Float64ExtractLowWord32, kSSEFloat64ExtractLowWord32)                \
    1341             :   V(Float64ExtractHighWord32, kSSEFloat64ExtractHighWord32)              \
    1342             :   V(SignExtendWord8ToInt32, kX64Movsxbl)                                 \
    1343             :   V(SignExtendWord16ToInt32, kX64Movsxwl)                                \
    1344             :   V(SignExtendWord8ToInt64, kX64Movsxbq)                                 \
    1345             :   V(SignExtendWord16ToInt64, kX64Movsxwq)                                \
    1346             :   V(SignExtendWord32ToInt64, kX64Movsxlq)
    1347             : 
    1348             : #define RR_OP_LIST(V)                                                         \
    1349             :   V(Float32RoundDown, kSSEFloat32Round | MiscField::encode(kRoundDown))       \
    1350             :   V(Float64RoundDown, kSSEFloat64Round | MiscField::encode(kRoundDown))       \
    1351             :   V(Float32RoundUp, kSSEFloat32Round | MiscField::encode(kRoundUp))           \
    1352             :   V(Float64RoundUp, kSSEFloat64Round | MiscField::encode(kRoundUp))           \
    1353             :   V(Float32RoundTruncate, kSSEFloat32Round | MiscField::encode(kRoundToZero)) \
    1354             :   V(Float64RoundTruncate, kSSEFloat64Round | MiscField::encode(kRoundToZero)) \
    1355             :   V(Float32RoundTiesEven,                                                     \
    1356             :     kSSEFloat32Round | MiscField::encode(kRoundToNearest))                    \
    1357             :   V(Float64RoundTiesEven, kSSEFloat64Round | MiscField::encode(kRoundToNearest))
    1358             : 
    1359             : #define RO_VISITOR(Name, opcode)                      \
    1360             :   void InstructionSelector::Visit##Name(Node* node) { \
    1361             :     VisitRO(this, node, opcode);                      \
    1362             :   }
    1363      657332 : RO_OP_LIST(RO_VISITOR)
    1364             : #undef RO_VISITOR
    1365             : #undef RO_OP_LIST
    1366             : 
    1367             : #define RR_VISITOR(Name, opcode)                      \
    1368             :   void InstructionSelector::Visit##Name(Node* node) { \
    1369             :     VisitRR(this, node, opcode);                      \
    1370             :   }
    1371       43418 : RR_OP_LIST(RR_VISITOR)
    1372             : #undef RR_VISITOR
    1373             : #undef RR_OP_LIST
    1374             : 
    1375       52339 : void InstructionSelector::VisitTruncateFloat64ToWord32(Node* node) {
    1376       52339 :   VisitRR(this, node, kArchTruncateDoubleToI);
    1377       52340 : }
    1378             : 
    1379      493384 : void InstructionSelector::VisitTruncateInt64ToInt32(Node* node) {
    1380             :   // We rely on the fact that TruncateInt64ToInt32 zero extends the
    1381             :   // value (see ZeroExtendsWord32ToWord64). So all code paths here
    1382             :   // have to satisfy that condition.
    1383             :   X64OperandGenerator g(this);
    1384      472544 :   Node* value = node->InputAt(0);
    1385      493384 :   if (CanCover(node, value)) {
    1386      472544 :     switch (value->opcode()) {
    1387             :       case IrOpcode::kWord64Sar:
    1388             :       case IrOpcode::kWord64Shr: {
    1389      468748 :         Int64BinopMatcher m(value);
    1390      468746 :         if (m.right().Is(32)) {
    1391      736734 :           if (CanCoverTransitively(node, value, value->InputAt(0)) &&
    1392      268476 :               TryMatchLoadWord64AndShiftRight(this, value, kX64Movl)) {
    1393      628437 :             return EmitIdentity(node);
    1394             :           }
    1395             :           Emit(kX64Shr, g.DefineSameAsFirst(node),
    1396      616162 :                g.UseRegister(m.left().node()), g.TempImmediate(32));
    1397      308078 :           return;
    1398             :         }
    1399         488 :         break;
    1400             :       }
    1401             :       case IrOpcode::kLoad: {
    1402          20 :         if (TryMergeTruncateInt64ToInt32IntoLoad(this, node, value)) {
    1403             :           return;
    1404             :         }
    1405             :         break;
    1406             :       }
    1407             :       default:
    1408             :         break;
    1409             :     }
    1410             :   }
    1411       25111 :   Emit(kX64Movl, g.DefineAsRegister(node), g.Use(value));
    1412             : }
    1413             : 
    1414        1745 : void InstructionSelector::VisitFloat32Add(Node* node) {
    1415        1745 :   VisitFloatBinop(this, node, kAVXFloat32Add, kSSEFloat32Add);
    1416        1745 : }
    1417             : 
    1418        2552 : void InstructionSelector::VisitFloat32Sub(Node* node) {
    1419        2552 :   VisitFloatBinop(this, node, kAVXFloat32Sub, kSSEFloat32Sub);
    1420        2553 : }
    1421             : 
    1422         873 : void InstructionSelector::VisitFloat32Mul(Node* node) {
    1423         873 :   VisitFloatBinop(this, node, kAVXFloat32Mul, kSSEFloat32Mul);
    1424         873 : }
    1425             : 
    1426         353 : void InstructionSelector::VisitFloat32Div(Node* node) {
    1427         353 :   VisitFloatBinop(this, node, kAVXFloat32Div, kSSEFloat32Div);
    1428         353 : }
    1429             : 
    1430          68 : void InstructionSelector::VisitFloat32Abs(Node* node) {
    1431          68 :   VisitFloatUnop(this, node, node->InputAt(0), kAVXFloat32Abs, kSSEFloat32Abs);
    1432          68 : }
    1433             : 
    1434          66 : void InstructionSelector::VisitFloat32Max(Node* node) {
    1435          66 :   VisitRRO(this, node, kSSEFloat32Max);
    1436          66 : }
    1437             : 
    1438          66 : void InstructionSelector::VisitFloat32Min(Node* node) {
    1439          66 :   VisitRRO(this, node, kSSEFloat32Min);
    1440          66 : }
    1441             : 
    1442       80197 : void InstructionSelector::VisitFloat64Add(Node* node) {
    1443       80197 :   VisitFloatBinop(this, node, kAVXFloat64Add, kSSEFloat64Add);
    1444       80203 : }
    1445             : 
    1446       15161 : void InstructionSelector::VisitFloat64Sub(Node* node) {
    1447       15161 :   VisitFloatBinop(this, node, kAVXFloat64Sub, kSSEFloat64Sub);
    1448       15162 : }
    1449             : 
    1450       12542 : void InstructionSelector::VisitFloat64Mul(Node* node) {
    1451       12542 :   VisitFloatBinop(this, node, kAVXFloat64Mul, kSSEFloat64Mul);
    1452       12542 : }
    1453             : 
    1454       11927 : void InstructionSelector::VisitFloat64Div(Node* node) {
    1455       11927 :   VisitFloatBinop(this, node, kAVXFloat64Div, kSSEFloat64Div);
    1456       11927 : }
    1457             : 
    1458        1614 : void InstructionSelector::VisitFloat64Mod(Node* node) {
    1459             :   X64OperandGenerator g(this);
    1460        1614 :   InstructionOperand temps[] = {g.TempRegister(rax)};
    1461             :   Emit(kSSEFloat64Mod, g.DefineSameAsFirst(node),
    1462             :        g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)), 1,
    1463        3228 :        temps);
    1464        1614 : }
    1465             : 
    1466         250 : void InstructionSelector::VisitFloat64Max(Node* node) {
    1467         250 :   VisitRRO(this, node, kSSEFloat64Max);
    1468         250 : }
    1469             : 
    1470         335 : void InstructionSelector::VisitFloat64Min(Node* node) {
    1471         335 :   VisitRRO(this, node, kSSEFloat64Min);
    1472         335 : }
    1473             : 
    1474         623 : void InstructionSelector::VisitFloat64Abs(Node* node) {
    1475         623 :   VisitFloatUnop(this, node, node->InputAt(0), kAVXFloat64Abs, kSSEFloat64Abs);
    1476         623 : }
    1477             : 
    1478           0 : void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
    1479           0 :   UNREACHABLE();
    1480             : }
    1481             : 
    1482         169 : void InstructionSelector::VisitFloat32Neg(Node* node) {
    1483         169 :   VisitFloatUnop(this, node, node->InputAt(0), kAVXFloat32Neg, kSSEFloat32Neg);
    1484         169 : }
    1485             : 
    1486        9732 : void InstructionSelector::VisitFloat64Neg(Node* node) {
    1487        9732 :   VisitFloatUnop(this, node, node->InputAt(0), kAVXFloat64Neg, kSSEFloat64Neg);
    1488        9732 : }
    1489             : 
    1490         465 : void InstructionSelector::VisitFloat64Ieee754Binop(Node* node,
    1491             :                                                    InstructionCode opcode) {
    1492             :   X64OperandGenerator g(this);
    1493             :   Emit(opcode, g.DefineAsFixed(node, xmm0), g.UseFixed(node->InputAt(0), xmm0),
    1494             :        g.UseFixed(node->InputAt(1), xmm1))
    1495         930 :       ->MarkAsCall();
    1496         465 : }
    1497             : 
    1498        2802 : void InstructionSelector::VisitFloat64Ieee754Unop(Node* node,
    1499             :                                                   InstructionCode opcode) {
    1500             :   X64OperandGenerator g(this);
    1501             :   Emit(opcode, g.DefineAsFixed(node, xmm0), g.UseFixed(node->InputAt(0), xmm0))
    1502        2802 :       ->MarkAsCall();
    1503        2802 : }
    1504             : 
    1505     5720668 : void InstructionSelector::EmitPrepareArguments(
    1506     5746869 :     ZoneVector<PushParameter>* arguments, const CallDescriptor* call_descriptor,
    1507     2930149 :     Node* node) {
    1508             :   X64OperandGenerator g(this);
    1509             : 
    1510             :   // Prepare for C function call.
    1511     5720668 :   if (call_descriptor->IsCFunctionCall()) {
    1512       26201 :     Emit(kArchPrepareCallCFunction | MiscField::encode(static_cast<int>(
    1513             :                                          call_descriptor->ParameterCount())),
    1514       26201 :          0, nullptr, 0, nullptr);
    1515             : 
    1516             :     // Poke any stack arguments.
    1517       59186 :     for (size_t n = 0; n < arguments->size(); ++n) {
    1518       32985 :       PushParameter input = (*arguments)[n];
    1519        3392 :       if (input.node) {
    1520             :         int slot = static_cast<int>(n);
    1521        3392 :         InstructionOperand value = g.CanBeImmediate(input.node)
    1522             :                                        ? g.UseImmediate(input.node)
    1523        3392 :                                        : g.UseRegister(input.node);
    1524        3392 :         Emit(kX64Poke | MiscField::encode(slot), g.NoOutput(), value);
    1525             :       }
    1526             :     }
    1527             :   } else {
    1528             :     // Push any stack arguments.
    1529     5694467 :     int effect_level = GetEffectLevel(node);
    1530    11874035 :     for (PushParameter input : base::Reversed(*arguments)) {
    1531             :       // Skip any alignment holes in pushed nodes. We may have one in case of a
    1532             :       // Simd128 stack argument.
    1533     3089782 :       if (input.node == nullptr) continue;
    1534     3089398 :       if (g.CanBeImmediate(input.node)) {
    1535      159249 :         Emit(kX64Push, g.NoOutput(), g.UseImmediate(input.node));
    1536     5860296 :       } else if (IsSupported(ATOM) ||
    1537     2930148 :                  sequence()->IsFP(GetVirtualRegister(input.node))) {
    1538             :         // TODO(titzer): X64Push cannot handle stack->stack double moves
    1539             :         // because there is no way to encode fixed double slots.
    1540           0 :         Emit(kX64Push, g.NoOutput(), g.UseRegister(input.node));
    1541     2930148 :       } else if (g.CanBeMemoryOperand(kX64Push, node, input.node,
    1542             :                                       effect_level)) {
    1543       19481 :         InstructionOperand outputs[1];
    1544       77924 :         InstructionOperand inputs[4];
    1545       19481 :         size_t input_count = 0;
    1546             :         InstructionCode opcode = kX64Push;
    1547             :         AddressingMode mode = g.GetEffectiveAddressMemoryOperand(
    1548       19481 :             input.node, inputs, &input_count);
    1549       19481 :         opcode |= AddressingModeField::encode(mode);
    1550       19481 :         Emit(opcode, 0, outputs, input_count, inputs);
    1551             :       } else {
    1552     2910668 :         Emit(kX64Push, g.NoOutput(), g.UseAny(input.node));
    1553             :       }
    1554             :     }
    1555             :   }
    1556     5720672 : }
    1557             : 
    1558     5720661 : void InstructionSelector::EmitPrepareResults(
    1559             :     ZoneVector<PushParameter>* results, const CallDescriptor* call_descriptor,
    1560             :     Node* node) {
    1561             :   X64OperandGenerator g(this);
    1562             : 
    1563             :   int reverse_slot = 0;
    1564    17048804 :   for (PushParameter output : *results) {
    1565    11209879 :     if (!output.location.IsCallerFrameSlot()) continue;
    1566       14704 :     reverse_slot += output.location.GetSizeInPointers();
    1567             :     // Skip any alignment holes in nodes.
    1568       14704 :     if (output.node == nullptr) continue;
    1569             :     DCHECK(!call_descriptor->IsCFunctionCall());
    1570        5072 :     if (output.location.GetType() == MachineType::Float32()) {
    1571             :       MarkAsFloat32(output.node);
    1572        3808 :     } else if (output.location.GetType() == MachineType::Float64()) {
    1573             :       MarkAsFloat64(output.node);
    1574             :     }
    1575        5072 :     InstructionOperand result = g.DefineAsRegister(output.node);
    1576        5072 :     InstructionOperand slot = g.UseImmediate(reverse_slot);
    1577        5072 :     Emit(kX64Peek, 1, &result, 1, &slot);
    1578             :   }
    1579     5720669 : }
    1580             : 
    1581      119272 : bool InstructionSelector::IsTailCallAddressImmediate() { return true; }
    1582             : 
    1583        1344 : int InstructionSelector::GetTempsCountForTailCallFromJSFunction() { return 3; }
    1584             : 
    1585             : namespace {
    1586             : 
    1587     1062484 : void VisitCompareWithMemoryOperand(InstructionSelector* selector,
    1588             :                                    InstructionCode opcode, Node* left,
    1589             :                                    InstructionOperand right,
    1590             :                                    FlagsContinuation* cont) {
    1591             :   DCHECK_EQ(IrOpcode::kLoad, left->opcode());
    1592             :   X64OperandGenerator g(selector);
    1593     1062484 :   size_t input_count = 0;
    1594     5312420 :   InstructionOperand inputs[4];
    1595             :   AddressingMode addressing_mode =
    1596     1062484 :       g.GetEffectiveAddressMemoryOperand(left, inputs, &input_count);
    1597     1062497 :   opcode |= AddressingModeField::encode(addressing_mode);
    1598     1062497 :   inputs[input_count++] = right;
    1599             : 
    1600     1062497 :   selector->EmitWithContinuation(opcode, 0, nullptr, input_count, inputs, cont);
    1601     1062491 : }
    1602             : 
    1603             : // Shared routine for multiple compare operations.
    1604             : void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
    1605             :                   InstructionOperand left, InstructionOperand right,
    1606             :                   FlagsContinuation* cont) {
    1607     4167053 :   selector->EmitWithContinuation(opcode, left, right, cont);
    1608             : }
    1609             : 
    1610             : // Shared routine for multiple compare operations.
    1611      976328 : void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
    1612             :                   Node* left, Node* right, FlagsContinuation* cont,
    1613             :                   bool commutative) {
    1614             :   X64OperandGenerator g(selector);
    1615     1285111 :   if (commutative && g.CanBeBetterLeftOperand(right)) {
    1616             :     std::swap(left, right);
    1617             :   }
    1618      976328 :   VisitCompare(selector, opcode, g.UseRegister(left), g.Use(right), cont);
    1619      976339 : }
    1620             : 
    1621    15047724 : MachineType MachineTypeForNarrow(Node* node, Node* hint_node) {
    1622     7103502 :   if (hint_node->opcode() == IrOpcode::kLoad) {
    1623     1532778 :     MachineType hint = LoadRepresentationOf(hint_node->op());
    1624     1532775 :     if (node->opcode() == IrOpcode::kInt32Constant ||
    1625             :         node->opcode() == IrOpcode::kInt64Constant) {
    1626             :       int64_t constant = node->opcode() == IrOpcode::kInt32Constant
    1627             :                              ? OpParameter<int32_t>(node->op())
    1628      773924 :                              : OpParameter<int64_t>(node->op());
    1629      773924 :       if (hint == MachineType::Int8()) {
    1630           0 :         if (constant >= std::numeric_limits<int8_t>::min() &&
    1631             :             constant <= std::numeric_limits<int8_t>::max()) {
    1632           0 :           return hint;
    1633             :         }
    1634      773924 :       } else if (hint == MachineType::Uint8()) {
    1635      113966 :         if (constant >= std::numeric_limits<uint8_t>::min() &&
    1636             :             constant <= std::numeric_limits<uint8_t>::max()) {
    1637      109934 :           return hint;
    1638             :         }
    1639      659958 :       } else if (hint == MachineType::Int16()) {
    1640         112 :         if (constant >= std::numeric_limits<int16_t>::min() &&
    1641             :             constant <= std::numeric_limits<int16_t>::max()) {
    1642         112 :           return hint;
    1643             :         }
    1644      659846 :       } else if (hint == MachineType::Uint16()) {
    1645      453084 :         if (constant >= std::numeric_limits<uint16_t>::min() &&
    1646             :             constant <= std::numeric_limits<uint16_t>::max()) {
    1647      453084 :           return hint;
    1648             :         }
    1649      206762 :       } else if (hint == MachineType::Int32()) {
    1650       22926 :         return hint;
    1651      183836 :       } else if (hint == MachineType::Uint32()) {
    1652      112052 :         if (constant >= 0) return hint;
    1653             :       }
    1654             :     }
    1655             :   }
    1656             :   return node->opcode() == IrOpcode::kLoad ? LoadRepresentationOf(node->op())
    1657     6411447 :                                            : MachineType::None();
    1658             : }
    1659             : 
    1660             : // Tries to match the size of the given opcode to that of the operands, if
    1661             : // possible.
    1662     3551743 : InstructionCode TryNarrowOpcodeSize(InstructionCode opcode, Node* left,
    1663             :                                     Node* right, FlagsContinuation* cont) {
    1664             :   // TODO(epertoso): we can probably get some size information out phi nodes.
    1665             :   // If the load representations don't match, both operands will be
    1666             :   // zero/sign-extended to 32bit.
    1667     3551743 :   MachineType left_type = MachineTypeForNarrow(left, right);
    1668     3551753 :   MachineType right_type = MachineTypeForNarrow(right, left);
    1669     3551753 :   if (left_type == right_type) {
    1670     2849069 :     switch (left_type.representation()) {
    1671             :       case MachineRepresentation::kBit:
    1672             :       case MachineRepresentation::kWord8: {
    1673      112169 :         if (opcode == kX64Test32) return kX64Test8;
    1674        3769 :         if (opcode == kX64Cmp32) {
    1675        3769 :           if (left_type.semantic() == MachineSemantic::kUint32) {
    1676             :             cont->OverwriteUnsignedIfSigned();
    1677             :           } else {
    1678         112 :             CHECK_EQ(MachineSemantic::kInt32, left_type.semantic());
    1679             :           }
    1680             :           return kX64Cmp8;
    1681             :         }
    1682             :         break;
    1683             :       }
    1684             :       case MachineRepresentation::kWord16:
    1685      455259 :         if (opcode == kX64Test32) return kX64Test16;
    1686      429186 :         if (opcode == kX64Cmp32) {
    1687      429186 :           if (left_type.semantic() == MachineSemantic::kUint32) {
    1688             :             cont->OverwriteUnsignedIfSigned();
    1689             :           } else {
    1690         112 :             CHECK_EQ(MachineSemantic::kInt32, left_type.semantic());
    1691             :           }
    1692             :           return kX64Cmp16;
    1693             :         }
    1694             :         break;
    1695             : #ifdef V8_COMPRESS_POINTERS
    1696             :       case MachineRepresentation::kTaggedSigned:
    1697             :       case MachineRepresentation::kTaggedPointer:
    1698             :       case MachineRepresentation::kTagged:
    1699             :         // When pointer compression is enabled the lower 32-bits uniquely
    1700             :         // identify tagged value.
    1701             :         if (opcode == kX64Cmp) return kX64Cmp32;
    1702             :         break;
    1703             : #endif
    1704             :       default:
    1705             :         break;
    1706             :     }
    1707             :   }
    1708             :   return opcode;
    1709             : }
    1710             : 
    1711             : // Shared routine for multiple word compare operations.
    1712     4917319 : void VisitWordCompare(InstructionSelector* selector, Node* node,
    1713     6623534 :                       InstructionCode opcode, FlagsContinuation* cont) {
    1714             :   X64OperandGenerator g(selector);
    1715             :   Node* left = node->InputAt(0);
    1716             :   Node* right = node->InputAt(1);
    1717             : 
    1718             :   // The 32-bit comparisons automatically truncate Word64
    1719             :   // values to Word32 range, no need to do that explicitly.
    1720     3551741 :   if (opcode == kX64Cmp32 || opcode == kX64Test32) {
    1721     2887379 :     if (left->opcode() == IrOpcode::kTruncateInt64ToInt32 &&
    1722       33831 :         selector->CanCover(node, left)) {
    1723             :       left = left->InputAt(0);
    1724             :     }
    1725             : 
    1726     2892292 :     if (right->opcode() == IrOpcode::kTruncateInt64ToInt32 &&
    1727       38744 :         selector->CanCover(node, right)) {
    1728             :       right = right->InputAt(0);
    1729             :     }
    1730             :   }
    1731             : 
    1732     3551741 :   opcode = TryNarrowOpcodeSize(opcode, left, right, cont);
    1733             : 
    1734             :   // If one of the two inputs is an immediate, make sure it's on the right, or
    1735             :   // if one of the two inputs is a memory operand, make sure it's on the left.
    1736     3551741 :   int effect_level = selector->GetEffectLevel(node);
    1737     3551750 :   if (cont->IsBranch()) {
    1738             :     effect_level = selector->GetEffectLevel(
    1739     3071784 :         cont->true_block()->PredecessorAt(0)->control_input());
    1740             :   }
    1741             : 
    1742    10371921 :   if ((!g.CanBeImmediate(right) && g.CanBeImmediate(left)) ||
    1743     3652079 :       (g.CanBeMemoryOperand(opcode, node, right, effect_level) &&
    1744      383653 :        !g.CanBeMemoryOperand(opcode, node, left, effect_level))) {
    1745      649396 :     if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
    1746             :     std::swap(left, right);
    1747             :   }
    1748             : 
    1749             :   // Match immediates on right side of comparison.
    1750     3551743 :   if (g.CanBeImmediate(right)) {
    1751     2306162 :     if (g.CanBeMemoryOperand(opcode, node, left, effect_level)) {
    1752             :       return VisitCompareWithMemoryOperand(selector, opcode, left,
    1753      497646 :                                            g.UseImmediate(right), cont);
    1754             :     }
    1755             :     return VisitCompare(selector, opcode, g.Use(left), g.UseImmediate(right),
    1756     1808521 :                         cont);
    1757             :   }
    1758             : 
    1759             :   // Match memory operands on left side of comparison.
    1760     1245581 :   if (g.CanBeMemoryOperand(opcode, node, left, effect_level)) {
    1761             :     return VisitCompareWithMemoryOperand(selector, opcode, left,
    1762      529404 :                                          g.UseRegister(right), cont);
    1763             :   }
    1764             : 
    1765             :   return VisitCompare(selector, opcode, left, right, cont,
    1766      716182 :                       node->op()->HasProperty(Operator::kCommutative));
    1767             : }
    1768             : 
    1769             : // Shared routine for 64-bit word comparison operations.
    1770     4574484 : void VisitWord64Compare(InstructionSelector* selector, Node* node,
    1771      556448 :                         FlagsContinuation* cont) {
    1772             :   X64OperandGenerator g(selector);
    1773     3029295 :   if (selector->CanUseRootsRegister()) {
    1774             :     const RootsTable& roots_table = selector->isolate()->roots_table();
    1775             :     RootIndex root_index;
    1776     2863923 :     HeapObjectBinopMatcher m(node);
    1777     3936660 :     if (m.right().HasValue() &&
    1778             :         roots_table.IsRootHandle(m.right().Value(), &root_index)) {
    1779      988743 :       if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
    1780             :       InstructionCode opcode =
    1781             :           kX64Cmp | AddressingModeField::encode(kMode_Root);
    1782             :       return VisitCompare(
    1783             :           selector, opcode,
    1784             :           g.TempImmediate(
    1785             :               TurboAssemblerBase::RootRegisterOffsetForRootIndex(root_index)),
    1786     1977472 :           g.UseRegister(m.left().node()), cont);
    1787     1875174 :     } else if (m.left().HasValue() &&
    1788             :                roots_table.IsRootHandle(m.left().Value(), &root_index)) {
    1789             :       InstructionCode opcode =
    1790             :           kX64Cmp | AddressingModeField::encode(kMode_Root);
    1791             :       return VisitCompare(
    1792             :           selector, opcode,
    1793             :           g.TempImmediate(
    1794             :               TurboAssemblerBase::RootRegisterOffsetForRootIndex(root_index)),
    1795           0 :           g.UseRegister(m.right().node()), cont);
    1796             :     }
    1797             :   }
    1798     2040560 :   if (selector->isolate() != nullptr) {
    1799             :     StackCheckMatcher<Int64BinopMatcher, IrOpcode::kUint64LessThan> m(
    1800             :         selector->isolate(), node);
    1801     1961625 :     if (m.Matched()) {
    1802             :       // Compare(Load(js_stack_limit), LoadStackPointer)
    1803      556446 :       if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
    1804             :       InstructionCode opcode = cont->Encode(kX64StackCheck);
    1805      556448 :       CHECK(cont->IsBranch());
    1806      556448 :       selector->EmitWithContinuation(opcode, cont);
    1807      556453 :       return;
    1808             :     }
    1809             :   }
    1810             :   WasmStackCheckMatcher<Int64BinopMatcher, IrOpcode::kUint64LessThan> wasm_m(
    1811             :       node);
    1812     1484119 :   if (wasm_m.Matched()) {
    1813             :     // This is a wasm stack check. By structure, we know that we can use the
    1814             :     // stack pointer directly, as wasm code does not modify the stack at points
    1815             :     // where stack checks are performed.
    1816             :     Node* left = node->InputAt(0);
    1817             :     LocationOperand rsp(InstructionOperand::EXPLICIT, LocationOperand::REGISTER,
    1818             :                         InstructionSequence::DefaultRepresentation(),
    1819             :                         RegisterCode::kRegCode_rsp);
    1820       18994 :     return VisitCompareWithMemoryOperand(selector, kX64Cmp, left, rsp, cont);
    1821             :   }
    1822     1465131 :   VisitWordCompare(selector, node, kX64Cmp, cont);
    1823             : }
    1824             : 
    1825             : // Shared routine for comparison with zero.
    1826     1463297 : void VisitCompareZero(InstructionSelector* selector, Node* user, Node* node,
    1827     1836753 :                       InstructionCode opcode, FlagsContinuation* cont) {
    1828             :   X64OperandGenerator g(selector);
    1829     1598312 :   if (cont->IsBranch() &&
    1830      216996 :       (cont->condition() == kNotEqual || cont->condition() == kEqual)) {
    1831      508486 :     switch (node->opcode()) {
    1832             : #define FLAGS_SET_BINOP_LIST(V)        \
    1833             :   V(kInt32Add, VisitBinop, kX64Add32)  \
    1834             :   V(kInt32Sub, VisitBinop, kX64Sub32)  \
    1835             :   V(kWord32And, VisitBinop, kX64And32) \
    1836             :   V(kWord32Or, VisitBinop, kX64Or32)   \
    1837             :   V(kInt64Add, VisitBinop, kX64Add)    \
    1838             :   V(kInt64Sub, VisitBinop, kX64Sub)    \
    1839             :   V(kWord64And, VisitBinop, kX64And)   \
    1840             :   V(kWord64Or, VisitBinop, kX64Or)
    1841             : #define FLAGS_SET_BINOP(opcode, Visit, archOpcode)           \
    1842             :   case IrOpcode::opcode:                                     \
    1843             :     if (selector->IsOnlyUserOfNodeInSameBlock(user, node)) { \
    1844             :       return Visit(selector, node, archOpcode, cont);        \
    1845             :     }                                                        \
    1846             :     break;
    1847      140780 :       FLAGS_SET_BINOP_LIST(FLAGS_SET_BINOP)
    1848             : #undef FLAGS_SET_BINOP_LIST
    1849             : #undef FLAGS_SET_BINOP
    1850             : 
    1851             : #define TRY_VISIT_WORD32_SHIFT TryVisitWordShift<Int32BinopMatcher, 32>
    1852             : #define TRY_VISIT_WORD64_SHIFT TryVisitWordShift<Int64BinopMatcher, 64>
    1853             : // Skip Word64Sar/Word32Sar since no instruction reduction in most cases.
    1854             : #define FLAGS_SET_SHIFT_LIST(V)                    \
    1855             :   V(kWord32Shl, TRY_VISIT_WORD32_SHIFT, kX64Shl32) \
    1856             :   V(kWord32Shr, TRY_VISIT_WORD32_SHIFT, kX64Shr32) \
    1857             :   V(kWord64Shl, TRY_VISIT_WORD64_SHIFT, kX64Shl)   \
    1858             :   V(kWord64Shr, TRY_VISIT_WORD64_SHIFT, kX64Shr)
    1859             : #define FLAGS_SET_SHIFT(opcode, TryVisit, archOpcode)         \
    1860             :   case IrOpcode::opcode:                                      \
    1861             :     if (selector->IsOnlyUserOfNodeInSameBlock(user, node)) {  \
    1862             :       if (TryVisit(selector, node, archOpcode, cont)) return; \
    1863             :     }                                                         \
    1864             :     break;
    1865         133 :       FLAGS_SET_SHIFT_LIST(FLAGS_SET_SHIFT)
    1866             : #undef TRY_VISIT_WORD32_SHIFT
    1867             : #undef TRY_VISIT_WORD64_SHIFT
    1868             : #undef FLAGS_SET_SHIFT_LIST
    1869             : #undef FLAGS_SET_SHIFT
    1870             :       default:
    1871             :         break;
    1872             :     }
    1873             :   }
    1874      409892 :   int effect_level = selector->GetEffectLevel(node);
    1875      409893 :   if (cont->IsBranch()) {
    1876             :     effect_level = selector->GetEffectLevel(
    1877      373461 :         cont->true_block()->PredecessorAt(0)->control_input());
    1878             :   }
    1879      409898 :   if (node->opcode() == IrOpcode::kLoad) {
    1880       30990 :     switch (LoadRepresentationOf(node->op()).representation()) {
    1881             :       case MachineRepresentation::kWord8:
    1882        6839 :         if (opcode == kX64Cmp32) {
    1883             :           opcode = kX64Cmp8;
    1884           0 :         } else if (opcode == kX64Test32) {
    1885             :           opcode = kX64Test8;
    1886             :         }
    1887             :         break;
    1888             :       case MachineRepresentation::kWord16:
    1889         833 :         if (opcode == kX64Cmp32) {
    1890             :           opcode = kX64Cmp16;
    1891           0 :         } else if (opcode == kX64Test32) {
    1892             :           opcode = kX64Test16;
    1893             :         }
    1894             :         break;
    1895             :       default:
    1896             :         break;
    1897             :     }
    1898             :   }
    1899      409898 :   if (g.CanBeMemoryOperand(opcode, user, node, effect_level)) {
    1900             :     VisitCompareWithMemoryOperand(selector, opcode, node, g.TempImmediate(0),
    1901       16445 :                                   cont);
    1902             :   } else {
    1903      393451 :     VisitCompare(selector, opcode, g.Use(node), g.TempImmediate(0), cont);
    1904             :   }
    1905             : }
    1906             : 
    1907             : // Shared routine for multiple float32 compare operations (inputs commuted).
    1908        1884 : void VisitFloat32Compare(InstructionSelector* selector, Node* node,
    1909             :                          FlagsContinuation* cont) {
    1910             :   Node* const left = node->InputAt(0);
    1911             :   Node* const right = node->InputAt(1);
    1912             :   InstructionCode const opcode =
    1913        1884 :       selector->IsSupported(AVX) ? kAVXFloat32Cmp : kSSEFloat32Cmp;
    1914        1884 :   VisitCompare(selector, opcode, right, left, cont, false);
    1915        1886 : }
    1916             : 
    1917             : // Shared routine for multiple float64 compare operations (inputs commuted).
    1918      195680 : void VisitFloat64Compare(InstructionSelector* selector, Node* node,
    1919             :                          FlagsContinuation* cont) {
    1920             :   Node* const left = node->InputAt(0);
    1921             :   Node* const right = node->InputAt(1);
    1922             :   InstructionCode const opcode =
    1923      195680 :       selector->IsSupported(AVX) ? kAVXFloat64Cmp : kSSEFloat64Cmp;
    1924      195680 :   VisitCompare(selector, opcode, right, left, cont, false);
    1925      195679 : }
    1926             : 
    1927             : // Shared routine for Word32/Word64 Atomic Binops
    1928       23980 : void VisitAtomicBinop(InstructionSelector* selector, Node* node,
    1929             :                       ArchOpcode opcode) {
    1930             :   X64OperandGenerator g(selector);
    1931             :   Node* base = node->InputAt(0);
    1932             :   Node* index = node->InputAt(1);
    1933             :   Node* value = node->InputAt(2);
    1934             :   AddressingMode addressing_mode;
    1935             :   InstructionOperand inputs[] = {
    1936             :       g.UseUniqueRegister(value), g.UseUniqueRegister(base),
    1937       23980 :       g.GetEffectiveIndexOperand(index, &addressing_mode)};
    1938       23985 :   InstructionOperand outputs[] = {g.DefineAsFixed(node, rax)};
    1939       23991 :   InstructionOperand temps[] = {g.TempRegister()};
    1940       47982 :   InstructionCode code = opcode | AddressingModeField::encode(addressing_mode);
    1941             :   selector->Emit(code, arraysize(outputs), outputs, arraysize(inputs), inputs,
    1942       23991 :                  arraysize(temps), temps);
    1943       23991 : }
    1944             : 
    1945             : // Shared routine for Word32/Word64 Atomic CmpExchg
    1946        1071 : void VisitAtomicCompareExchange(InstructionSelector* selector, Node* node,
    1947             :                                 ArchOpcode opcode) {
    1948             :   X64OperandGenerator g(selector);
    1949             :   Node* base = node->InputAt(0);
    1950             :   Node* index = node->InputAt(1);
    1951             :   Node* old_value = node->InputAt(2);
    1952             :   Node* new_value = node->InputAt(3);
    1953             :   AddressingMode addressing_mode;
    1954             :   InstructionOperand inputs[] = {
    1955             :       g.UseFixed(old_value, rax), g.UseUniqueRegister(new_value),
    1956             :       g.UseUniqueRegister(base),
    1957        1071 :       g.GetEffectiveIndexOperand(index, &addressing_mode)};
    1958        1071 :   InstructionOperand outputs[] = {g.DefineAsFixed(node, rax)};
    1959        2142 :   InstructionCode code = opcode | AddressingModeField::encode(addressing_mode);
    1960        1071 :   selector->Emit(code, arraysize(outputs), outputs, arraysize(inputs), inputs);
    1961        1071 : }
    1962             : 
    1963             : // Shared routine for Word32/Word64 Atomic Exchange
    1964        9684 : void VisitAtomicExchange(InstructionSelector* selector, Node* node,
    1965             :                          ArchOpcode opcode) {
    1966             :   X64OperandGenerator g(selector);
    1967             :   Node* base = node->InputAt(0);
    1968             :   Node* index = node->InputAt(1);
    1969             :   Node* value = node->InputAt(2);
    1970             :   AddressingMode addressing_mode;
    1971             :   InstructionOperand inputs[] = {
    1972             :       g.UseUniqueRegister(value), g.UseUniqueRegister(base),
    1973        9684 :       g.GetEffectiveIndexOperand(index, &addressing_mode)};
    1974        9685 :   InstructionOperand outputs[] = {g.DefineSameAsFirst(node)};
    1975       19364 :   InstructionCode code = opcode | AddressingModeField::encode(addressing_mode);
    1976        9682 :   selector->Emit(code, arraysize(outputs), outputs, arraysize(inputs), inputs);
    1977        9686 : }
    1978             : 
    1979             : }  // namespace
    1980             : 
    1981             : // Shared routine for word comparison against zero.
    1982    18025503 : void InstructionSelector::VisitWordCompareZero(Node* user, Node* value,
    1983             :                                                FlagsContinuation* cont) {
    1984             :   // Try to combine with comparisons against 0 by simply inverting the branch.
    1985    12482081 :   while (value->opcode() == IrOpcode::kWord32Equal && CanCover(user, value)) {
    1986     1435933 :     Int32BinopMatcher m(value);
    1987     1435935 :     if (!m.right().Is(0)) break;
    1988             : 
    1989             :     user = value;
    1990             :     value = m.left().node();
    1991             :     cont->Negate();
    1992             :   }
    1993             : 
    1994     5774557 :   if (CanCover(user, value)) {
    1995     5543422 :     switch (value->opcode()) {
    1996             :       case IrOpcode::kWord32Equal:
    1997             :         cont->OverwriteAndNegateIfEqual(kEqual);
    1998      502962 :         return VisitWordCompare(this, value, kX64Cmp32, cont);
    1999             :       case IrOpcode::kInt32LessThan:
    2000             :         cont->OverwriteAndNegateIfEqual(kSignedLessThan);
    2001      219946 :         return VisitWordCompare(this, value, kX64Cmp32, cont);
    2002             :       case IrOpcode::kInt32LessThanOrEqual:
    2003             :         cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
    2004       52234 :         return VisitWordCompare(this, value, kX64Cmp32, cont);
    2005             :       case IrOpcode::kUint32LessThan:
    2006             :         cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
    2007      136498 :         return VisitWordCompare(this, value, kX64Cmp32, cont);
    2008             :       case IrOpcode::kUint32LessThanOrEqual:
    2009             :         cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
    2010       57703 :         return VisitWordCompare(this, value, kX64Cmp32, cont);
    2011             :       case IrOpcode::kWord64Equal: {
    2012             :         cont->OverwriteAndNegateIfEqual(kEqual);
    2013     2414066 :         Int64BinopMatcher m(value);
    2014     2414073 :         if (m.right().Is(0)) {
    2015             :           // Try to combine the branch with a comparison.
    2016             :           Node* const user = m.node();
    2017      695210 :           Node* const value = m.left().node();
    2018      783682 :           if (CanCover(user, value)) {
    2019      695210 :             switch (value->opcode()) {
    2020             :               case IrOpcode::kInt64Sub:
    2021           4 :                 return VisitWord64Compare(this, value, cont);
    2022             :               case IrOpcode::kWord64And:
    2023      655085 :                 return VisitWordCompare(this, value, kX64Test, cont);
    2024             :               default:
    2025             :                 break;
    2026             :             }
    2027             :           }
    2028      128596 :           return VisitCompareZero(this, user, value, kX64Cmp, cont);
    2029             :         }
    2030     1630391 :         return VisitWord64Compare(this, value, cont);
    2031             :       }
    2032             :       case IrOpcode::kInt64LessThan:
    2033             :         cont->OverwriteAndNegateIfEqual(kSignedLessThan);
    2034      101828 :         return VisitWord64Compare(this, value, cont);
    2035             :       case IrOpcode::kInt64LessThanOrEqual:
    2036             :         cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
    2037       28987 :         return VisitWord64Compare(this, value, cont);
    2038             :       case IrOpcode::kUint64LessThan:
    2039             :         cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
    2040     1141768 :         return VisitWord64Compare(this, value, cont);
    2041             :       case IrOpcode::kUint64LessThanOrEqual:
    2042             :         cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
    2043       83561 :         return VisitWord64Compare(this, value, cont);
    2044             :       case IrOpcode::kFloat32Equal:
    2045             :         cont->OverwriteAndNegateIfEqual(kUnorderedEqual);
    2046         690 :         return VisitFloat32Compare(this, value, cont);
    2047             :       case IrOpcode::kFloat32LessThan:
    2048             :         cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThan);
    2049         667 :         return VisitFloat32Compare(this, value, cont);
    2050             :       case IrOpcode::kFloat32LessThanOrEqual:
    2051             :         cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThanOrEqual);
    2052         182 :         return VisitFloat32Compare(this, value, cont);
    2053             :       case IrOpcode::kFloat64Equal:
    2054             :         cont->OverwriteAndNegateIfEqual(kUnorderedEqual);
    2055      159256 :         return VisitFloat64Compare(this, value, cont);
    2056             :       case IrOpcode::kFloat64LessThan: {
    2057       78487 :         Float64BinopMatcher m(value);
    2058      138088 :         if (m.left().Is(0.0) && m.right().IsFloat64Abs()) {
    2059             :           // This matches the pattern
    2060             :           //
    2061             :           //   Float64LessThan(#0.0, Float64Abs(x))
    2062             :           //
    2063             :           // which TurboFan generates for NumberToBoolean in the general case,
    2064             :           // and which evaluates to false if x is 0, -0 or NaN. We can compile
    2065             :           // this to a simple (v)ucomisd using not_equal flags condition, which
    2066             :           // avoids the costly Float64Abs.
    2067             :           cont->OverwriteAndNegateIfEqual(kNotEqual);
    2068             :           InstructionCode const opcode =
    2069       59001 :               IsSupported(AVX) ? kAVXFloat64Cmp : kSSEFloat64Cmp;
    2070             :           return VisitCompare(this, opcode, m.left().node(),
    2071       59001 :                               m.right().InputAt(0), cont, false);
    2072             :         }
    2073             :         cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThan);
    2074       19486 :         return VisitFloat64Compare(this, value, cont);
    2075             :       }
    2076             :       case IrOpcode::kFloat64LessThanOrEqual:
    2077             :         cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThanOrEqual);
    2078       11715 :         return VisitFloat64Compare(this, value, cont);
    2079             :       case IrOpcode::kProjection:
    2080             :         // Check if this is the overflow output projection of an
    2081             :         // <Operation>WithOverflow node.
    2082       85570 :         if (ProjectionIndexOf(value->op()) == 1u) {
    2083             :           // We cannot combine the <Operation>WithOverflow with this branch
    2084             :           // unless the 0th projection (the use of the actual value of the
    2085             :           // <Operation> is either nullptr, which means there's no use of the
    2086             :           // actual value, or was already defined, which means it is scheduled
    2087             :           // *AFTER* this branch).
    2088       85544 :           Node* const node = value->InputAt(0);
    2089       85541 :           Node* const result = NodeProperties::FindProjection(node, 0);
    2090       85542 :           if (result == nullptr || IsDefined(result)) {
    2091       85544 :             switch (node->opcode()) {
    2092             :               case IrOpcode::kInt32AddWithOverflow:
    2093             :                 cont->OverwriteAndNegateIfEqual(kOverflow);
    2094       56141 :                 return VisitBinop(this, node, kX64Add32, cont);
    2095             :               case IrOpcode::kInt32SubWithOverflow:
    2096             :                 cont->OverwriteAndNegateIfEqual(kOverflow);
    2097       10629 :                 return VisitBinop(this, node, kX64Sub32, cont);
    2098             :               case IrOpcode::kInt32MulWithOverflow:
    2099             :                 cont->OverwriteAndNegateIfEqual(kOverflow);
    2100        3633 :                 return VisitBinop(this, node, kX64Imul32, cont);
    2101             :               case IrOpcode::kInt64AddWithOverflow:
    2102             :                 cont->OverwriteAndNegateIfEqual(kOverflow);
    2103        9640 :                 return VisitBinop(this, node, kX64Add, cont);
    2104             :               case IrOpcode::kInt64SubWithOverflow:
    2105             :                 cont->OverwriteAndNegateIfEqual(kOverflow);
    2106        5496 :                 return VisitBinop(this, node, kX64Sub, cont);
    2107             :               default:
    2108             :                 break;
    2109             :             }
    2110             :           }
    2111             :         }
    2112             :         break;
    2113             :       case IrOpcode::kInt32Sub:
    2114         968 :         return VisitWordCompare(this, value, kX64Cmp32, cont);
    2115             :       case IrOpcode::kWord32And:
    2116      281179 :         return VisitWordCompare(this, value, kX64Test32, cont);
    2117             :       default:
    2118             :         break;
    2119             :     }
    2120             :   }
    2121             : 
    2122             :   // Branch could not be combined with a compare, emit compare against 0.
    2123      416353 :   VisitCompareZero(this, user, value, kX64Cmp32, cont);
    2124             : }
    2125             : 
    2126       50172 : void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
    2127             :   X64OperandGenerator g(this);
    2128       35036 :   InstructionOperand value_operand = g.UseRegister(node->InputAt(0));
    2129             : 
    2130             :   // Emit either ArchTableSwitch or ArchLookupSwitch.
    2131       35036 :   if (enable_switch_jump_table_ == kEnableSwitchJumpTable) {
    2132             :     static const size_t kMaxTableSwitchValueRange = 2 << 16;
    2133             :     size_t table_space_cost = 4 + sw.value_range();
    2134             :     size_t table_time_cost = 3;
    2135       14508 :     size_t lookup_space_cost = 3 + 2 * sw.case_count();
    2136             :     size_t lookup_time_cost = sw.case_count();
    2137       14829 :     if (sw.case_count() > 4 &&
    2138         321 :         table_space_cost + 3 * table_time_cost <=
    2139         635 :             lookup_space_cost + 3 * lookup_time_cost &&
    2140       14822 :         sw.min_value() > std::numeric_limits<int32_t>::min() &&
    2141             :         sw.value_range() <= kMaxTableSwitchValueRange) {
    2142         314 :       InstructionOperand index_operand = g.TempRegister();
    2143         314 :       if (sw.min_value()) {
    2144             :         // The leal automatically zero extends, so result is a valid 64-bit
    2145             :         // index.
    2146             :         Emit(kX64Lea32 | AddressingModeField::encode(kMode_MRI), index_operand,
    2147           3 :              value_operand, g.TempImmediate(-sw.min_value()));
    2148             :       } else {
    2149             :         // Zero extend, because we use it as 64-bit index into the jump table.
    2150         311 :         Emit(kX64Movl, index_operand, value_operand);
    2151             :       }
    2152             :       // Generate a table lookup.
    2153         314 :       return EmitTableSwitch(sw, index_operand);
    2154             :     }
    2155             :   }
    2156             : 
    2157             :   // Generate a tree of conditional jumps.
    2158       34722 :   return EmitBinarySearchSwitch(sw, value_operand);
    2159             : }
    2160             : 
    2161      107963 : void InstructionSelector::VisitWord32Equal(Node* const node) {
    2162             :   Node* user = node;
    2163             :   FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
    2164      107963 :   Int32BinopMatcher m(user);
    2165      107965 :   if (m.right().Is(0)) {
    2166      157324 :     return VisitWordCompareZero(m.node(), m.left().node(), &cont);
    2167             :   }
    2168       58607 :   VisitWordCompare(this, node, kX64Cmp32, &cont);
    2169             : }
    2170             : 
    2171       28135 : void InstructionSelector::VisitInt32LessThan(Node* node) {
    2172             :   FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
    2173       28135 :   VisitWordCompare(this, node, kX64Cmp32, &cont);
    2174       28135 : }
    2175             : 
    2176       27762 : void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
    2177             :   FlagsContinuation cont =
    2178             :       FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
    2179       27762 :   VisitWordCompare(this, node, kX64Cmp32, &cont);
    2180       27762 : }
    2181             : 
    2182       32833 : void InstructionSelector::VisitUint32LessThan(Node* node) {
    2183             :   FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
    2184       32833 :   VisitWordCompare(this, node, kX64Cmp32, &cont);
    2185       32833 : }
    2186             : 
    2187       27942 : void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
    2188             :   FlagsContinuation cont =
    2189             :       FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
    2190       27942 :   VisitWordCompare(this, node, kX64Cmp32, &cont);
    2191       27942 : }
    2192             : 
    2193       39235 : void InstructionSelector::VisitWord64Equal(Node* const node) {
    2194             :   FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
    2195       39235 :   Int64BinopMatcher m(node);
    2196       39235 :   if (m.right().Is(0)) {
    2197             :     // Try to combine the equality check with a comparison.
    2198             :     Node* const user = m.node();
    2199        4878 :     Node* const value = m.left().node();
    2200        5498 :     if (CanCover(user, value)) {
    2201        4878 :       switch (value->opcode()) {
    2202             :         case IrOpcode::kInt64Sub:
    2203        4758 :           return VisitWord64Compare(this, value, &cont);
    2204             :         case IrOpcode::kWord64And:
    2205        4758 :           return VisitWordCompare(this, value, kX64Test, &cont);
    2206             :         default:
    2207             :           break;
    2208             :       }
    2209             :     }
    2210             :   }
    2211       34477 :   VisitWord64Compare(this, node, &cont);
    2212             : }
    2213             : 
    2214       13924 : void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
    2215       13924 :   if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    2216             :     FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    2217       27848 :     return VisitBinop(this, node, kX64Add32, &cont);
    2218             :   }
    2219             :   FlagsContinuation cont;
    2220           0 :   VisitBinop(this, node, kX64Add32, &cont);
    2221             : }
    2222             : 
    2223       13924 : void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
    2224       13924 :   if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    2225             :     FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    2226       27848 :     return VisitBinop(this, node, kX64Sub32, &cont);
    2227             :   }
    2228             :   FlagsContinuation cont;
    2229           0 :   VisitBinop(this, node, kX64Sub32, &cont);
    2230             : }
    2231             : 
    2232        1360 : void InstructionSelector::VisitInt64LessThan(Node* node) {
    2233             :   FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
    2234        1360 :   VisitWord64Compare(this, node, &cont);
    2235        1360 : }
    2236             : 
    2237         829 : void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) {
    2238             :   FlagsContinuation cont =
    2239             :       FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
    2240         829 :   VisitWord64Compare(this, node, &cont);
    2241         829 : }
    2242             : 
    2243        5690 : void InstructionSelector::VisitUint64LessThan(Node* node) {
    2244             :   FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
    2245        5690 :   VisitWord64Compare(this, node, &cont);
    2246        5690 : }
    2247             : 
    2248         408 : void InstructionSelector::VisitUint64LessThanOrEqual(Node* node) {
    2249             :   FlagsContinuation cont =
    2250             :       FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
    2251         408 :   VisitWord64Compare(this, node, &cont);
    2252         408 : }
    2253             : 
    2254         106 : void InstructionSelector::VisitFloat32Equal(Node* node) {
    2255             :   FlagsContinuation cont = FlagsContinuation::ForSet(kUnorderedEqual, node);
    2256         106 :   VisitFloat32Compare(this, node, &cont);
    2257         106 : }
    2258             : 
    2259         138 : void InstructionSelector::VisitFloat32LessThan(Node* node) {
    2260             :   FlagsContinuation cont =
    2261             :       FlagsContinuation::ForSet(kUnsignedGreaterThan, node);
    2262         138 :   VisitFloat32Compare(this, node, &cont);
    2263         138 : }
    2264             : 
    2265         103 : void InstructionSelector::VisitFloat32LessThanOrEqual(Node* node) {
    2266             :   FlagsContinuation cont =
    2267             :       FlagsContinuation::ForSet(kUnsignedGreaterThanOrEqual, node);
    2268         103 :   VisitFloat32Compare(this, node, &cont);
    2269         103 : }
    2270             : 
    2271        2814 : void InstructionSelector::VisitFloat64Equal(Node* node) {
    2272             :   FlagsContinuation cont = FlagsContinuation::ForSet(kUnorderedEqual, node);
    2273        2814 :   VisitFloat64Compare(this, node, &cont);
    2274        2814 : }
    2275             : 
    2276        4846 : void InstructionSelector::VisitFloat64LessThan(Node* node) {
    2277        4846 :   Float64BinopMatcher m(node);
    2278        8521 :   if (m.left().Is(0.0) && m.right().IsFloat64Abs()) {
    2279             :     // This matches the pattern
    2280             :     //
    2281             :     //   Float64LessThan(#0.0, Float64Abs(x))
    2282             :     //
    2283             :     // which TurboFan generates for NumberToBoolean in the general case,
    2284             :     // and which evaluates to false if x is 0, -0 or NaN. We can compile
    2285             :     // this to a simple (v)ucomisd using not_equal flags condition, which
    2286             :     // avoids the costly Float64Abs.
    2287             :     FlagsContinuation cont = FlagsContinuation::ForSet(kNotEqual, node);
    2288             :     InstructionCode const opcode =
    2289        3580 :         IsSupported(AVX) ? kAVXFloat64Cmp : kSSEFloat64Cmp;
    2290             :     return VisitCompare(this, opcode, m.left().node(), m.right().InputAt(0),
    2291        8426 :                         &cont, false);
    2292             :   }
    2293             :   FlagsContinuation cont =
    2294             :       FlagsContinuation::ForSet(kUnsignedGreaterThan, node);
    2295        1266 :   VisitFloat64Compare(this, node, &cont);
    2296             : }
    2297             : 
    2298        1147 : void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
    2299             :   FlagsContinuation cont =
    2300             :       FlagsContinuation::ForSet(kUnsignedGreaterThanOrEqual, node);
    2301        1147 :   VisitFloat64Compare(this, node, &cont);
    2302        1147 : }
    2303             : 
    2304         116 : void InstructionSelector::VisitFloat64InsertLowWord32(Node* node) {
    2305             :   X64OperandGenerator g(this);
    2306             :   Node* left = node->InputAt(0);
    2307             :   Node* right = node->InputAt(1);
    2308             :   Float64Matcher mleft(left);
    2309         116 :   if (mleft.HasValue() && (bit_cast<uint64_t>(mleft.Value()) >> 32) == 0u) {
    2310         112 :     Emit(kSSEFloat64LoadLowWord32, g.DefineAsRegister(node), g.Use(right));
    2311         228 :     return;
    2312             :   }
    2313             :   Emit(kSSEFloat64InsertLowWord32, g.DefineSameAsFirst(node),
    2314           4 :        g.UseRegister(left), g.Use(right));
    2315             : }
    2316             : 
    2317         116 : void InstructionSelector::VisitFloat64InsertHighWord32(Node* node) {
    2318             :   X64OperandGenerator g(this);
    2319             :   Node* left = node->InputAt(0);
    2320             :   Node* right = node->InputAt(1);
    2321             :   Emit(kSSEFloat64InsertHighWord32, g.DefineSameAsFirst(node),
    2322         116 :        g.UseRegister(left), g.Use(right));
    2323         116 : }
    2324             : 
    2325        5907 : void InstructionSelector::VisitFloat64SilenceNaN(Node* node) {
    2326             :   X64OperandGenerator g(this);
    2327             :   Emit(kSSEFloat64SilenceNaN, g.DefineSameAsFirst(node),
    2328        5907 :        g.UseRegister(node->InputAt(0)));
    2329        5907 : }
    2330             : 
    2331        1054 : void InstructionSelector::VisitWord32AtomicLoad(Node* node) {
    2332        1054 :   LoadRepresentation load_rep = LoadRepresentationOf(node->op());
    2333             :   DCHECK(load_rep.representation() == MachineRepresentation::kWord8 ||
    2334             :          load_rep.representation() == MachineRepresentation::kWord16 ||
    2335             :          load_rep.representation() == MachineRepresentation::kWord32);
    2336             :   USE(load_rep);
    2337        1054 :   VisitLoad(node);
    2338        1054 : }
    2339             : 
    2340         544 : void InstructionSelector::VisitWord64AtomicLoad(Node* node) {
    2341         544 :   LoadRepresentation load_rep = LoadRepresentationOf(node->op());
    2342             :   USE(load_rep);
    2343         544 :   VisitLoad(node);
    2344         544 : }
    2345             : 
    2346        2035 : void InstructionSelector::VisitWord32AtomicStore(Node* node) {
    2347        2035 :   MachineRepresentation rep = AtomicStoreRepresentationOf(node->op());
    2348             :   ArchOpcode opcode = kArchNop;
    2349        2035 :   switch (rep) {
    2350             :     case MachineRepresentation::kWord8:
    2351             :       opcode = kWord32AtomicExchangeInt8;
    2352             :       break;
    2353             :     case MachineRepresentation::kWord16:
    2354             :       opcode = kWord32AtomicExchangeInt16;
    2355         582 :       break;
    2356             :     case MachineRepresentation::kWord32:
    2357             :       opcode = kWord32AtomicExchangeWord32;
    2358         578 :       break;
    2359             :     default:
    2360           0 :       UNREACHABLE();
    2361             :       return;
    2362             :   }
    2363        2035 :   VisitAtomicExchange(this, node, opcode);
    2364        2035 : }
    2365             : 
    2366        2248 : void InstructionSelector::VisitWord64AtomicStore(Node* node) {
    2367        2248 :   MachineRepresentation rep = AtomicStoreRepresentationOf(node->op());
    2368             :   ArchOpcode opcode = kArchNop;
    2369        2248 :   switch (rep) {
    2370             :     case MachineRepresentation::kWord8:
    2371             :       opcode = kX64Word64AtomicExchangeUint8;
    2372             :       break;
    2373             :     case MachineRepresentation::kWord16:
    2374             :       opcode = kX64Word64AtomicExchangeUint16;
    2375         865 :       break;
    2376             :     case MachineRepresentation::kWord32:
    2377             :       opcode = kX64Word64AtomicExchangeUint32;
    2378         430 :       break;
    2379             :     case MachineRepresentation::kWord64:
    2380             :       opcode = kX64Word64AtomicExchangeUint64;
    2381         366 :       break;
    2382             :     default:
    2383           0 :       UNREACHABLE();
    2384             :       return;
    2385             :   }
    2386        2248 :   VisitAtomicExchange(this, node, opcode);
    2387        2249 : }
    2388             : 
    2389        2442 : void InstructionSelector::VisitWord32AtomicExchange(Node* node) {
    2390        2442 :   MachineType type = AtomicOpType(node->op());
    2391             :   ArchOpcode opcode = kArchNop;
    2392        2441 :   if (type == MachineType::Int8()) {
    2393             :     opcode = kWord32AtomicExchangeInt8;
    2394        2329 :   } else if (type == MachineType::Uint8()) {
    2395             :     opcode = kWord32AtomicExchangeUint8;
    2396        1682 :   } else if (type == MachineType::Int16()) {
    2397             :     opcode = kWord32AtomicExchangeInt16;
    2398        1570 :   } else if (type == MachineType::Uint16()) {
    2399             :     opcode = kWord32AtomicExchangeUint16;
    2400        1132 :   } else if (type == MachineType::Int32() || type == MachineType::Uint32()) {
    2401             :     opcode = kWord32AtomicExchangeWord32;
    2402             :   } else {
    2403           0 :     UNREACHABLE();
    2404             :     return;
    2405             :   }
    2406        2441 :   VisitAtomicExchange(this, node, opcode);
    2407        2442 : }
    2408             : 
    2409        2960 : void InstructionSelector::VisitWord64AtomicExchange(Node* node) {
    2410        2960 :   MachineType type = AtomicOpType(node->op());
    2411             :   ArchOpcode opcode = kArchNop;
    2412        2960 :   if (type == MachineType::Uint8()) {
    2413             :     opcode = kX64Word64AtomicExchangeUint8;
    2414        1736 :   } else if (type == MachineType::Uint16()) {
    2415             :     opcode = kX64Word64AtomicExchangeUint16;
    2416         940 :   } else if (type == MachineType::Uint32()) {
    2417             :     opcode = kX64Word64AtomicExchangeUint32;
    2418         518 :   } else if (type == MachineType::Uint64()) {
    2419             :     opcode = kX64Word64AtomicExchangeUint64;
    2420             :   } else {
    2421           0 :     UNREACHABLE();
    2422             :     return;
    2423             :   }
    2424        2960 :   VisitAtomicExchange(this, node, opcode);
    2425        2960 : }
    2426             : 
    2427         739 : void InstructionSelector::VisitWord32AtomicCompareExchange(Node* node) {
    2428         739 :   MachineType type = AtomicOpType(node->op());
    2429             :   ArchOpcode opcode = kArchNop;
    2430         739 :   if (type == MachineType::Int8()) {
    2431             :     opcode = kWord32AtomicCompareExchangeInt8;
    2432         627 :   } else if (type == MachineType::Uint8()) {
    2433             :     opcode = kWord32AtomicCompareExchangeUint8;
    2434         498 :   } else if (type == MachineType::Int16()) {
    2435             :     opcode = kWord32AtomicCompareExchangeInt16;
    2436         386 :   } else if (type == MachineType::Uint16()) {
    2437             :     opcode = kWord32AtomicCompareExchangeUint16;
    2438         402 :   } else if (type == MachineType::Int32() || type == MachineType::Uint32()) {
    2439             :     opcode = kWord32AtomicCompareExchangeWord32;
    2440             :   } else {
    2441           0 :     UNREACHABLE();
    2442             :     return;
    2443             :   }
    2444         739 :   VisitAtomicCompareExchange(this, node, opcode);
    2445         739 : }
    2446             : 
    2447         332 : void InstructionSelector::VisitWord64AtomicCompareExchange(Node* node) {
    2448         332 :   MachineType type = AtomicOpType(node->op());
    2449             :   ArchOpcode opcode = kArchNop;
    2450         332 :   if (type == MachineType::Uint8()) {
    2451             :     opcode = kX64Word64AtomicCompareExchangeUint8;
    2452         315 :   } else if (type == MachineType::Uint16()) {
    2453             :     opcode = kX64Word64AtomicCompareExchangeUint16;
    2454         290 :   } else if (type == MachineType::Uint32()) {
    2455             :     opcode = kX64Word64AtomicCompareExchangeUint32;
    2456         265 :   } else if (type == MachineType::Uint64()) {
    2457             :     opcode = kX64Word64AtomicCompareExchangeUint64;
    2458             :   } else {
    2459           0 :     UNREACHABLE();
    2460             :     return;
    2461             :   }
    2462         332 :   VisitAtomicCompareExchange(this, node, opcode);
    2463         332 : }
    2464             : 
    2465       13201 : void InstructionSelector::VisitWord32AtomicBinaryOperation(
    2466       13201 :     Node* node, ArchOpcode int8_op, ArchOpcode uint8_op, ArchOpcode int16_op,
    2467             :     ArchOpcode uint16_op, ArchOpcode word32_op) {
    2468       13201 :   MachineType type = AtomicOpType(node->op());
    2469             :   ArchOpcode opcode = kArchNop;
    2470       13204 :   if (type == MachineType::Int8()) {
    2471             :     opcode = int8_op;
    2472       12646 :   } else if (type == MachineType::Uint8()) {
    2473             :     opcode = uint8_op;
    2474        7772 :   } else if (type == MachineType::Int16()) {
    2475             :     opcode = int16_op;
    2476        7212 :   } else if (type == MachineType::Uint16()) {
    2477             :     opcode = uint16_op;
    2478        6360 :   } else if (type == MachineType::Int32() || type == MachineType::Uint32()) {
    2479             :     opcode = word32_op;
    2480             :   } else {
    2481           0 :     UNREACHABLE();
    2482             :     return;
    2483             :   }
    2484       13204 :   VisitAtomicBinop(this, node, opcode);
    2485       13208 : }
    2486             : 
    2487             : #define VISIT_ATOMIC_BINOP(op)                                   \
    2488             :   void InstructionSelector::VisitWord32Atomic##op(Node* node) {  \
    2489             :     VisitWord32AtomicBinaryOperation(                            \
    2490             :         node, kWord32Atomic##op##Int8, kWord32Atomic##op##Uint8, \
    2491             :         kWord32Atomic##op##Int16, kWord32Atomic##op##Uint16,     \
    2492             :         kWord32Atomic##op##Word32);                              \
    2493             :   }
    2494        2567 : VISIT_ATOMIC_BINOP(Add)
    2495        2676 : VISIT_ATOMIC_BINOP(Sub)
    2496        2645 : VISIT_ATOMIC_BINOP(And)
    2497        2552 : VISIT_ATOMIC_BINOP(Or)
    2498        2770 : VISIT_ATOMIC_BINOP(Xor)
    2499             : #undef VISIT_ATOMIC_BINOP
    2500             : 
    2501       10770 : void InstructionSelector::VisitWord64AtomicBinaryOperation(
    2502       10770 :     Node* node, ArchOpcode uint8_op, ArchOpcode uint16_op, ArchOpcode uint32_op,
    2503             :     ArchOpcode word64_op) {
    2504       10770 :   MachineType type = AtomicOpType(node->op());
    2505             :   ArchOpcode opcode = kArchNop;
    2506       10774 :   if (type == MachineType::Uint8()) {
    2507             :     opcode = uint8_op;
    2508        6994 :   } else if (type == MachineType::Uint16()) {
    2509             :     opcode = uint16_op;
    2510        4512 :   } else if (type == MachineType::Uint32()) {
    2511             :     opcode = uint32_op;
    2512        2490 :   } else if (type == MachineType::Uint64()) {
    2513             :     opcode = word64_op;
    2514             :   } else {
    2515           0 :     UNREACHABLE();
    2516             :     return;
    2517             :   }
    2518       10774 :   VisitAtomicBinop(this, node, opcode);
    2519       10781 : }
    2520             : 
    2521             : #define VISIT_ATOMIC_BINOP(op)                                           \
    2522             :   void InstructionSelector::VisitWord64Atomic##op(Node* node) {          \
    2523             :     VisitWord64AtomicBinaryOperation(                                    \
    2524             :         node, kX64Word64Atomic##op##Uint8, kX64Word64Atomic##op##Uint16, \
    2525             :         kX64Word64Atomic##op##Uint32, kX64Word64Atomic##op##Uint64);     \
    2526             :   }
    2527        2204 : VISIT_ATOMIC_BINOP(Add)
    2528        2056 : VISIT_ATOMIC_BINOP(Sub)
    2529        2262 : VISIT_ATOMIC_BINOP(And)
    2530        1915 : VISIT_ATOMIC_BINOP(Or)
    2531        2349 : VISIT_ATOMIC_BINOP(Xor)
    2532             : #undef VISIT_ATOMIC_BINOP
    2533             : 
    2534             : #define SIMD_TYPES(V) \
    2535             :   V(F32x4)            \
    2536             :   V(I32x4)            \
    2537             :   V(I16x8)            \
    2538             :   V(I8x16)
    2539             : 
    2540             : #define SIMD_BINOP_LIST(V) \
    2541             :   V(F32x4Add)              \
    2542             :   V(F32x4AddHoriz)         \
    2543             :   V(F32x4Sub)              \
    2544             :   V(F32x4Mul)              \
    2545             :   V(F32x4Min)              \
    2546             :   V(F32x4Max)              \
    2547             :   V(F32x4Eq)               \
    2548             :   V(F32x4Ne)               \
    2549             :   V(F32x4Lt)               \
    2550             :   V(F32x4Le)               \
    2551             :   V(I32x4Add)              \
    2552             :   V(I32x4AddHoriz)         \
    2553             :   V(I32x4Sub)              \
    2554             :   V(I32x4Mul)              \
    2555             :   V(I32x4MinS)             \
    2556             :   V(I32x4MaxS)             \
    2557             :   V(I32x4Eq)               \
    2558             :   V(I32x4Ne)               \
    2559             :   V(I32x4GtS)              \
    2560             :   V(I32x4GeS)              \
    2561             :   V(I32x4MinU)             \
    2562             :   V(I32x4MaxU)             \
    2563             :   V(I32x4GtU)              \
    2564             :   V(I32x4GeU)              \
    2565             :   V(I16x8SConvertI32x4)    \
    2566             :   V(I16x8Add)              \
    2567             :   V(I16x8AddSaturateS)     \
    2568             :   V(I16x8AddHoriz)         \
    2569             :   V(I16x8Sub)              \
    2570             :   V(I16x8SubSaturateS)     \
    2571             :   V(I16x8Mul)              \
    2572             :   V(I16x8MinS)             \
    2573             :   V(I16x8MaxS)             \
    2574             :   V(I16x8Eq)               \
    2575             :   V(I16x8Ne)               \
    2576             :   V(I16x8GtS)              \
    2577             :   V(I16x8GeS)              \
    2578             :   V(I16x8AddSaturateU)     \
    2579             :   V(I16x8SubSaturateU)     \
    2580             :   V(I16x8MinU)             \
    2581             :   V(I16x8MaxU)             \
    2582             :   V(I16x8GtU)              \
    2583             :   V(I16x8GeU)              \
    2584             :   V(I8x16SConvertI16x8)    \
    2585             :   V(I8x16Add)              \
    2586             :   V(I8x16AddSaturateS)     \
    2587             :   V(I8x16Sub)              \
    2588             :   V(I8x16SubSaturateS)     \
    2589             :   V(I8x16MinS)             \
    2590             :   V(I8x16MaxS)             \
    2591             :   V(I8x16Eq)               \
    2592             :   V(I8x16Ne)               \
    2593             :   V(I8x16GtS)              \
    2594             :   V(I8x16GeS)              \
    2595             :   V(I8x16AddSaturateU)     \
    2596             :   V(I8x16SubSaturateU)     \
    2597             :   V(I8x16MinU)             \
    2598             :   V(I8x16MaxU)             \
    2599             :   V(I8x16GtU)              \
    2600             :   V(I8x16GeU)              \
    2601             :   V(S128And)               \
    2602             :   V(S128Or)                \
    2603             :   V(S128Xor)
    2604             : 
    2605             : #define SIMD_UNOP_LIST(V)   \
    2606             :   V(F32x4SConvertI32x4)     \
    2607             :   V(F32x4Abs)               \
    2608             :   V(F32x4Neg)               \
    2609             :   V(F32x4RecipApprox)       \
    2610             :   V(F32x4RecipSqrtApprox)   \
    2611             :   V(I32x4SConvertI16x8Low)  \
    2612             :   V(I32x4SConvertI16x8High) \
    2613             :   V(I32x4Neg)               \
    2614             :   V(I32x4UConvertI16x8Low)  \
    2615             :   V(I32x4UConvertI16x8High) \
    2616             :   V(I16x8SConvertI8x16Low)  \
    2617             :   V(I16x8SConvertI8x16High) \
    2618             :   V(I16x8Neg)               \
    2619             :   V(I16x8UConvertI8x16Low)  \
    2620             :   V(I16x8UConvertI8x16High) \
    2621             :   V(I8x16Neg)               \
    2622             :   V(S128Not)
    2623             : 
    2624             : #define SIMD_SHIFT_OPCODES(V) \
    2625             :   V(I32x4Shl)                 \
    2626             :   V(I32x4ShrS)                \
    2627             :   V(I32x4ShrU)                \
    2628             :   V(I16x8Shl)                 \
    2629             :   V(I16x8ShrS)                \
    2630             :   V(I16x8ShrU)                \
    2631             :   V(I8x16Shl)                 \
    2632             :   V(I8x16ShrS)                \
    2633             :   V(I8x16ShrU)
    2634             : 
    2635             : #define SIMD_ANYTRUE_LIST(V) \
    2636             :   V(S1x4AnyTrue)             \
    2637             :   V(S1x8AnyTrue)             \
    2638             :   V(S1x16AnyTrue)
    2639             : 
    2640             : #define SIMD_ALLTRUE_LIST(V) \
    2641             :   V(S1x4AllTrue)             \
    2642             :   V(S1x8AllTrue)             \
    2643             :   V(S1x16AllTrue)
    2644             : 
    2645          16 : void InstructionSelector::VisitS128Zero(Node* node) {
    2646             :   X64OperandGenerator g(this);
    2647          16 :   Emit(kX64S128Zero, g.DefineAsRegister(node));
    2648          16 : }
    2649             : 
    2650             : #define VISIT_SIMD_SPLAT(Type)                               \
    2651             :   void InstructionSelector::Visit##Type##Splat(Node* node) { \
    2652             :     X64OperandGenerator g(this);                             \
    2653             :     Emit(kX64##Type##Splat, g.DefineAsRegister(node),        \
    2654             :          g.Use(node->InputAt(0)));                           \
    2655             :   }
    2656        3856 : SIMD_TYPES(VISIT_SIMD_SPLAT)
    2657             : #undef VISIT_SIMD_SPLAT
    2658             : 
    2659             : #define VISIT_SIMD_EXTRACT_LANE(Type)                              \
    2660             :   void InstructionSelector::Visit##Type##ExtractLane(Node* node) { \
    2661             :     X64OperandGenerator g(this);                                   \
    2662             :     int32_t lane = OpParameter<int32_t>(node->op());               \
    2663             :     Emit(kX64##Type##ExtractLane, g.DefineAsRegister(node),        \
    2664             :          g.UseRegister(node->InputAt(0)), g.UseImmediate(lane));   \
    2665             :   }
    2666       32724 : SIMD_TYPES(VISIT_SIMD_EXTRACT_LANE)
    2667             : #undef VISIT_SIMD_EXTRACT_LANE
    2668             : 
    2669             : #define VISIT_SIMD_REPLACE_LANE(Type)                              \
    2670             :   void InstructionSelector::Visit##Type##ReplaceLane(Node* node) { \
    2671             :     X64OperandGenerator g(this);                                   \
    2672             :     int32_t lane = OpParameter<int32_t>(node->op());               \
    2673             :     Emit(kX64##Type##ReplaceLane, g.DefineSameAsFirst(node),       \
    2674             :          g.UseRegister(node->InputAt(0)), g.UseImmediate(lane),    \
    2675             :          g.Use(node->InputAt(1)));                                 \
    2676             :   }
    2677        7840 : SIMD_TYPES(VISIT_SIMD_REPLACE_LANE)
    2678             : #undef VISIT_SIMD_REPLACE_LANE
    2679             : 
    2680             : #define VISIT_SIMD_SHIFT(Opcode)                                  \
    2681             :   void InstructionSelector::Visit##Opcode(Node* node) {           \
    2682             :     X64OperandGenerator g(this);                                  \
    2683             :     int32_t value = OpParameter<int32_t>(node->op());             \
    2684             :     Emit(kX64##Opcode, g.DefineSameAsFirst(node),                 \
    2685             :          g.UseRegister(node->InputAt(0)), g.UseImmediate(value)); \
    2686             :   }
    2687        1908 : SIMD_SHIFT_OPCODES(VISIT_SIMD_SHIFT)
    2688             : #undef VISIT_SIMD_SHIFT
    2689             : #undef SIMD_SHIFT_OPCODES
    2690             : 
    2691             : #define VISIT_SIMD_UNOP(Opcode)                         \
    2692             :   void InstructionSelector::Visit##Opcode(Node* node) { \
    2693             :     X64OperandGenerator g(this);                        \
    2694             :     Emit(kX64##Opcode, g.DefineAsRegister(node),        \
    2695             :          g.UseRegister(node->InputAt(0)));              \
    2696             :   }
    2697         136 : SIMD_UNOP_LIST(VISIT_SIMD_UNOP)
    2698             : #undef VISIT_SIMD_UNOP
    2699             : #undef SIMD_UNOP_LIST
    2700             : 
    2701             : #define VISIT_SIMD_BINOP(Opcode)                                            \
    2702             :   void InstructionSelector::Visit##Opcode(Node* node) {                     \
    2703             :     X64OperandGenerator g(this);                                            \
    2704             :     Emit(kX64##Opcode, g.DefineSameAsFirst(node),                           \
    2705             :          g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1))); \
    2706             :   }
    2707        1152 : SIMD_BINOP_LIST(VISIT_SIMD_BINOP)
    2708             : #undef VISIT_SIMD_BINOP
    2709             : #undef SIMD_BINOP_LIST
    2710             : 
    2711             : #define VISIT_SIMD_ANYTRUE(Opcode)                                        \
    2712             :   void InstructionSelector::Visit##Opcode(Node* node) {                   \
    2713             :     X64OperandGenerator g(this);                                          \
    2714             :     InstructionOperand temps[] = {g.TempRegister()};                      \
    2715             :     Emit(kX64##Opcode, g.DefineAsRegister(node),                          \
    2716             :          g.UseUniqueRegister(node->InputAt(0)), arraysize(temps), temps); \
    2717             :   }
    2718         180 : SIMD_ANYTRUE_LIST(VISIT_SIMD_ANYTRUE)
    2719             : #undef VISIT_SIMD_ANYTRUE
    2720             : #undef SIMD_ANYTRUE_LIST
    2721             : 
    2722             : #define VISIT_SIMD_ALLTRUE(Opcode)                                        \
    2723             :   void InstructionSelector::Visit##Opcode(Node* node) {                   \
    2724             :     X64OperandGenerator g(this);                                          \
    2725             :     InstructionOperand temps[] = {g.TempRegister()};                      \
    2726             :     Emit(kX64##Opcode, g.DefineAsRegister(node),                          \
    2727             :          g.UseUniqueRegister(node->InputAt(0)), arraysize(temps), temps); \
    2728             :   }
    2729         180 : SIMD_ALLTRUE_LIST(VISIT_SIMD_ALLTRUE)
    2730             : #undef VISIT_SIMD_ALLTRUE
    2731             : #undef SIMD_ALLTRUE_LIST
    2732             : #undef SIMD_TYPES
    2733             : 
    2734          28 : void InstructionSelector::VisitS128Select(Node* node) {
    2735             :   X64OperandGenerator g(this);
    2736             :   Emit(kX64S128Select, g.DefineSameAsFirst(node),
    2737             :        g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)),
    2738          84 :        g.UseRegister(node->InputAt(2)));
    2739          28 : }
    2740             : 
    2741           4 : void InstructionSelector::VisitF32x4UConvertI32x4(Node* node) {
    2742             :   X64OperandGenerator g(this);
    2743             :   Emit(kX64F32x4UConvertI32x4, g.DefineSameAsFirst(node),
    2744           4 :        g.UseRegister(node->InputAt(0)));
    2745           4 : }
    2746             : 
    2747           4 : void InstructionSelector::VisitI32x4SConvertF32x4(Node* node) {
    2748             :   X64OperandGenerator g(this);
    2749             :   Emit(kX64I32x4SConvertF32x4, g.DefineSameAsFirst(node),
    2750           4 :        g.UseRegister(node->InputAt(0)));
    2751           4 : }
    2752             : 
    2753           4 : void InstructionSelector::VisitI32x4UConvertF32x4(Node* node) {
    2754             :   X64OperandGenerator g(this);
    2755           4 :   InstructionOperand temps[] = {g.TempSimd128Register()};
    2756             :   Emit(kX64I32x4UConvertF32x4, g.DefineSameAsFirst(node),
    2757           4 :        g.UseRegister(node->InputAt(0)), arraysize(temps), temps);
    2758           4 : }
    2759             : 
    2760           4 : void InstructionSelector::VisitI16x8UConvertI32x4(Node* node) {
    2761             :   X64OperandGenerator g(this);
    2762             :   Emit(kX64I16x8UConvertI32x4, g.DefineSameAsFirst(node),
    2763           8 :        g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)));
    2764           4 : }
    2765             : 
    2766           4 : void InstructionSelector::VisitI8x16UConvertI16x8(Node* node) {
    2767             :   X64OperandGenerator g(this);
    2768             :   Emit(kX64I8x16UConvertI16x8, g.DefineSameAsFirst(node),
    2769           8 :        g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)));
    2770           4 : }
    2771             : 
    2772           4 : void InstructionSelector::VisitI8x16Mul(Node* node) {
    2773             :   X64OperandGenerator g(this);
    2774           4 :   InstructionOperand temps[] = {g.TempSimd128Register()};
    2775             :   Emit(kX64I8x16Mul, g.DefineSameAsFirst(node),
    2776             :        g.UseUniqueRegister(node->InputAt(0)),
    2777           8 :        g.UseUniqueRegister(node->InputAt(1)), arraysize(temps), temps);
    2778           4 : }
    2779             : 
    2780           0 : void InstructionSelector::VisitInt32AbsWithOverflow(Node* node) {
    2781           0 :   UNREACHABLE();
    2782             : }
    2783             : 
    2784           0 : void InstructionSelector::VisitInt64AbsWithOverflow(Node* node) {
    2785           0 :   UNREACHABLE();
    2786             : }
    2787             : 
    2788             : namespace {
    2789             : 
    2790             : // Packs a 4 lane shuffle into a single imm8 suitable for use by pshufd,
    2791             : // pshuflw, and pshufhw.
    2792             : uint8_t PackShuffle4(uint8_t* shuffle) {
    2793        5616 :   return (shuffle[0] & 3) | ((shuffle[1] & 3) << 2) | ((shuffle[2] & 3) << 4) |
    2794        5616 :          ((shuffle[3] & 3) << 6);
    2795             : }
    2796             : 
    2797             : // Gets an 8 bit lane mask suitable for 16x8 pblendw.
    2798             : uint8_t PackBlend8(const uint8_t* shuffle16x8) {
    2799             :   int8_t result = 0;
    2800         256 :   for (int i = 0; i < 8; ++i) {
    2801         256 :     result |= (shuffle16x8[i] >= 8 ? 1 : 0) << i;
    2802             :   }
    2803          32 :   return result;
    2804             : }
    2805             : 
    2806             : // Gets an 8 bit lane mask suitable for 32x4 pblendw.
    2807             : uint8_t PackBlend4(const uint8_t* shuffle32x4) {
    2808             :   int8_t result = 0;
    2809        1968 :   for (int i = 0; i < 4; ++i) {
    2810        1968 :     result |= (shuffle32x4[i] >= 4 ? 0x3 : 0) << (i * 2);
    2811             :   }
    2812          24 :   return result;
    2813             : }
    2814             : 
    2815             : // Returns true if shuffle can be decomposed into two 16x4 half shuffles
    2816             : // followed by a 16x8 blend.
    2817             : // E.g. [3 2 1 0 15 14 13 12].
    2818             : bool TryMatch16x8HalfShuffle(uint8_t* shuffle16x8, uint8_t* blend_mask) {
    2819             :   *blend_mask = 0;
    2820        4256 :   for (int i = 0; i < 8; i++) {
    2821        4584 :     if ((shuffle16x8[i] & 0x4) != (i & 0x4)) return false;
    2822        4256 :     *blend_mask |= (shuffle16x8[i] > 7 ? 1 : 0) << i;
    2823             :   }
    2824             :   return true;
    2825             : }
    2826             : 
    2827             : struct ShuffleEntry {
    2828             :   uint8_t shuffle[kSimd128Size];
    2829             :   ArchOpcode opcode;
    2830             :   bool src0_needs_reg;
    2831             :   bool src1_needs_reg;
    2832             : };
    2833             : 
    2834             : // Shuffles that map to architecture-specific instruction sequences. These are
    2835             : // matched very early, so we shouldn't include shuffles that match better in
    2836             : // later tests, like 32x4 and 16x8 shuffles. In general, these patterns should
    2837             : // map to either a single instruction, or be finer grained, such as zip/unzip or
    2838             : // transpose patterns.
    2839             : static const ShuffleEntry arch_shuffles[] = {
    2840             :     {{0, 1, 2, 3, 4, 5, 6, 7, 16, 17, 18, 19, 20, 21, 22, 23},
    2841             :      kX64S64x2UnpackLow,
    2842             :      true,
    2843             :      false},
    2844             :     {{8, 9, 10, 11, 12, 13, 14, 15, 24, 25, 26, 27, 28, 29, 30, 31},
    2845             :      kX64S64x2UnpackHigh,
    2846             :      true,
    2847             :      false},
    2848             :     {{0, 1, 2, 3, 16, 17, 18, 19, 4, 5, 6, 7, 20, 21, 22, 23},
    2849             :      kX64S32x4UnpackLow,
    2850             :      true,
    2851             :      false},
    2852             :     {{8, 9, 10, 11, 24, 25, 26, 27, 12, 13, 14, 15, 28, 29, 30, 31},
    2853             :      kX64S32x4UnpackHigh,
    2854             :      true,
    2855             :      false},
    2856             :     {{0, 1, 16, 17, 2, 3, 18, 19, 4, 5, 20, 21, 6, 7, 22, 23},
    2857             :      kX64S16x8UnpackLow,
    2858             :      true,
    2859             :      false},
    2860             :     {{8, 9, 24, 25, 10, 11, 26, 27, 12, 13, 28, 29, 14, 15, 30, 31},
    2861             :      kX64S16x8UnpackHigh,
    2862             :      true,
    2863             :      false},
    2864             :     {{0, 16, 1, 17, 2, 18, 3, 19, 4, 20, 5, 21, 6, 22, 7, 23},
    2865             :      kX64S8x16UnpackLow,
    2866             :      true,
    2867             :      false},
    2868             :     {{8, 24, 9, 25, 10, 26, 11, 27, 12, 28, 13, 29, 14, 30, 15, 31},
    2869             :      kX64S8x16UnpackHigh,
    2870             :      true,
    2871             :      false},
    2872             : 
    2873             :     {{0, 1, 4, 5, 8, 9, 12, 13, 16, 17, 20, 21, 24, 25, 28, 29},
    2874             :      kX64S16x8UnzipLow,
    2875             :      true,
    2876             :      false},
    2877             :     {{2, 3, 6, 7, 10, 11, 14, 15, 18, 19, 22, 23, 26, 27, 30, 31},
    2878             :      kX64S16x8UnzipHigh,
    2879             :      true,
    2880             :      true},
    2881             :     {{0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30},
    2882             :      kX64S8x16UnzipLow,
    2883             :      true,
    2884             :      true},
    2885             :     {{1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 21, 23, 25, 27, 29, 31},
    2886             :      kX64S8x16UnzipHigh,
    2887             :      true,
    2888             :      true},
    2889             :     {{0, 16, 2, 18, 4, 20, 6, 22, 8, 24, 10, 26, 12, 28, 14, 30},
    2890             :      kX64S8x16TransposeLow,
    2891             :      true,
    2892             :      true},
    2893             :     {{1, 17, 3, 19, 5, 21, 7, 23, 9, 25, 11, 27, 13, 29, 15, 31},
    2894             :      kX64S8x16TransposeHigh,
    2895             :      true,
    2896             :      true},
    2897             :     {{7, 6, 5, 4, 3, 2, 1, 0, 15, 14, 13, 12, 11, 10, 9, 8},
    2898             :      kX64S8x8Reverse,
    2899             :      false,
    2900             :      false},
    2901             :     {{3, 2, 1, 0, 7, 6, 5, 4, 11, 10, 9, 8, 15, 14, 13, 12},
    2902             :      kX64S8x4Reverse,
    2903             :      false,
    2904             :      false},
    2905             :     {{1, 0, 3, 2, 5, 4, 7, 6, 9, 8, 11, 10, 13, 12, 15, 14},
    2906             :      kX64S8x2Reverse,
    2907             :      true,
    2908             :      true}};
    2909             : 
    2910        4904 : bool TryMatchArchShuffle(const uint8_t* shuffle, const ShuffleEntry* table,
    2911             :                          size_t num_entries, bool is_swizzle,
    2912             :                          const ShuffleEntry** arch_shuffle) {
    2913        4904 :   uint8_t mask = is_swizzle ? kSimd128Size - 1 : 2 * kSimd128Size - 1;
    2914       75868 :   for (size_t i = 0; i < num_entries; ++i) {
    2915       72516 :     const ShuffleEntry& entry = table[i];
    2916             :     int j = 0;
    2917      126392 :     for (; j < kSimd128Size; ++j) {
    2918      124840 :       if ((entry.shuffle[j] & mask) != (shuffle[j] & mask)) {
    2919             :         break;
    2920             :       }
    2921             :     }
    2922       72516 :     if (j == kSimd128Size) {
    2923        1552 :       *arch_shuffle = &entry;
    2924        1552 :       return true;
    2925             :     }
    2926             :   }
    2927             :   return false;
    2928             : }
    2929             : 
    2930             : }  // namespace
    2931             : 
    2932        5144 : void InstructionSelector::VisitS8x16Shuffle(Node* node) {
    2933             :   uint8_t shuffle[kSimd128Size];
    2934             :   bool is_swizzle;
    2935        5144 :   CanonicalizeShuffle(node, shuffle, &is_swizzle);
    2936             : 
    2937             :   int imm_count = 0;
    2938             :   static const int kMaxImms = 6;
    2939             :   uint32_t imms[kMaxImms];
    2940             :   int temp_count = 0;
    2941             :   static const int kMaxTemps = 2;
    2942       15432 :   InstructionOperand temps[kMaxTemps];
    2943             : 
    2944             :   X64OperandGenerator g(this);
    2945             :   // Swizzles don't generally need DefineSameAsFirst to avoid a move.
    2946        5144 :   bool no_same_as_first = is_swizzle;
    2947             :   // We generally need UseRegister for input0, Use for input1.
    2948             :   bool src0_needs_reg = true;
    2949             :   bool src1_needs_reg = false;
    2950             :   ArchOpcode opcode = kX64S8x16Shuffle;  // general shuffle is the default
    2951             : 
    2952             :   uint8_t offset;
    2953             :   uint8_t shuffle32x4[4];
    2954             :   uint8_t shuffle16x8[8];
    2955             :   int index;
    2956             :   const ShuffleEntry* arch_shuffle;
    2957        5144 :   if (TryMatchConcat(shuffle, &offset)) {
    2958             :     // Swap inputs from the normal order for (v)palignr.
    2959         240 :     SwapShuffleInputs(node);
    2960         240 :     is_swizzle = false;        // It's simpler to just handle the general case.
    2961             :     no_same_as_first = false;  // SSE requires same-as-first.
    2962             :     opcode = kX64S8x16Alignr;
    2963             :     // palignr takes a single imm8 offset.
    2964         240 :     imms[imm_count++] = offset;
    2965        4904 :   } else if (TryMatchArchShuffle(shuffle, arch_shuffles,
    2966             :                                  arraysize(arch_shuffles), is_swizzle,
    2967        4904 :                                  &arch_shuffle)) {
    2968        1552 :     opcode = arch_shuffle->opcode;
    2969        1552 :     src0_needs_reg = arch_shuffle->src0_needs_reg;
    2970             :     // SSE can't take advantage of both operands in registers and needs
    2971             :     // same-as-first.
    2972             :     src1_needs_reg = false;
    2973             :     no_same_as_first = false;
    2974        3352 :   } else if (TryMatch32x4Shuffle(shuffle, shuffle32x4)) {
    2975             :     uint8_t shuffle_mask = PackShuffle4(shuffle32x4);
    2976        1024 :     if (is_swizzle) {
    2977         532 :       if (TryMatchIdentity(shuffle)) {
    2978             :         // Bypass normal shuffle code generation in this case.
    2979         228 :         EmitIdentity(node);
    2980        5372 :         return;
    2981             :       } else {
    2982             :         // pshufd takes a single imm8 shuffle mask.
    2983             :         opcode = kX64S32x4Swizzle;
    2984             :         no_same_as_first = true;
    2985             :         src0_needs_reg = false;
    2986         304 :         imms[imm_count++] = shuffle_mask;
    2987             :       }
    2988             :     } else {
    2989             :       // 2 operand shuffle
    2990             :       // A blend is more efficient than a general 32x4 shuffle; try it first.
    2991         492 :       if (TryMatchBlend(shuffle)) {
    2992             :         opcode = kX64S16x8Blend;
    2993             :         uint8_t blend_mask = PackBlend4(shuffle32x4);
    2994          24 :         imms[imm_count++] = blend_mask;
    2995             :       } else {
    2996             :         opcode = kX64S32x4Shuffle;
    2997             :         no_same_as_first = true;
    2998             :         src0_needs_reg = false;
    2999         468 :         imms[imm_count++] = shuffle_mask;
    3000             :         int8_t blend_mask = PackBlend4(shuffle32x4);
    3001         468 :         imms[imm_count++] = blend_mask;
    3002             :       }
    3003             :     }
    3004        2328 :   } else if (TryMatch16x8Shuffle(shuffle, shuffle16x8)) {
    3005             :     uint8_t blend_mask;
    3006         896 :     if (TryMatchBlend(shuffle)) {
    3007             :       opcode = kX64S16x8Blend;
    3008             :       blend_mask = PackBlend8(shuffle16x8);
    3009          32 :       imms[imm_count++] = blend_mask;
    3010         864 :     } else if (TryMatchDup<8>(shuffle, &index)) {
    3011             :       opcode = kX64S16x8Dup;
    3012             :       src0_needs_reg = false;
    3013         112 :       imms[imm_count++] = index;
    3014         752 :     } else if (TryMatch16x8HalfShuffle(shuffle16x8, &blend_mask)) {
    3015         424 :       opcode = is_swizzle ? kX64S16x8HalfShuffle1 : kX64S16x8HalfShuffle2;
    3016             :       // Half-shuffles don't need DefineSameAsFirst or UseRegister(src0).
    3017             :       no_same_as_first = true;
    3018             :       src0_needs_reg = false;
    3019             :       uint8_t mask_lo = PackShuffle4(shuffle16x8);
    3020             :       uint8_t mask_hi = PackShuffle4(shuffle16x8 + 4);
    3021         424 :       imms[imm_count++] = mask_lo;
    3022         424 :       imms[imm_count++] = mask_hi;
    3023         424 :       if (!is_swizzle) imms[imm_count++] = blend_mask;
    3024             :     }
    3025        1432 :   } else if (TryMatchDup<16>(shuffle, &index)) {
    3026             :     opcode = kX64S8x16Dup;
    3027             :     no_same_as_first = false;
    3028             :     src0_needs_reg = true;
    3029         216 :     imms[imm_count++] = index;
    3030             :   }
    3031        4916 :   if (opcode == kX64S8x16Shuffle) {
    3032             :     // Use same-as-first for general swizzle, but not shuffle.
    3033        1544 :     no_same_as_first = !is_swizzle;
    3034             :     src0_needs_reg = !no_same_as_first;
    3035        1544 :     imms[imm_count++] = Pack4Lanes(shuffle);
    3036        1544 :     imms[imm_count++] = Pack4Lanes(shuffle + 4);
    3037        1544 :     imms[imm_count++] = Pack4Lanes(shuffle + 8);
    3038        1544 :     imms[imm_count++] = Pack4Lanes(shuffle + 12);
    3039        1544 :     temps[temp_count++] = g.TempRegister();
    3040             :   }
    3041             : 
    3042             :   // Use DefineAsRegister(node) and Use(src0) if we can without forcing an extra
    3043             :   // move instruction in the CodeGenerator.
    3044             :   Node* input0 = node->InputAt(0);
    3045             :   InstructionOperand dst =
    3046        4916 :       no_same_as_first ? g.DefineAsRegister(node) : g.DefineSameAsFirst(node);
    3047             :   InstructionOperand src0 =
    3048        4916 :       src0_needs_reg ? g.UseRegister(input0) : g.Use(input0);
    3049             : 
    3050             :   int input_count = 0;
    3051       49160 :   InstructionOperand inputs[2 + kMaxImms + kMaxTemps];
    3052        4916 :   inputs[input_count++] = src0;
    3053        4916 :   if (!is_swizzle) {
    3054             :     Node* input1 = node->InputAt(1);
    3055             :     inputs[input_count++] =
    3056        2848 :         src1_needs_reg ? g.UseRegister(input1) : g.Use(input1);
    3057             :   }
    3058        9064 :   for (int i = 0; i < imm_count; ++i) {
    3059        9064 :     inputs[input_count++] = g.UseImmediate(imms[i]);
    3060             :   }
    3061        4916 :   Emit(opcode, 1, &dst, input_count, inputs, temp_count, temps);
    3062             : }
    3063             : 
    3064             : // static
    3065             : MachineOperatorBuilder::Flags
    3066     4756169 : InstructionSelector::SupportedMachineOperatorFlags() {
    3067             :   MachineOperatorBuilder::Flags flags =
    3068             :       MachineOperatorBuilder::kWord32ShiftIsSafe |
    3069             :       MachineOperatorBuilder::kWord32Ctz | MachineOperatorBuilder::kWord64Ctz |
    3070             :       MachineOperatorBuilder::kSpeculationFence;
    3071     4756169 :   if (CpuFeatures::IsSupported(POPCNT)) {
    3072             :     flags |= MachineOperatorBuilder::kWord32Popcnt |
    3073             :              MachineOperatorBuilder::kWord64Popcnt;
    3074             :   }
    3075     4756169 :   if (CpuFeatures::IsSupported(SSE4_1)) {
    3076             :     flags |= MachineOperatorBuilder::kFloat32RoundDown |
    3077             :              MachineOperatorBuilder::kFloat64RoundDown |
    3078             :              MachineOperatorBuilder::kFloat32RoundUp |
    3079             :              MachineOperatorBuilder::kFloat64RoundUp |
    3080             :              MachineOperatorBuilder::kFloat32RoundTruncate |
    3081             :              MachineOperatorBuilder::kFloat64RoundTruncate |
    3082             :              MachineOperatorBuilder::kFloat32RoundTiesEven |
    3083             :              MachineOperatorBuilder::kFloat64RoundTiesEven;
    3084             :   }
    3085     4756169 :   return flags;
    3086             : }
    3087             : 
    3088             : // static
    3089             : MachineOperatorBuilder::AlignmentRequirements
    3090     4755545 : InstructionSelector::AlignmentRequirements() {
    3091             :   return MachineOperatorBuilder::AlignmentRequirements::
    3092     4755545 :       FullUnalignedAccessSupport();
    3093             : }
    3094             : 
    3095             : }  // namespace compiler
    3096             : }  // namespace internal
    3097      178779 : }  // namespace v8

Generated by: LCOV version 1.10