LCOV - code coverage report
Current view: top level - src/compiler/backend/x64 - instruction-selector-x64.cc (source / functions) Hit Total Coverage
Test: app.info Lines: 1091 1163 93.8 %
Date: 2019-04-17 Functions: 319 333 95.8 %

          Line data    Source code
       1             : // Copyright 2014 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #include <algorithm>
       6             : 
       7             : #include "src/base/adapters.h"
       8             : #include "src/base/overflowing-math.h"
       9             : #include "src/compiler/backend/instruction-selector-impl.h"
      10             : #include "src/compiler/node-matchers.h"
      11             : #include "src/compiler/node-properties.h"
      12             : #include "src/roots-inl.h"
      13             : 
      14             : namespace v8 {
      15             : namespace internal {
      16             : namespace compiler {
      17             : 
      18             : // Adds X64-specific methods for generating operands.
      19             : class X64OperandGenerator final : public OperandGenerator {
      20             :  public:
      21             :   explicit X64OperandGenerator(InstructionSelector* selector)
      22             :       : OperandGenerator(selector) {}
      23             : 
      24    30501386 :   bool CanBeImmediate(Node* node) {
      25    30501386 :     switch (node->opcode()) {
      26             :       case IrOpcode::kInt32Constant:
      27             :       case IrOpcode::kRelocatableInt32Constant:
      28             :         return true;
      29             :       case IrOpcode::kInt64Constant: {
      30    12973218 :         const int64_t value = OpParameter<int64_t>(node->op());
      31    12973218 :         return std::numeric_limits<int32_t>::min() < value &&
      32             :                value <= std::numeric_limits<int32_t>::max();
      33             :       }
      34             :       case IrOpcode::kNumberConstant: {
      35      387911 :         const double value = OpParameter<double>(node->op());
      36      387911 :         return bit_cast<int64_t>(value) == 0;
      37             :       }
      38             :       default:
      39             :         return false;
      40             :     }
      41             :   }
      42             : 
      43             :   int32_t GetImmediateIntegerValue(Node* node) {
      44             :     DCHECK(CanBeImmediate(node));
      45      201555 :     if (node->opcode() == IrOpcode::kInt32Constant) {
      46      151916 :       return OpParameter<int32_t>(node->op());
      47             :     }
      48             :     DCHECK_EQ(IrOpcode::kInt64Constant, node->opcode());
      49       49639 :     return static_cast<int32_t>(OpParameter<int64_t>(node->op()));
      50             :   }
      51             : 
      52    11055298 :   bool CanBeMemoryOperand(InstructionCode opcode, Node* node, Node* input,
      53             :                           int effect_level) {
      54     2018580 :     if (input->opcode() != IrOpcode::kLoad ||
      55     2018547 :         !selector()->CanCover(node, input)) {
      56             :       return false;
      57             :     }
      58     1475920 :     if (effect_level != selector()->GetEffectLevel(input)) {
      59             :       return false;
      60             :     }
      61             :     MachineRepresentation rep =
      62     1475872 :         LoadRepresentationOf(input->op()).representation();
      63     1475873 :     switch (opcode) {
      64             :       case kX64And:
      65             :       case kX64Or:
      66             :       case kX64Xor:
      67             :       case kX64Add:
      68             :       case kX64Sub:
      69             :       case kX64Push:
      70             :       case kX64Cmp:
      71             :       case kX64Test:
      72             :         // When pointer compression is enabled 64-bit memory operands can't be
      73             :         // used for tagged values.
      74     1516825 :         return rep == MachineRepresentation::kWord64 ||
      75             :                (!COMPRESS_POINTERS_BOOL && IsAnyTagged(rep));
      76             :       case kX64And32:
      77             :       case kX64Or32:
      78             :       case kX64Xor32:
      79             :       case kX64Add32:
      80             :       case kX64Sub32:
      81             :       case kX64Cmp32:
      82             :       case kX64Test32:
      83             :         // When pointer compression is enabled 32-bit memory operands can be
      84             :         // used for tagged values.
      85             :         return rep == MachineRepresentation::kWord32 ||
      86      124684 :                (COMPRESS_POINTERS_BOOL && IsAnyTagged(rep));
      87             :       case kX64Cmp16:
      88             :       case kX64Test16:
      89      221558 :         return rep == MachineRepresentation::kWord16;
      90             :       case kX64Cmp8:
      91             :       case kX64Test8:
      92      109334 :         return rep == MachineRepresentation::kWord8;
      93             :       default:
      94             :         break;
      95             :     }
      96             :     return false;
      97             :   }
      98             : 
      99    15295887 :   AddressingMode GenerateMemoryOperandInputs(Node* index, int scale_exponent,
     100             :                                              Node* base, Node* displacement,
     101             :                                              DisplacementMode displacement_mode,
     102             :                                              InstructionOperand inputs[],
     103             :                                              size_t* input_count) {
     104             :     AddressingMode mode = kMode_MRI;
     105    15295887 :     if (base != nullptr && (index != nullptr || displacement != nullptr)) {
     106    12857487 :       if (base->opcode() == IrOpcode::kInt32Constant &&
     107       69995 :           OpParameter<int32_t>(base->op()) == 0) {
     108             :         base = nullptr;
     109    12911262 :       } else if (base->opcode() == IrOpcode::kInt64Constant &&
     110      178894 :                  OpParameter<int64_t>(base->op()) == 0) {
     111             :         base = nullptr;
     112             :       }
     113             :     }
     114    15295887 :     if (base != nullptr) {
     115    15012728 :       inputs[(*input_count)++] = UseRegister(base);
     116    15013849 :       if (index != nullptr) {
     117             :         DCHECK(scale_exponent >= 0 && scale_exponent <= 3);
     118     1335995 :         inputs[(*input_count)++] = UseRegister(index);
     119     1335994 :         if (displacement != nullptr) {
     120             :           inputs[(*input_count)++] = displacement_mode == kNegativeDisplacement
     121             :                                          ? UseNegatedImmediate(displacement)
     122      455213 :                                          : UseImmediate(displacement);
     123             :           static const AddressingMode kMRnI_modes[] = {kMode_MR1I, kMode_MR2I,
     124             :                                                        kMode_MR4I, kMode_MR8I};
     125      455214 :           mode = kMRnI_modes[scale_exponent];
     126             :         } else {
     127             :           static const AddressingMode kMRn_modes[] = {kMode_MR1, kMode_MR2,
     128             :                                                       kMode_MR4, kMode_MR8};
     129      880781 :           mode = kMRn_modes[scale_exponent];
     130             :         }
     131             :       } else {
     132    13677854 :         if (displacement == nullptr) {
     133             :           mode = kMode_MR;
     134             :         } else {
     135             :           inputs[(*input_count)++] = displacement_mode == kNegativeDisplacement
     136             :                                          ? UseNegatedImmediate(displacement)
     137    11350117 :                                          : UseImmediate(displacement);
     138             :           mode = kMode_MRI;
     139             :         }
     140             :       }
     141             :     } else {
     142             :       DCHECK(scale_exponent >= 0 && scale_exponent <= 3);
     143      283159 :       if (displacement != nullptr) {
     144      234584 :         if (index == nullptr) {
     145       50977 :           inputs[(*input_count)++] = UseRegister(displacement);
     146             :           mode = kMode_MR;
     147             :         } else {
     148      183607 :           inputs[(*input_count)++] = UseRegister(index);
     149             :           inputs[(*input_count)++] = displacement_mode == kNegativeDisplacement
     150             :                                          ? UseNegatedImmediate(displacement)
     151      183607 :                                          : UseImmediate(displacement);
     152             :           static const AddressingMode kMnI_modes[] = {kMode_MRI, kMode_M2I,
     153             :                                                       kMode_M4I, kMode_M8I};
     154      183607 :           mode = kMnI_modes[scale_exponent];
     155             :         }
     156             :       } else {
     157       48575 :         inputs[(*input_count)++] = UseRegister(index);
     158             :         static const AddressingMode kMn_modes[] = {kMode_MR, kMode_MR1,
     159             :                                                    kMode_M4, kMode_M8};
     160       48579 :         mode = kMn_modes[scale_exponent];
     161       48579 :         if (mode == kMode_MR1) {
     162             :           // [%r1 + %r1*1] has a smaller encoding than [%r1*2+0]
     163       11524 :           inputs[(*input_count)++] = UseRegister(index);
     164             :         }
     165             :       }
     166             :     }
     167    15296837 :     return mode;
     168             :   }
     169             : 
     170    13523883 :   AddressingMode GetEffectiveAddressMemoryOperand(Node* operand,
     171             :                                                   InstructionOperand inputs[],
     172             :                                                   size_t* input_count) {
     173    13523883 :     if (selector()->CanAddressRelativeToRootsRegister()) {
     174     4452482 :       LoadMatcher<ExternalReferenceMatcher> m(operand);
     175     4452480 :       if (m.index().HasValue() && m.object().HasValue()) {
     176             :         ptrdiff_t const delta =
     177      487566 :             m.index().Value() +
     178      487566 :             TurboAssemblerBase::RootRegisterOffsetForExternalReference(
     179      487562 :                 selector()->isolate(), m.object().Value());
     180      487562 :         if (is_int32(delta)) {
     181      487564 :           inputs[(*input_count)++] = TempImmediate(static_cast<int32_t>(delta));
     182      487564 :           return kMode_Root;
     183             :         }
     184             :       }
     185             :     }
     186             :     BaseWithIndexAndDisplacement64Matcher m(operand, AddressOption::kAllowAll);
     187             :     DCHECK(m.matches());
     188    13035874 :     if (m.displacement() == nullptr || CanBeImmediate(m.displacement())) {
     189             :       return GenerateMemoryOperandInputs(
     190             :           m.index(), m.scale(), m.base(), m.displacement(),
     191    13032601 :           m.displacement_mode(), inputs, input_count);
     192        3109 :     } else if (m.base() == nullptr &&
     193             :                m.displacement_mode() == kPositiveDisplacement) {
     194             :       // The displacement cannot be an immediate, but we can use the
     195             :       // displacement as base instead and still benefit from addressing
     196             :       // modes for the scale.
     197             :       return GenerateMemoryOperandInputs(m.index(), m.scale(), m.displacement(),
     198             :                                          nullptr, m.displacement_mode(), inputs,
     199        1469 :                                          input_count);
     200             :     } else {
     201        3280 :       inputs[(*input_count)++] = UseRegister(operand->InputAt(0));
     202        3290 :       inputs[(*input_count)++] = UseRegister(operand->InputAt(1));
     203        1667 :       return kMode_MR1;
     204             :     }
     205             :   }
     206             : 
     207      355109 :   InstructionOperand GetEffectiveIndexOperand(Node* index,
     208             :                                               AddressingMode* mode) {
     209      355109 :     if (CanBeImmediate(index)) {
     210      299288 :       *mode = kMode_MRI;
     211      299288 :       return UseImmediate(index);
     212             :     } else {
     213       55800 :       *mode = kMode_MR1;
     214       55800 :       return UseUniqueRegister(index);
     215             :     }
     216             :   }
     217             : 
     218             :   bool CanBeBetterLeftOperand(Node* node) const {
     219      844849 :     return !selector()->IsLive(node);
     220             :   }
     221             : };
     222             : 
     223             : namespace {
     224     7373628 : ArchOpcode GetLoadOpcode(LoadRepresentation load_rep) {
     225             :   ArchOpcode opcode = kArchNop;
     226     7373628 :   switch (load_rep.representation()) {
     227             :     case MachineRepresentation::kFloat32:
     228             :       opcode = kX64Movss;
     229       15708 :       break;
     230             :     case MachineRepresentation::kFloat64:
     231             :       opcode = kX64Movsd;
     232      426154 :       break;
     233             :     case MachineRepresentation::kBit:  // Fall through.
     234             :     case MachineRepresentation::kWord8:
     235      219881 :       opcode = load_rep.IsSigned() ? kX64Movsxbl : kX64Movzxbl;
     236             :       break;
     237             :     case MachineRepresentation::kWord16:
     238      162510 :       opcode = load_rep.IsSigned() ? kX64Movsxwl : kX64Movzxwl;
     239             :       break;
     240             :     case MachineRepresentation::kWord32:
     241             :       opcode = kX64Movl;
     242      748266 :       break;
     243             : #ifdef V8_COMPRESS_POINTERS
     244             :     case MachineRepresentation::kTaggedSigned:
     245             :       opcode = kX64MovqDecompressTaggedSigned;
     246             :       break;
     247             :     case MachineRepresentation::kTaggedPointer:
     248             :       opcode = kX64MovqDecompressTaggedPointer;
     249             :       break;
     250             :     case MachineRepresentation::kTagged:
     251             :       opcode = kX64MovqDecompressAnyTagged;
     252             :       break;
     253             :     case MachineRepresentation::kCompressedSigned:   // Fall through.
     254             :     case MachineRepresentation::kCompressedPointer:  // Fall through.
     255             :     case MachineRepresentation::kCompressed:
     256             :       opcode = kX64Movl;
     257             :       break;
     258             : #else
     259             :     case MachineRepresentation::kCompressedSigned:   // Fall through.
     260             :     case MachineRepresentation::kCompressedPointer:  // Fall through.
     261             :     case MachineRepresentation::kCompressed:
     262           0 :       UNREACHABLE();
     263             :       break;
     264             :     case MachineRepresentation::kTaggedSigned:   // Fall through.
     265             :     case MachineRepresentation::kTaggedPointer:  // Fall through.
     266             :     case MachineRepresentation::kTagged:
     267             :       opcode = kX64Movq;
     268     3891526 :       break;
     269             : #endif
     270             :     case MachineRepresentation::kWord64:
     271             :       opcode = kX64Movq;
     272     1904833 :       break;
     273             :     case MachineRepresentation::kSimd128:  // Fall through.
     274             :       opcode = kX64Movdqu;
     275        5780 :       break;
     276             :     case MachineRepresentation::kNone:
     277           0 :       UNREACHABLE();
     278             :       break;
     279             :   }
     280     7373628 :   return opcode;
     281             : }
     282             : 
     283     4703041 : ArchOpcode GetStoreOpcode(StoreRepresentation store_rep) {
     284     4703041 :   switch (store_rep.representation()) {
     285             :     case MachineRepresentation::kFloat32:
     286             :       return kX64Movss;
     287             :       break;
     288             :     case MachineRepresentation::kFloat64:
     289             :       return kX64Movsd;
     290             :       break;
     291             :     case MachineRepresentation::kBit:  // Fall through.
     292             :     case MachineRepresentation::kWord8:
     293             :       return kX64Movb;
     294             :       break;
     295             :     case MachineRepresentation::kWord16:
     296             :       return kX64Movw;
     297             :       break;
     298             :     case MachineRepresentation::kWord32:
     299             :       return kX64Movl;
     300             :       break;
     301             : #ifdef V8_COMPRESS_POINTERS
     302             :     case MachineRepresentation::kTaggedSigned:   // Fall through.
     303             :     case MachineRepresentation::kTaggedPointer:  // Fall through.
     304             :     case MachineRepresentation::kTagged:
     305             :       return kX64MovqCompressTagged;
     306             :     case MachineRepresentation::kCompressedSigned:   // Fall through.
     307             :     case MachineRepresentation::kCompressedPointer:  // Fall through.
     308             :     case MachineRepresentation::kCompressed:
     309             :       return kX64Movl;
     310             : #else
     311             :     case MachineRepresentation::kCompressedSigned:   // Fall through.
     312             :     case MachineRepresentation::kCompressedPointer:  // Fall through.
     313             :     case MachineRepresentation::kCompressed:
     314           0 :       UNREACHABLE();
     315             :     case MachineRepresentation::kTaggedSigned:   // Fall through.
     316             :     case MachineRepresentation::kTaggedPointer:  // Fall through.
     317             :     case MachineRepresentation::kTagged:
     318             :       return kX64Movq;
     319             :       break;
     320             : #endif
     321             :     case MachineRepresentation::kWord64:
     322             :       return kX64Movq;
     323             :       break;
     324             :     case MachineRepresentation::kSimd128:  // Fall through.
     325             :       return kX64Movdqu;
     326             :       break;
     327             :     case MachineRepresentation::kNone:
     328           0 :       UNREACHABLE();
     329             :   }
     330           0 :   UNREACHABLE();
     331             : }
     332             : 
     333             : }  // namespace
     334             : 
     335      367595 : void InstructionSelector::VisitStackSlot(Node* node) {
     336      367595 :   StackSlotRepresentation rep = StackSlotRepresentationOf(node->op());
     337      367596 :   int slot = frame_->AllocateSpillSlot(rep.size());
     338             :   OperandGenerator g(this);
     339             : 
     340      367604 :   Emit(kArchStackSlot, g.DefineAsRegister(node),
     341      735205 :        sequence()->AddImmediate(Constant(slot)), 0, nullptr);
     342      367613 : }
     343             : 
     344         160 : void InstructionSelector::VisitDebugAbort(Node* node) {
     345             :   X64OperandGenerator g(this);
     346         160 :   Emit(kArchDebugAbort, g.NoOutput(), g.UseFixed(node->InputAt(0), rdx));
     347         160 : }
     348             : 
     349     7373521 : void InstructionSelector::VisitLoad(Node* node) {
     350     7373521 :   LoadRepresentation load_rep = LoadRepresentationOf(node->op());
     351             :   X64OperandGenerator g(this);
     352             : 
     353     7373600 :   ArchOpcode opcode = GetLoadOpcode(load_rep);
     354     7373588 :   InstructionOperand outputs[] = {g.DefineAsRegister(node)};
     355    51613269 :   InstructionOperand inputs[3];
     356     7373379 :   size_t input_count = 0;
     357             :   AddressingMode mode =
     358     7373379 :       g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count);
     359     7373946 :   InstructionCode code = opcode | AddressingModeField::encode(mode);
     360     7373946 :   if (node->opcode() == IrOpcode::kProtectedLoad) {
     361       96892 :     code |= MiscField::encode(kMemoryAccessProtected);
     362     7277054 :   } else if (node->opcode() == IrOpcode::kPoisonedLoad) {
     363           0 :     CHECK_NE(poisoning_level_, PoisoningMitigationLevel::kDontPoison);
     364           0 :     code |= MiscField::encode(kMemoryAccessPoisoned);
     365             :   }
     366     7373946 :   Emit(code, 1, outputs, input_count, inputs);
     367     7374302 : }
     368             : 
     369           0 : void InstructionSelector::VisitPoisonedLoad(Node* node) { VisitLoad(node); }
     370             : 
     371       96872 : void InstructionSelector::VisitProtectedLoad(Node* node) { VisitLoad(node); }
     372             : 
     373     4878495 : void InstructionSelector::VisitStore(Node* node) {
     374             :   X64OperandGenerator g(this);
     375             :   Node* base = node->InputAt(0);
     376             :   Node* index = node->InputAt(1);
     377             :   Node* value = node->InputAt(2);
     378             : 
     379     4878495 :   StoreRepresentation store_rep = StoreRepresentationOf(node->op());
     380             :   WriteBarrierKind write_barrier_kind = store_rep.write_barrier_kind();
     381             : 
     382     4878503 :   if (write_barrier_kind != kNoWriteBarrier) {
     383             :     DCHECK(CanBeTaggedPointer(store_rep.representation()));
     384             :     AddressingMode addressing_mode;
     385             :     InstructionOperand inputs[] = {
     386             :         g.UseUniqueRegister(base),
     387             :         g.GetEffectiveIndexOperand(index, &addressing_mode),
     388      320388 :         g.UseUniqueRegister(value)};
     389             :     RecordWriteMode record_write_mode =
     390      320389 :         WriteBarrierKindToRecordWriteMode(write_barrier_kind);
     391      640778 :     InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()};
     392             :     InstructionCode code = kArchStoreWithWriteBarrier;
     393      640778 :     code |= AddressingModeField::encode(addressing_mode);
     394      320389 :     code |= MiscField::encode(static_cast<int>(record_write_mode));
     395      320389 :     Emit(code, 0, nullptr, arraysize(inputs), inputs, arraysize(temps), temps);
     396             :   } else {
     397     4558115 :     ArchOpcode opcode = GetStoreOpcode(store_rep);
     398    41022945 :     InstructionOperand inputs[4];
     399     4558105 :     size_t input_count = 0;
     400             :     AddressingMode addressing_mode =
     401     4558105 :         g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count);
     402             :     InstructionCode code =
     403     4558119 :         opcode | AddressingModeField::encode(addressing_mode);
     404     9116223 :     if ((ElementSizeLog2Of(store_rep.representation()) <
     405     1561329 :          kSystemPointerSizeLog2) &&
     406     4675616 :         (value->opcode() == IrOpcode::kTruncateInt64ToInt32) &&
     407      117512 :         CanCover(node, value)) {
     408             :       value = value->InputAt(0);
     409             :     }
     410             :     InstructionOperand value_operand =
     411     4558104 :         g.CanBeImmediate(value) ? g.UseImmediate(value) : g.UseRegister(value);
     412     4558108 :     inputs[input_count++] = value_operand;
     413             :     Emit(code, 0, static_cast<InstructionOperand*>(nullptr), input_count,
     414     4558108 :          inputs);
     415             :   }
     416     4878505 : }
     417             : 
     418      144965 : void InstructionSelector::VisitProtectedStore(Node* node) {
     419             :   X64OperandGenerator g(this);
     420             :   Node* value = node->InputAt(2);
     421             : 
     422      144965 :   StoreRepresentation store_rep = StoreRepresentationOf(node->op());
     423             : 
     424      144957 :   ArchOpcode opcode = GetStoreOpcode(store_rep);
     425     1304438 :   InstructionOperand inputs[4];
     426      145038 :   size_t input_count = 0;
     427             :   AddressingMode addressing_mode =
     428      145038 :       g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count);
     429      145232 :   InstructionCode code = opcode | AddressingModeField::encode(addressing_mode) |
     430      145232 :                          MiscField::encode(kMemoryAccessProtected);
     431             :   InstructionOperand value_operand =
     432      145232 :       g.CanBeImmediate(value) ? g.UseImmediate(value) : g.UseRegister(value);
     433      145216 :   inputs[input_count++] = value_operand;
     434      145216 :   Emit(code, 0, static_cast<InstructionOperand*>(nullptr), input_count, inputs);
     435      145232 : }
     436             : 
     437             : // Architecture supports unaligned access, therefore VisitLoad is used instead
     438           0 : void InstructionSelector::VisitUnalignedLoad(Node* node) { UNREACHABLE(); }
     439             : 
     440             : // Architecture supports unaligned access, therefore VisitStore is used instead
     441           0 : void InstructionSelector::VisitUnalignedStore(Node* node) { UNREACHABLE(); }
     442             : 
     443             : // Shared routine for multiple binary operations.
     444      870696 : static void VisitBinop(InstructionSelector* selector, Node* node,
     445             :                        InstructionCode opcode, FlagsContinuation* cont) {
     446             :   X64OperandGenerator g(selector);
     447      870696 :   Int32BinopMatcher m(node);
     448             :   Node* left = m.left().node();
     449             :   Node* right = m.right().node();
     450    14802549 :   InstructionOperand inputs[8];
     451      870709 :   size_t input_count = 0;
     452     2612205 :   InstructionOperand outputs[1];
     453             :   size_t output_count = 0;
     454             : 
     455             :   // TODO(turbofan): match complex addressing modes.
     456      870709 :   if (left == right) {
     457             :     // If both inputs refer to the same operand, enforce allocating a register
     458             :     // for both of them to ensure that we don't end up generating code like
     459             :     // this:
     460             :     //
     461             :     //   mov rax, [rbp-0x10]
     462             :     //   add rax, [rbp-0x10]
     463             :     //   jo label
     464        1518 :     InstructionOperand const input = g.UseRegister(left);
     465        1518 :     inputs[input_count++] = input;
     466        1518 :     inputs[input_count++] = input;
     467      869191 :   } else if (g.CanBeImmediate(right)) {
     468      517380 :     inputs[input_count++] = g.UseRegister(left);
     469      517387 :     inputs[input_count++] = g.UseImmediate(right);
     470             :   } else {
     471      351869 :     int effect_level = selector->GetEffectLevel(node);
     472      351890 :     if (cont->IsBranch()) {
     473             :       effect_level = selector->GetEffectLevel(
     474       35209 :           cont->true_block()->PredecessorAt(0)->control_input());
     475             :     }
     476      637362 :     if (node->op()->HasProperty(Operator::kCommutative) &&
     477      606218 :         g.CanBeBetterLeftOperand(right) &&
     478      241671 :         (!g.CanBeBetterLeftOperand(left) ||
     479      241709 :          !g.CanBeMemoryOperand(opcode, node, right, effect_level))) {
     480             :       std::swap(left, right);
     481             :     }
     482      351925 :     if (g.CanBeMemoryOperand(opcode, node, right, effect_level)) {
     483        2628 :       inputs[input_count++] = g.UseRegister(left);
     484             :       AddressingMode addressing_mode =
     485        2646 :           g.GetEffectiveAddressMemoryOperand(right, inputs, &input_count);
     486        2616 :       opcode |= AddressingModeField::encode(addressing_mode);
     487             :     } else {
     488      349259 :       inputs[input_count++] = g.UseRegister(left);
     489      349279 :       inputs[input_count++] = g.Use(right);
     490             :     }
     491             :   }
     492             : 
     493      870848 :   if (cont->IsBranch()) {
     494      249212 :     inputs[input_count++] = g.Label(cont->true_block());
     495      249213 :     inputs[input_count++] = g.Label(cont->false_block());
     496             :   }
     497             : 
     498      870849 :   outputs[output_count++] = g.DefineSameAsFirst(node);
     499             : 
     500             :   DCHECK_NE(0u, input_count);
     501             :   DCHECK_EQ(1u, output_count);
     502             :   DCHECK_GE(arraysize(inputs), input_count);
     503             :   DCHECK_GE(arraysize(outputs), output_count);
     504             : 
     505             :   selector->EmitWithContinuation(opcode, output_count, outputs, input_count,
     506      870745 :                                  inputs, cont);
     507      870874 : }
     508             : 
     509             : // Shared routine for multiple binary operations.
     510             : static void VisitBinop(InstructionSelector* selector, Node* node,
     511             :                        InstructionCode opcode) {
     512             :   FlagsContinuation cont;
     513      581610 :   VisitBinop(selector, node, opcode, &cont);
     514             : }
     515             : 
     516      186370 : void InstructionSelector::VisitWord32And(Node* node) {
     517             :   X64OperandGenerator g(this);
     518      186370 :   Uint32BinopMatcher m(node);
     519      186378 :   if (m.right().Is(0xFF)) {
     520        1079 :     Emit(kX64Movzxbl, g.DefineAsRegister(node), g.Use(m.left().node()));
     521      185299 :   } else if (m.right().Is(0xFFFF)) {
     522        5749 :     Emit(kX64Movzxwl, g.DefineAsRegister(node), g.Use(m.left().node()));
     523             :   } else {
     524             :     VisitBinop(this, node, kX64And32);
     525             :   }
     526      186389 : }
     527             : 
// 64-bit bitwise AND: no special-case patterns, emit "andq".
void InstructionSelector::VisitWord64And(Node* node) {
  VisitBinop(this, node, kX64And);
}

// 32-bit bitwise OR: emit "orl".
void InstructionSelector::VisitWord32Or(Node* node) {
  VisitBinop(this, node, kX64Or32);
}

// 64-bit bitwise OR: emit "orq".
void InstructionSelector::VisitWord64Or(Node* node) {
  VisitBinop(this, node, kX64Or);
}
     539             : 
     540       22650 : void InstructionSelector::VisitWord32Xor(Node* node) {
     541             :   X64OperandGenerator g(this);
     542       22650 :   Uint32BinopMatcher m(node);
     543       22650 :   if (m.right().Is(-1)) {
     544        2778 :     Emit(kX64Not32, g.DefineSameAsFirst(node), g.UseRegister(m.left().node()));
     545             :   } else {
     546             :     VisitBinop(this, node, kX64Xor32);
     547             :   }
     548       22650 : }
     549             : 
     550         877 : void InstructionSelector::VisitWord64Xor(Node* node) {
     551             :   X64OperandGenerator g(this);
     552         877 :   Uint64BinopMatcher m(node);
     553         877 :   if (m.right().Is(-1)) {
     554          44 :     Emit(kX64Not, g.DefineSameAsFirst(node), g.UseRegister(m.left().node()));
     555             :   } else {
     556             :     VisitBinop(this, node, kX64Xor);
     557             :   }
     558         877 : }
     559             : 
     560             : namespace {
     561             : 
// Tries to merge a TruncateInt64ToInt32 {node} into the {load} it consumes:
// instead of a full 64-bit load followed by a truncation, emit a narrower
// load (movl, or a sign/zero-extending byte/word move) directly. Only legal
// when the selector can cover the load with the truncation. Returns true if
// the merged instruction was emitted.
bool TryMergeTruncateInt64ToInt32IntoLoad(InstructionSelector* selector,
                                          Node* node, Node* load) {
  if (load->opcode() == IrOpcode::kLoad && selector->CanCover(node, load)) {
    LoadRepresentation load_rep = LoadRepresentationOf(load->op());
    MachineRepresentation rep = load_rep.representation();
    InstructionCode opcode = kArchNop;
    // Pick the load opcode matching the loaded representation's width and
    // signedness; wider-than-32-bit representations just load the low word.
    switch (rep) {
      case MachineRepresentation::kBit:  // Fall through.
      case MachineRepresentation::kWord8:
        opcode = load_rep.IsSigned() ? kX64Movsxbl : kX64Movzxbl;
        break;
      case MachineRepresentation::kWord16:
        opcode = load_rep.IsSigned() ? kX64Movsxwl : kX64Movzxwl;
        break;
      case MachineRepresentation::kWord32:
      case MachineRepresentation::kWord64:
      case MachineRepresentation::kTaggedSigned:
      case MachineRepresentation::kTagged:
      case MachineRepresentation::kCompressedSigned:  // Fall through.
      case MachineRepresentation::kCompressed:        // Fall through.
        opcode = kX64Movl;
        break;
      default:
        UNREACHABLE();
        return false;
    }
    X64OperandGenerator g(selector);
    InstructionOperand outputs[] = {g.DefineAsRegister(node)};
    size_t input_count = 0;
    InstructionOperand inputs[3];
    // node->InputAt(0) is the load; reuse its address as the memory operand.
    AddressingMode mode = g.GetEffectiveAddressMemoryOperand(
        node->InputAt(0), inputs, &input_count);
    opcode |= AddressingModeField::encode(mode);
    selector->Emit(opcode, 1, outputs, input_count, inputs);
    return true;
  }
  return false;
}
     600             : 
     601             : // Shared routine for multiple 32-bit shift operations.
     602             : // TODO(bmeurer): Merge this with VisitWord64Shift using template magic?
     603      170643 : void VisitWord32Shift(InstructionSelector* selector, Node* node,
     604             :                       ArchOpcode opcode) {
     605             :   X64OperandGenerator g(selector);
     606      170643 :   Int32BinopMatcher m(node);
     607             :   Node* left = m.left().node();
     608             :   Node* right = m.right().node();
     609             : 
     610      177352 :   if (left->opcode() == IrOpcode::kTruncateInt64ToInt32 &&
     611        6709 :       selector->CanCover(node, left)) {
     612             :     left = left->InputAt(0);
     613             :   }
     614             : 
     615      170643 :   if (g.CanBeImmediate(right)) {
     616      162307 :     selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
     617      162307 :                    g.UseImmediate(right));
     618             :   } else {
     619        8336 :     selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
     620        8336 :                    g.UseFixed(right, rcx));
     621             :   }
     622      170643 : }
     623             : 
     624             : // Shared routine for multiple 64-bit shift operations.
     625             : // TODO(bmeurer): Merge this with VisitWord32Shift using template magic?
     626      316970 : void VisitWord64Shift(InstructionSelector* selector, Node* node,
     627             :                       ArchOpcode opcode) {
     628             :   X64OperandGenerator g(selector);
     629      316970 :   Int64BinopMatcher m(node);
     630             :   Node* left = m.left().node();
     631             :   Node* right = m.right().node();
     632             : 
     633      316971 :   if (g.CanBeImmediate(right)) {
     634      312962 :     selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
     635      312961 :                    g.UseImmediate(right));
     636             :   } else {
     637        4010 :     if (m.right().IsWord64And()) {
     638         112 :       Int64BinopMatcher mright(right);
     639         112 :       if (mright.right().Is(0x3F)) {
     640             :         right = mright.left().node();
     641             :       }
     642             :     }
     643        4010 :     selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
     644        4009 :                    g.UseFixed(right, rcx));
     645             :   }
     646      316970 : }
     647             : 
     648             : // Shared routine for multiple shift operations with continuation.
// Tries to emit a shift whose flags output feeds the continuation {cont}.
// Only applies when the shift count is a non-zero immediate (modulo the
// operand width {Bits}); a shift by zero leaves the flags unchanged, so it
// cannot supply the continuation. Returns false when not applicable.
template <typename BinopMatcher, int Bits>
bool TryVisitWordShift(InstructionSelector* selector, Node* node,
                       ArchOpcode opcode, FlagsContinuation* cont) {
  X64OperandGenerator g(selector);
  BinopMatcher m(node);
  Node* left = m.left().node();
  Node* right = m.right().node();

  // If the shift count is 0, the flags are not affected.
  if (!g.CanBeImmediate(right) ||
      (g.GetImmediateIntegerValue(right) & (Bits - 1)) == 0) {
    return false;
  }
  InstructionOperand output = g.DefineSameAsFirst(node);
  InstructionOperand inputs[2];
  inputs[0] = g.UseRegister(left);
  inputs[1] = g.UseImmediate(right);
  selector->EmitWithContinuation(opcode, 1, &output, 2, inputs, cont);
  return true;
}
     669             : 
     670     2262278 : void EmitLea(InstructionSelector* selector, InstructionCode opcode,
     671             :              Node* result, Node* index, int scale, Node* base,
     672             :              Node* displacement, DisplacementMode displacement_mode) {
     673             :   X64OperandGenerator g(selector);
     674             : 
     675    20360422 :   InstructionOperand inputs[4];
     676     2262278 :   size_t input_count = 0;
     677             :   AddressingMode mode =
     678             :       g.GenerateMemoryOperandInputs(index, scale, base, displacement,
     679     2262278 :                                     displacement_mode, inputs, &input_count);
     680             : 
     681             :   DCHECK_NE(0u, input_count);
     682             :   DCHECK_GE(arraysize(inputs), input_count);
     683             : 
     684     6787536 :   InstructionOperand outputs[1];
     685     2262516 :   outputs[0] = g.DefineAsRegister(result);
     686             : 
     687     2262534 :   opcode = AddressingModeField::encode(mode) | opcode;
     688             : 
     689     2262534 :   selector->Emit(opcode, 1, outputs, input_count, inputs);
     690     2262549 : }
     691             : 
     692             : }  // namespace
     693             : 
     694       38736 : void InstructionSelector::VisitWord32Shl(Node* node) {
     695       38736 :   Int32ScaleMatcher m(node, true);
     696       38740 :   if (m.matches()) {
     697             :     Node* index = node->InputAt(0);
     698        8378 :     Node* base = m.power_of_two_plus_one() ? index : nullptr;
     699             :     EmitLea(this, kX64Lea32, node, index, m.scale(), base, nullptr,
     700        8378 :             kPositiveDisplacement);
     701        8378 :     return;
     702             :   }
     703       30362 :   VisitWord32Shift(this, node, kX64Shl32);
     704             : }
     705             : 
// 64-bit left shift. Tries, in order: a scaled "leaq" when the shift is
// expressible as a scaled index, and eliding a 32->64 sign/zero extension of
// the input when the shift discards the upper 32 bits anyway.
void InstructionSelector::VisitWord64Shl(Node* node) {
  X64OperandGenerator g(this);
  Int64ScaleMatcher m(node, true);
  if (m.matches()) {
    Node* index = node->InputAt(0);
    Node* base = m.power_of_two_plus_one() ? index : nullptr;
    EmitLea(this, kX64Lea, node, index, m.scale(), base, nullptr,
            kPositiveDisplacement);
    return;
  } else {
    // NOTE: this binop matcher intentionally shadows the scale matcher above.
    Int64BinopMatcher m(node);
    if ((m.left().IsChangeInt32ToInt64() ||
         m.left().IsChangeUint32ToUint64()) &&
        m.right().IsInRange(32, 63)) {
      // There's no need to sign/zero-extend to 64-bit if we shift out the upper
      // 32 bits anyway.
      Emit(kX64Shl, g.DefineSameAsFirst(node),
           g.UseRegister(m.left().node()->InputAt(0)),
           g.UseImmediate(m.right().node()));
      return;
    }
  }
  VisitWord64Shift(this, node, kX64Shl);
}
     730             : 
// 32-bit logical shift right: no special patterns beyond the shared helper.
void InstructionSelector::VisitWord32Shr(Node* node) {
  VisitWord32Shift(this, node, kX64Shr32);
}
     734             : 
     735             : namespace {
     736             : 
     737          19 : inline AddressingMode AddDisplacementToAddressingMode(AddressingMode mode) {
     738          19 :   switch (mode) {
     739             :     case kMode_MR:
     740             :       return kMode_MRI;
     741             :       break;
     742             :     case kMode_MR1:
     743           0 :       return kMode_MR1I;
     744             :       break;
     745             :     case kMode_MR2:
     746           0 :       return kMode_MR2I;
     747             :       break;
     748             :     case kMode_MR4:
     749           0 :       return kMode_MR4I;
     750             :       break;
     751             :     case kMode_MR8:
     752           0 :       return kMode_MR8I;
     753             :       break;
     754             :     case kMode_M1:
     755           0 :       return kMode_M1I;
     756             :       break;
     757             :     case kMode_M2:
     758           0 :       return kMode_M2I;
     759             :       break;
     760             :     case kMode_M4:
     761           0 :       return kMode_M4I;
     762             :       break;
     763             :     case kMode_M8:
     764           0 :       return kMode_M8I;
     765             :       break;
     766             :     case kMode_None:
     767             :     case kMode_MRI:
     768             :     case kMode_MR1I:
     769             :     case kMode_MR2I:
     770             :     case kMode_MR4I:
     771             :     case kMode_MR8I:
     772             :     case kMode_M1I:
     773             :     case kMode_M2I:
     774             :     case kMode_M4I:
     775             :     case kMode_M8I:
     776             :     case kMode_Root:
     777           0 :       UNREACHABLE();
     778             :   }
     779           0 :   UNREACHABLE();
     780             : }
     781             : 
// Tries to replace "(load64 addr) >> 32" with a 32-bit load of the upper
// half of the word, i.e. a load from addr + 4 (x64 is little-endian). The
// caller supplies {opcode} to choose between zero-extension (kX64Movl, for
// Shr) and sign-extension (kX64Movsxlq, for Sar). Returns true on success.
bool TryMatchLoadWord64AndShiftRight(InstructionSelector* selector, Node* node,
                                     InstructionCode opcode) {
  DCHECK(IrOpcode::kWord64Sar == node->opcode() ||
         IrOpcode::kWord64Shr == node->opcode());
  X64OperandGenerator g(selector);
  Int64BinopMatcher m(node);
  if (selector->CanCover(m.node(), m.left().node()) && m.left().IsLoad() &&
      m.right().Is(32)) {
    DCHECK_EQ(selector->GetEffectLevel(node),
              selector->GetEffectLevel(m.left().node()));
    // Just load and sign-extend the interesting 4 bytes instead. This happens,
    // for example, when we're loading and untagging SMIs.
    BaseWithIndexAndDisplacement64Matcher mleft(m.left().node(),
                                                AddressOption::kAllowAll);
    if (mleft.matches() && (mleft.displacement() == nullptr ||
                            g.CanBeImmediate(mleft.displacement()))) {
      size_t input_count = 0;
      InstructionOperand inputs[3];
      AddressingMode mode = g.GetEffectiveAddressMemoryOperand(
          m.left().node(), inputs, &input_count);
      if (mleft.displacement() == nullptr) {
        // Make sure that the addressing mode indicates the presence of an
        // immediate displacement. It seems that we never use M1 and M2, but we
        // handle them here anyways.
        mode = AddDisplacementToAddressingMode(mode);
        inputs[input_count++] = ImmediateOperand(ImmediateOperand::INLINE, 4);
      } else {
        // In the case that the base address was zero, the displacement will be
        // in a register and replacing it with an immediate is not allowed. This
        // usually only happens in dead code anyway.
        if (!inputs[input_count - 1].IsImmediate()) return false;
        // Bump the existing immediate displacement by 4 to address the upper
        // 32 bits of the loaded word.
        int32_t displacement = g.GetImmediateIntegerValue(mleft.displacement());
        inputs[input_count - 1] =
            ImmediateOperand(ImmediateOperand::INLINE, displacement + 4);
      }
      InstructionOperand outputs[] = {g.DefineAsRegister(node)};
      InstructionCode code = opcode | AddressingModeField::encode(mode);
      selector->Emit(code, 1, outputs, input_count, inputs);
      return true;
    }
  }
  return false;
}
     825             : 
     826             : }  // namespace
     827             : 
// 64-bit logical shift right. A shr-by-32 of a covered load becomes a plain
// 32-bit load of the upper half (movl zero-extends to 64 bits); otherwise
// use the shared shift lowering.
void InstructionSelector::VisitWord64Shr(Node* node) {
  if (TryMatchLoadWord64AndShiftRight(this, node, kX64Movl)) return;
  VisitWord64Shift(this, node, kX64Shr);
}
     832             : 
     833       26205 : void InstructionSelector::VisitWord32Sar(Node* node) {
     834             :   X64OperandGenerator g(this);
     835       26205 :   Int32BinopMatcher m(node);
     836       46414 :   if (CanCover(m.node(), m.left().node()) && m.left().IsWord32Shl()) {
     837        1102 :     Int32BinopMatcher mleft(m.left().node());
     838        1580 :     if (mleft.right().Is(16) && m.right().Is(16)) {
     839         478 :       Emit(kX64Movsxwl, g.DefineAsRegister(node), g.Use(mleft.left().node()));
     840         478 :       return;
     841        1231 :     } else if (mleft.right().Is(24) && m.right().Is(24)) {
     842         607 :       Emit(kX64Movsxbl, g.DefineAsRegister(node), g.Use(mleft.left().node()));
     843         607 :       return;
     844             :     }
     845             :   }
     846       25120 :   VisitWord32Shift(this, node, kX64Sar32);
     847             : }
     848             : 
// 64-bit arithmetic shift right. A sar-by-32 of a covered load becomes a
// sign-extending 32-bit load of the upper half (movsxlq); otherwise use the
// shared shift lowering.
void InstructionSelector::VisitWord64Sar(Node* node) {
  if (TryMatchLoadWord64AndShiftRight(this, node, kX64Movsxlq)) return;
  VisitWord64Shift(this, node, kX64Sar);
}
     853             : 
// Rotates reuse the shared shift lowerings (immediate or cl-based count).
void InstructionSelector::VisitWord32Ror(Node* node) {
  VisitWord32Shift(this, node, kX64Ror32);
}

void InstructionSelector::VisitWord64Ror(Node* node) {
  VisitWord64Shift(this, node, kX64Ror);
}

// Bit-reversal ops are not lowered here; reaching these is a bug.
// NOTE(review): presumably the machine-operator set never exposes them on
// x64 — confirm against the machine operator builder.
void InstructionSelector::VisitWord32ReverseBits(Node* node) { UNREACHABLE(); }

void InstructionSelector::VisitWord64ReverseBits(Node* node) { UNREACHABLE(); }

// Byte swaps map directly onto the bswap instruction.
void InstructionSelector::VisitWord64ReverseBytes(Node* node) {
  X64OperandGenerator g(this);
  Emit(kX64Bswap, g.DefineSameAsFirst(node), g.UseRegister(node->InputAt(0)));
}

void InstructionSelector::VisitWord32ReverseBytes(Node* node) {
  X64OperandGenerator g(this);
  Emit(kX64Bswap32, g.DefineSameAsFirst(node), g.UseRegister(node->InputAt(0)));
}
     875             : 
     876      276259 : void InstructionSelector::VisitInt32Add(Node* node) {
     877             :   X64OperandGenerator g(this);
     878             : 
     879             :   // Try to match the Add to a leal pattern
     880      276259 :   BaseWithIndexAndDisplacement32Matcher m(node);
     881      552517 :   if (m.matches() &&
     882      231932 :       (m.displacement() == nullptr || g.CanBeImmediate(m.displacement()))) {
     883             :     EmitLea(this, kX64Lea32, node, m.index(), m.scale(), m.base(),
     884      276260 :             m.displacement(), m.displacement_mode());
     885      276273 :     return;
     886             :   }
     887             : 
     888             :   // No leal pattern match, use addl
     889             :   VisitBinop(this, node, kX64Add32);
     890             : }
     891             : 
     892     1953051 : void InstructionSelector::VisitInt64Add(Node* node) {
     893             :   X64OperandGenerator g(this);
     894             : 
     895             :   // Try to match the Add to a leaq pattern
     896     1953051 :   BaseWithIndexAndDisplacement64Matcher m(node);
     897     3906241 :   if (m.matches() &&
     898     1657148 :       (m.displacement() == nullptr || g.CanBeImmediate(m.displacement()))) {
     899             :     EmitLea(this, kX64Lea, node, m.index(), m.scale(), m.base(),
     900     1917250 :             m.displacement(), m.displacement_mode());
     901     1917409 :     return;
     902             :   }
     903             : 
     904             :   // No leal pattern match, use addq
     905             :   VisitBinop(this, node, kX64Add);
     906             : }
     907             : 
     908       26896 : void InstructionSelector::VisitInt64AddWithOverflow(Node* node) {
     909       26896 :   if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
     910             :     FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
     911       26896 :     return VisitBinop(this, node, kX64Add, &cont);
     912             :   }
     913             :   FlagsContinuation cont;
     914           0 :   VisitBinop(this, node, kX64Add, &cont);
     915             : }
     916             : 
// 32-bit subtraction. Special cases, in order: folding a 64->32 truncation
// of the minuend when the subtrahend is an immediate, 0 - x as negation,
// x - 0 as a no-op move, and x - imm as a single three-operand "leal".
void InstructionSelector::VisitInt32Sub(Node* node) {
  X64OperandGenerator g(this);
  DCHECK_EQ(node->InputCount(), 2);
  Node* input1 = node->InputAt(0);
  Node* input2 = node->InputAt(1);
  // Only the low 32 bits of the result matter, so an explicit truncation of
  // the 64-bit minuend can be folded away when subtracting an immediate.
  if (input1->opcode() == IrOpcode::kTruncateInt64ToInt32 &&
      g.CanBeImmediate(input2)) {
    int32_t imm = g.GetImmediateIntegerValue(input2);
    InstructionOperand int64_input = g.UseRegister(input1->InputAt(0));
    if (imm == 0) {
      // Emit "movl" for subtraction of 0.
      Emit(kX64Movl, g.DefineAsRegister(node), int64_input);
    } else {
      // Omit truncation and turn subtractions of constant values into immediate
      // "leal" instructions by negating the value.
      Emit(kX64Lea32 | AddressingModeField::encode(kMode_MRI),
           g.DefineAsRegister(node), int64_input, g.TempImmediate(-imm));
    }
    return;
  }

  Int32BinopMatcher m(node);
  if (m.left().Is(0)) {
    // 0 - x is a negation.
    Emit(kX64Neg32, g.DefineSameAsFirst(node), g.UseRegister(m.right().node()));
  } else if (m.right().Is(0)) {
    // TODO(jarin): We should be able to use {EmitIdentity} here
    // (https://crbug.com/v8/7947).
    Emit(kArchNop, g.DefineSameAsFirst(node), g.Use(m.left().node()));
  } else if (m.right().HasValue() && g.CanBeImmediate(m.right().node())) {
    // Turn subtractions of constant values into immediate "leal" instructions
    // by negating the value; wraparound negation avoids UB on INT32_MIN.
    Emit(kX64Lea32 | AddressingModeField::encode(kMode_MRI),
         g.DefineAsRegister(node), g.UseRegister(m.left().node()),
         g.TempImmediate(base::NegateWithWraparound(m.right().Value())));
  } else {
    VisitBinop(this, node, kX64Sub32);
  }
}
     955             : 
     956       33196 : void InstructionSelector::VisitInt64Sub(Node* node) {
     957             :   X64OperandGenerator g(this);
     958       33196 :   Int64BinopMatcher m(node);
     959       33190 :   if (m.left().Is(0)) {
     960        8727 :     Emit(kX64Neg, g.DefineSameAsFirst(node), g.UseRegister(m.right().node()));
     961             :   } else {
     962       27344 :     if (m.right().HasValue() && g.CanBeImmediate(m.right().node())) {
     963             :       // Turn subtractions of constant values into immediate "leaq" instructions
     964             :       // by negating the value.
     965        5732 :       Emit(kX64Lea | AddressingModeField::encode(kMode_MRI),
     966             :            g.DefineAsRegister(node), g.UseRegister(m.left().node()),
     967        5735 :            g.TempImmediate(-static_cast<int32_t>(m.right().Value())));
     968        2869 :       return;
     969             :     }
     970             :     VisitBinop(this, node, kX64Sub);
     971             :   }
     972             : }
     973             : 
     974       26896 : void InstructionSelector::VisitInt64SubWithOverflow(Node* node) {
     975       26896 :   if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
     976             :     FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
     977       26896 :     return VisitBinop(this, node, kX64Sub, &cont);
     978             :   }
     979             :   FlagsContinuation cont;
     980           0 :   VisitBinop(this, node, kX64Sub, &cont);
     981             : }
     982             : 
     983             : namespace {
     984             : 
     985       56007 : void VisitMul(InstructionSelector* selector, Node* node, ArchOpcode opcode) {
     986             :   X64OperandGenerator g(selector);
     987       56007 :   Int32BinopMatcher m(node);
     988             :   Node* left = m.left().node();
     989             :   Node* right = m.right().node();
     990       56007 :   if (g.CanBeImmediate(right)) {
     991       49545 :     selector->Emit(opcode, g.DefineAsRegister(node), g.Use(left),
     992       49545 :                    g.UseImmediate(right));
     993             :   } else {
     994        6463 :     if (g.CanBeBetterLeftOperand(right)) {
     995             :       std::swap(left, right);
     996             :     }
     997        6463 :     selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
     998        6463 :                    g.Use(right));
     999             :   }
    1000       56007 : }
    1001             : 
// Shared lowering for multiplies that keep only the high 32 result bits.
// The result is fixed to rdx and the left operand to rax; rax is also listed
// as a temp since it is clobbered by the instruction.
void VisitMulHigh(InstructionSelector* selector, Node* node,
                  ArchOpcode opcode) {
  X64OperandGenerator g(selector);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  // Prefer placing an operand that dies at this node into rax.
  if (selector->IsLive(left) && !selector->IsLive(right)) {
    std::swap(left, right);
  }
  InstructionOperand temps[] = {g.TempRegister(rax)};
  // TODO(turbofan): We use UseUniqueRegister here to improve register
  // allocation.
  selector->Emit(opcode, g.DefineAsFixed(node, rdx), g.UseFixed(left, rax),
                 g.UseUniqueRegister(right), arraysize(temps), temps);
}
    1016             : 
// Shared division lowering. x64 div/idiv takes the dividend in rdx:rax and
// leaves the quotient in rax, so the dividend and result are fixed to rax
// and rdx is listed as a clobbered temp. The divisor gets a unique register
// so it cannot alias rax/rdx.
void VisitDiv(InstructionSelector* selector, Node* node, ArchOpcode opcode) {
  X64OperandGenerator g(selector);
  InstructionOperand temps[] = {g.TempRegister(rdx)};
  selector->Emit(
      opcode, g.DefineAsFixed(node, rax), g.UseFixed(node->InputAt(0), rax),
      g.UseUniqueRegister(node->InputAt(1)), arraysize(temps), temps);
}
    1024             : 
// Shared modulus lowering. Same register constraints as VisitDiv, except the
// result is the remainder, which x64 div/idiv leaves in rdx; rax is the
// clobbered temp here.
void VisitMod(InstructionSelector* selector, Node* node, ArchOpcode opcode) {
  X64OperandGenerator g(selector);
  InstructionOperand temps[] = {g.TempRegister(rax)};
  selector->Emit(
      opcode, g.DefineAsFixed(node, rdx), g.UseFixed(node->InputAt(0), rax),
      g.UseUniqueRegister(node->InputAt(1)), arraysize(temps), temps);
}
    1032             : 
    1033             : }  // namespace
    1034             : 
    1035       57320 : void InstructionSelector::VisitInt32Mul(Node* node) {
    1036       57320 :   Int32ScaleMatcher m(node, true);
    1037       57320 :   if (m.matches()) {
    1038             :     Node* index = node->InputAt(0);
    1039       21868 :     Node* base = m.power_of_two_plus_one() ? index : nullptr;
    1040             :     EmitLea(this, kX64Lea32, node, index, m.scale(), base, nullptr,
    1041       21868 :             kPositiveDisplacement);
    1042       21868 :     return;
    1043             :   }
    1044       35452 :   VisitMul(this, node, kX64Imul32);
    1045             : }
    1046             : 
    1047       13924 : void InstructionSelector::VisitInt32MulWithOverflow(Node* node) {
    1048             :   // TODO(mvstanton): Use Int32ScaleMatcher somehow.
    1049       13924 :   if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    1050             :     FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    1051       13924 :     return VisitBinop(this, node, kX64Imul32, &cont);
    1052             :   }
    1053             :   FlagsContinuation cont;
    1054           0 :   VisitBinop(this, node, kX64Imul32, &cont);
    1055             : }
    1056             : 
    1057       20556 : void InstructionSelector::VisitInt64Mul(Node* node) {
    1058       20556 :   VisitMul(this, node, kX64Imul);
    1059       20555 : }
    1060             : 
    1061        4198 : void InstructionSelector::VisitInt32MulHigh(Node* node) {
    1062        4198 :   VisitMulHigh(this, node, kX64ImulHigh32);
    1063        4198 : }
    1064             : 
    1065       15712 : void InstructionSelector::VisitInt32Div(Node* node) {
    1066       15712 :   VisitDiv(this, node, kX64Idiv32);
    1067       15712 : }
    1068             : 
    1069        1852 : void InstructionSelector::VisitInt64Div(Node* node) {
    1070        1852 :   VisitDiv(this, node, kX64Idiv);
    1071        1852 : }
    1072             : 
    1073       14503 : void InstructionSelector::VisitUint32Div(Node* node) {
    1074       14503 :   VisitDiv(this, node, kX64Udiv32);
    1075       14503 : }
    1076             : 
    1077         896 : void InstructionSelector::VisitUint64Div(Node* node) {
    1078         896 :   VisitDiv(this, node, kX64Udiv);
    1079         896 : }
    1080             : 
    1081       16290 : void InstructionSelector::VisitInt32Mod(Node* node) {
    1082       16290 :   VisitMod(this, node, kX64Idiv32);
    1083       16290 : }
    1084             : 
    1085         880 : void InstructionSelector::VisitInt64Mod(Node* node) {
    1086         880 :   VisitMod(this, node, kX64Idiv);
    1087         880 : }
    1088             : 
// Unsigned 32-bit modulus: delegates to VisitMod with the udiv32 opcode.
void InstructionSelector::VisitUint32Mod(Node* node) {
  VisitMod(this, node, kX64Udiv32);
}
    1092             : 
// Unsigned 64-bit modulus: delegates to VisitMod with the 64-bit udiv opcode.
void InstructionSelector::VisitUint64Mod(Node* node) {
  VisitMod(this, node, kX64Udiv);
}
    1096             : 
// High 32 bits of an unsigned 32x32->64 multiply, via VisitMulHigh.
void InstructionSelector::VisitUint32MulHigh(Node* node) {
  VisitMulHigh(this, node, kX64UmulHigh32);
}
    1100             : 
// Checked float32 -> int64 truncation. Output 0 is the truncated value; if
// the node has a Projection(1) user, a second output register is defined to
// receive the success flag of the conversion.
void InstructionSelector::VisitTryTruncateFloat32ToInt64(Node* node) {
  X64OperandGenerator g(this);
  InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
  InstructionOperand outputs[2];
  size_t output_count = 0;
  outputs[output_count++] = g.DefineAsRegister(node);  // truncated result

  Node* success_output = NodeProperties::FindProjection(node, 1);
  if (success_output) {
    outputs[output_count++] = g.DefineAsRegister(success_output);
  }

  Emit(kSSEFloat32ToInt64, output_count, outputs, 1, inputs);
}
    1115             : 
// Checked float64 -> int64 truncation; same shape as the float32 variant:
// output 0 is the value, optional output 1 (Projection(1)) is the success bit.
void InstructionSelector::VisitTryTruncateFloat64ToInt64(Node* node) {
  X64OperandGenerator g(this);
  InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
  InstructionOperand outputs[2];
  size_t output_count = 0;
  outputs[output_count++] = g.DefineAsRegister(node);  // truncated result

  Node* success_output = NodeProperties::FindProjection(node, 1);
  if (success_output) {
    outputs[output_count++] = g.DefineAsRegister(success_output);
  }

  Emit(kSSEFloat64ToInt64, output_count, outputs, 1, inputs);
}
    1130             : 
// Checked float32 -> uint64 truncation; output 0 is the value, optional
// output 1 (Projection(1)) is the success bit.
void InstructionSelector::VisitTryTruncateFloat32ToUint64(Node* node) {
  X64OperandGenerator g(this);
  InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
  InstructionOperand outputs[2];
  size_t output_count = 0;
  outputs[output_count++] = g.DefineAsRegister(node);  // truncated result

  Node* success_output = NodeProperties::FindProjection(node, 1);
  if (success_output) {
    outputs[output_count++] = g.DefineAsRegister(success_output);
  }

  Emit(kSSEFloat32ToUint64, output_count, outputs, 1, inputs);
}
    1145             : 
// Checked float64 -> uint64 truncation; output 0 is the value, optional
// output 1 (Projection(1)) is the success bit.
void InstructionSelector::VisitTryTruncateFloat64ToUint64(Node* node) {
  X64OperandGenerator g(this);
  InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
  InstructionOperand outputs[2];
  size_t output_count = 0;
  outputs[output_count++] = g.DefineAsRegister(node);  // truncated result

  Node* success_output = NodeProperties::FindProjection(node, 1);
  if (success_output) {
    outputs[output_count++] = g.DefineAsRegister(success_output);
  }

  Emit(kSSEFloat64ToUint64, output_count, outputs, 1, inputs);
}
    1160             : 
    1161      253573 : void InstructionSelector::VisitChangeInt32ToInt64(Node* node) {
    1162             :   X64OperandGenerator g(this);
    1163             :   Node* const value = node->InputAt(0);
    1164      253573 :   if (value->opcode() == IrOpcode::kLoad && CanCover(node, value)) {
    1165      168403 :     LoadRepresentation load_rep = LoadRepresentationOf(value->op());
    1166             :     MachineRepresentation rep = load_rep.representation();
    1167             :     InstructionCode opcode = kArchNop;
    1168      168403 :     switch (rep) {
    1169             :       case MachineRepresentation::kBit:  // Fall through.
    1170             :       case MachineRepresentation::kWord8:
    1171       27525 :         opcode = load_rep.IsSigned() ? kX64Movsxbq : kX64Movzxbq;
    1172             :         break;
    1173             :       case MachineRepresentation::kWord16:
    1174        9697 :         opcode = load_rep.IsSigned() ? kX64Movsxwq : kX64Movzxwq;
    1175             :         break;
    1176             :       case MachineRepresentation::kWord32:
    1177      131181 :         opcode = load_rep.IsSigned() ? kX64Movsxlq : kX64Movl;
    1178             :         break;
    1179             :       default:
    1180           0 :         UNREACHABLE();
    1181             :         return;
    1182             :     }
    1183      168403 :     InstructionOperand outputs[] = {g.DefineAsRegister(node)};
    1184      168403 :     size_t input_count = 0;
    1185     1178821 :     InstructionOperand inputs[3];
    1186             :     AddressingMode mode = g.GetEffectiveAddressMemoryOperand(
    1187      168403 :         node->InputAt(0), inputs, &input_count);
    1188      168403 :     opcode |= AddressingModeField::encode(mode);
    1189      168403 :     Emit(opcode, 1, outputs, input_count, inputs);
    1190             :   } else {
    1191       85170 :     Emit(kX64Movsxlq, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
    1192             :   }
    1193      253574 : }
    1194             : 
namespace {

// Returns true when |node| is a 32-bit operation whose x64 lowering already
// leaves the upper 32 bits of the destination register cleared, so an
// explicit zero-extension to 64 bits would be redundant. Must stay in sync
// with the opcodes this selector actually emits for these IR nodes.
bool ZeroExtendsWord32ToWord64(Node* node) {
  switch (node->opcode()) {
    case IrOpcode::kWord32And:
    case IrOpcode::kWord32Or:
    case IrOpcode::kWord32Xor:
    case IrOpcode::kWord32Shl:
    case IrOpcode::kWord32Shr:
    case IrOpcode::kWord32Sar:
    case IrOpcode::kWord32Ror:
    case IrOpcode::kWord32Equal:
    case IrOpcode::kInt32Add:
    case IrOpcode::kInt32Sub:
    case IrOpcode::kInt32Mul:
    case IrOpcode::kInt32MulHigh:
    case IrOpcode::kInt32Div:
    case IrOpcode::kInt32LessThan:
    case IrOpcode::kInt32LessThanOrEqual:
    case IrOpcode::kInt32Mod:
    case IrOpcode::kUint32Div:
    case IrOpcode::kUint32LessThan:
    case IrOpcode::kUint32LessThanOrEqual:
    case IrOpcode::kUint32Mod:
    case IrOpcode::kUint32MulHigh:
    case IrOpcode::kTruncateInt64ToInt32:
      // These 32-bit operations implicitly zero-extend to 64-bit on x64, so the
      // zero-extension is a no-op.
      return true;
    case IrOpcode::kProjection: {
      // The value projections of the *WithOverflow ops are 32-bit results and
      // inherit the implicit zero-extension above.
      Node* const value = node->InputAt(0);
      switch (value->opcode()) {
        case IrOpcode::kInt32AddWithOverflow:
        case IrOpcode::kInt32SubWithOverflow:
        case IrOpcode::kInt32MulWithOverflow:
          return true;
        default:
          return false;
      }
    }
    case IrOpcode::kLoad:
    case IrOpcode::kProtectedLoad:
    case IrOpcode::kPoisonedLoad: {
      // The movzxbl/movsxbl/movzxwl/movsxwl/movl operations implicitly
      // zero-extend to 64-bit on x64, so the zero-extension is a no-op.
      LoadRepresentation load_rep = LoadRepresentationOf(node->op());
      switch (load_rep.representation()) {
        case MachineRepresentation::kWord8:
        case MachineRepresentation::kWord16:
        case MachineRepresentation::kWord32:
          return true;
        default:
          return false;
      }
    }
    default:
      // Conservative answer: anything not whitelisted needs an explicit
      // zero-extension.
      return false;
  }
}

}  // namespace
    1256             : 
// Zero-extends a 32-bit value to 64 bits. If the producer already leaves the
// upper bits clear (see ZeroExtendsWord32ToWord64), the node is just an alias
// of its input; otherwise an explicit movl is emitted.
void InstructionSelector::VisitChangeUint32ToUint64(Node* node) {
  X64OperandGenerator g(this);
  Node* value = node->InputAt(0);
  if (ZeroExtendsWord32ToWord64(value)) {
    // These 32-bit operations implicitly zero-extend to 64-bit on x64, so the
    // zero-extension is a no-op.
    return EmitIdentity(node);
  }
  Emit(kX64Movl, g.DefineAsRegister(node), g.Use(value));
}
    1267             : 
// Pointer-compression lowering: compress an arbitrary tagged value.
// (Zero hits in this coverage run — pointer compression not exercised.)
void InstructionSelector::VisitChangeTaggedToCompressed(Node* node) {
  X64OperandGenerator g(this);
  Node* value = node->InputAt(0);
  Emit(kX64CompressAny, g.DefineAsRegister(node), g.Use(value));
}
    1273             : 
// Pointer-compression lowering: compress a value known to be a heap pointer.
void InstructionSelector::VisitChangeTaggedPointerToCompressedPointer(
    Node* node) {
  X64OperandGenerator g(this);
  Node* value = node->InputAt(0);
  Emit(kX64CompressPointer, g.DefineAsRegister(node), g.Use(value));
}
    1280             : 
// Pointer-compression lowering: compress a value known to be a Smi.
void InstructionSelector::VisitChangeTaggedSignedToCompressedSigned(
    Node* node) {
  X64OperandGenerator g(this);
  Node* value = node->InputAt(0);
  Emit(kX64CompressSigned, g.DefineAsRegister(node), g.Use(value));
}
    1287             : 
// Pointer-compression lowering: decompress an arbitrary compressed value.
void InstructionSelector::VisitChangeCompressedToTagged(Node* node) {
  X64OperandGenerator g(this);
  Node* const value = node->InputAt(0);
  Emit(kX64DecompressAny, g.DefineAsRegister(node), g.Use(value));
}
    1293             : 
// Pointer-compression lowering: decompress a value known to be a heap pointer.
void InstructionSelector::VisitChangeCompressedPointerToTaggedPointer(
    Node* node) {
  X64OperandGenerator g(this);
  Node* const value = node->InputAt(0);
  Emit(kX64DecompressPointer, g.DefineAsRegister(node), g.Use(value));
}
    1300             : 
// Pointer-compression lowering: decompress a value known to be a Smi.
void InstructionSelector::VisitChangeCompressedSignedToTaggedSigned(
    Node* node) {
  X64OperandGenerator g(this);
  Node* const value = node->InputAt(0);
  Emit(kX64DecompressSigned, g.DefineAsRegister(node), g.Use(value));
}
    1307             : 
    1308             : namespace {
    1309             : 
    1310      774568 : void VisitRO(InstructionSelector* selector, Node* node,
    1311             :              InstructionCode opcode) {
    1312             :   X64OperandGenerator g(selector);
    1313      774568 :   selector->Emit(opcode, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
    1314      774654 : }
    1315             : 
    1316       98396 : void VisitRR(InstructionSelector* selector, Node* node,
    1317             :              InstructionCode opcode) {
    1318             :   X64OperandGenerator g(selector);
    1319       98396 :   selector->Emit(opcode, g.DefineAsRegister(node),
    1320       98397 :                  g.UseRegister(node->InputAt(0)));
    1321       98400 : }
    1322             : 
    1323         722 : void VisitRRO(InstructionSelector* selector, Node* node,
    1324             :               InstructionCode opcode) {
    1325             :   X64OperandGenerator g(selector);
    1326        1444 :   selector->Emit(opcode, g.DefineSameAsFirst(node),
    1327         721 :                  g.UseRegister(node->InputAt(0)), g.Use(node->InputAt(1)));
    1328         721 : }
    1329             : 
    1330      124963 : void VisitFloatBinop(InstructionSelector* selector, Node* node,
    1331             :                      ArchOpcode avx_opcode, ArchOpcode sse_opcode) {
    1332             :   X64OperandGenerator g(selector);
    1333      124963 :   InstructionOperand operand0 = g.UseRegister(node->InputAt(0));
    1334      124964 :   InstructionOperand operand1 = g.Use(node->InputAt(1));
    1335      124969 :   if (selector->IsSupported(AVX)) {
    1336      124361 :     selector->Emit(avx_opcode, g.DefineAsRegister(node), operand0, operand1);
    1337             :   } else {
    1338         608 :     selector->Emit(sse_opcode, g.DefineSameAsFirst(node), operand0, operand1);
    1339             :   }
    1340      124956 : }
    1341             : 
    1342       10595 : void VisitFloatUnop(InstructionSelector* selector, Node* node, Node* input,
    1343             :                     ArchOpcode avx_opcode, ArchOpcode sse_opcode) {
    1344             :   X64OperandGenerator g(selector);
    1345       10595 :   if (selector->IsSupported(AVX)) {
    1346       10511 :     selector->Emit(avx_opcode, g.DefineAsRegister(node), g.Use(input));
    1347             :   } else {
    1348          84 :     selector->Emit(sse_opcode, g.DefineSameAsFirst(node), g.UseRegister(input));
    1349             :   }
    1350       10597 : }
    1351             : 
    1352             : }  // namespace
    1353             : 
// Table of single-operand operations that all lower through VisitRO
// (fresh result register, unconstrained input operand). Each entry pairs the
// IR node name with the x64 instruction code to emit.
#define RO_OP_LIST(V)                                                    \
  V(Word64Clz, kX64Lzcnt)                                                \
  V(Word32Clz, kX64Lzcnt32)                                              \
  V(Word64Ctz, kX64Tzcnt)                                                \
  V(Word32Ctz, kX64Tzcnt32)                                              \
  V(Word64Popcnt, kX64Popcnt)                                            \
  V(Word32Popcnt, kX64Popcnt32)                                          \
  V(Float64Sqrt, kSSEFloat64Sqrt)                                        \
  V(Float32Sqrt, kSSEFloat32Sqrt)                                        \
  V(ChangeFloat64ToInt32, kSSEFloat64ToInt32)                            \
  V(ChangeFloat64ToInt64, kSSEFloat64ToInt64)                            \
  V(ChangeFloat64ToUint32, kSSEFloat64ToUint32 | MiscField::encode(1))   \
  V(TruncateFloat64ToInt64, kSSEFloat64ToInt64)                          \
  V(TruncateFloat64ToUint32, kSSEFloat64ToUint32 | MiscField::encode(0)) \
  V(ChangeFloat64ToUint64, kSSEFloat64ToUint64)                          \
  V(TruncateFloat64ToFloat32, kSSEFloat64ToFloat32)                      \
  V(ChangeFloat32ToFloat64, kSSEFloat32ToFloat64)                        \
  V(TruncateFloat32ToInt32, kSSEFloat32ToInt32)                          \
  V(TruncateFloat32ToUint32, kSSEFloat32ToUint32)                        \
  V(ChangeInt32ToFloat64, kSSEInt32ToFloat64)                            \
  V(ChangeInt64ToFloat64, kSSEInt64ToFloat64)                            \
  V(ChangeUint32ToFloat64, kSSEUint32ToFloat64)                          \
  V(RoundFloat64ToInt32, kSSEFloat64ToInt32)                             \
  V(RoundInt32ToFloat32, kSSEInt32ToFloat32)                             \
  V(RoundInt64ToFloat32, kSSEInt64ToFloat32)                             \
  V(RoundUint64ToFloat32, kSSEUint64ToFloat32)                           \
  V(RoundInt64ToFloat64, kSSEInt64ToFloat64)                             \
  V(RoundUint64ToFloat64, kSSEUint64ToFloat64)                           \
  V(RoundUint32ToFloat32, kSSEUint32ToFloat32)                           \
  V(BitcastFloat32ToInt32, kX64BitcastFI)                                \
  V(BitcastFloat64ToInt64, kX64BitcastDL)                                \
  V(BitcastInt32ToFloat32, kX64BitcastIF)                                \
  V(BitcastInt64ToFloat64, kX64BitcastLD)                                \
  V(Float64ExtractLowWord32, kSSEFloat64ExtractLowWord32)                \
  V(Float64ExtractHighWord32, kSSEFloat64ExtractHighWord32)              \
  V(SignExtendWord8ToInt32, kX64Movsxbl)                                 \
  V(SignExtendWord16ToInt32, kX64Movsxwl)                                \
  V(SignExtendWord8ToInt64, kX64Movsxbq)                                 \
  V(SignExtendWord16ToInt64, kX64Movsxwq)                                \
  V(SignExtendWord32ToInt64, kX64Movsxlq)

// Table of operations that lower through VisitRR (fresh result register,
// register-constrained input): the SSE4.1 round instructions with their
// rounding-mode immediates.
#define RR_OP_LIST(V)                                                         \
  V(Float32RoundDown, kSSEFloat32Round | MiscField::encode(kRoundDown))       \
  V(Float64RoundDown, kSSEFloat64Round | MiscField::encode(kRoundDown))       \
  V(Float32RoundUp, kSSEFloat32Round | MiscField::encode(kRoundUp))           \
  V(Float64RoundUp, kSSEFloat64Round | MiscField::encode(kRoundUp))           \
  V(Float32RoundTruncate, kSSEFloat32Round | MiscField::encode(kRoundToZero)) \
  V(Float64RoundTruncate, kSSEFloat64Round | MiscField::encode(kRoundToZero)) \
  V(Float32RoundTiesEven,                                                     \
    kSSEFloat32Round | MiscField::encode(kRoundToNearest))                    \
  V(Float64RoundTiesEven, kSSEFloat64Round | MiscField::encode(kRoundToNearest))

// Expand one InstructionSelector::Visit* method per RO_OP_LIST entry, each
// delegating to VisitRO with the listed opcode.
#define RO_VISITOR(Name, opcode)                      \
  void InstructionSelector::Visit##Name(Node* node) { \
    VisitRO(this, node, opcode);                      \
  }
RO_OP_LIST(RO_VISITOR)
#undef RO_VISITOR
#undef RO_OP_LIST

// Expand one InstructionSelector::Visit* method per RR_OP_LIST entry, each
// delegating to VisitRR with the listed opcode.
#define RR_VISITOR(Name, opcode)                      \
  void InstructionSelector::Visit##Name(Node* node) { \
    VisitRR(this, node, opcode);                      \
  }
RR_OP_LIST(RR_VISITOR)
#undef RR_VISITOR
#undef RR_OP_LIST
    1421             : 
// JS-style double -> int32 truncation, lowered to the architecture-neutral
// kArchTruncateDoubleToI instruction via VisitRR.
void InstructionSelector::VisitTruncateFloat64ToWord32(Node* node) {
  VisitRR(this, node, kArchTruncateDoubleToI);
}
    1425             : 
void InstructionSelector::VisitTruncateInt64ToInt32(Node* node) {
  // We rely on the fact that TruncateInt64ToInt32 zero extends the
  // value (see ZeroExtendsWord32ToWord64). So all code paths here
  // have to satisfy that condition.
  X64OperandGenerator g(this);
  Node* value = node->InputAt(0);
  if (CanCover(node, value)) {
    switch (value->opcode()) {
      case IrOpcode::kWord64Sar:
      case IrOpcode::kWord64Shr: {
        // Truncate(x >> 32) is just the upper half of x: either fold it into
        // a narrower load of the high word, or emit a single 64-bit shr
        // (which zero-extends, satisfying the invariant above).
        Int64BinopMatcher m(value);
        if (m.right().Is(32)) {
          if (CanCoverTransitively(node, value, value->InputAt(0)) &&
              TryMatchLoadWord64AndShiftRight(this, value, kX64Movl)) {
            return EmitIdentity(node);
          }
          Emit(kX64Shr, g.DefineSameAsFirst(node),
               g.UseRegister(m.left().node()), g.TempImmediate(32));
          return;
        }
        break;
      }
      case IrOpcode::kLoad: {
        // Fold the truncation into a covered 64-bit load by narrowing it.
        if (TryMergeTruncateInt64ToInt32IntoLoad(this, node, value)) {
          return;
        }
        break;
      }
      default:
        break;
    }
  }
  // Fallback: movl copies the low 32 bits and implicitly zero-extends.
  Emit(kX64Movl, g.DefineAsRegister(node), g.Use(value));
}
    1460             : 
// Float32 addition via the shared AVX/SSE binop helper.
void InstructionSelector::VisitFloat32Add(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat32Add, kSSEFloat32Add);
}
    1464             : 
// Float32 subtraction via the shared AVX/SSE binop helper.
void InstructionSelector::VisitFloat32Sub(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat32Sub, kSSEFloat32Sub);
}
    1468             : 
// Float32 multiplication via the shared AVX/SSE binop helper.
void InstructionSelector::VisitFloat32Mul(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat32Mul, kSSEFloat32Mul);
}
    1472             : 
// Float32 division via the shared AVX/SSE binop helper.
void InstructionSelector::VisitFloat32Div(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat32Div, kSSEFloat32Div);
}
    1476             : 
// Float32 absolute value via the shared AVX/SSE unop helper.
void InstructionSelector::VisitFloat32Abs(Node* node) {
  VisitFloatUnop(this, node, node->InputAt(0), kAVXFloat32Abs, kSSEFloat32Abs);
}
    1480             : 
// Float32 maximum; RRO shape (destination aliases the first operand).
void InstructionSelector::VisitFloat32Max(Node* node) {
  VisitRRO(this, node, kSSEFloat32Max);
}
    1484             : 
// Float32 minimum; RRO shape (destination aliases the first operand).
void InstructionSelector::VisitFloat32Min(Node* node) {
  VisitRRO(this, node, kSSEFloat32Min);
}
    1488             : 
// Float64 addition via the shared AVX/SSE binop helper.
void InstructionSelector::VisitFloat64Add(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat64Add, kSSEFloat64Add);
}
    1492             : 
// Float64 subtraction via the shared AVX/SSE binop helper.
void InstructionSelector::VisitFloat64Sub(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat64Sub, kSSEFloat64Sub);
}
    1496             : 
// Float64 multiplication via the shared AVX/SSE binop helper.
void InstructionSelector::VisitFloat64Mul(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat64Mul, kSSEFloat64Mul);
}
    1500             : 
// Float64 division via the shared AVX/SSE binop helper.
void InstructionSelector::VisitFloat64Div(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat64Div, kSSEFloat64Div);
}
    1504             : 
// Float64 modulus. Both inputs must be in registers, the destination aliases
// the first input, and rax is reserved as a scratch register for the emitted
// kSSEFloat64Mod sequence.
void InstructionSelector::VisitFloat64Mod(Node* node) {
  X64OperandGenerator g(this);
  InstructionOperand temps[] = {g.TempRegister(rax)};
  Emit(kSSEFloat64Mod, g.DefineSameAsFirst(node),
       g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)), 1,
       temps);
}
    1512             : 
// Float64 maximum; RRO shape (destination aliases the first operand).
void InstructionSelector::VisitFloat64Max(Node* node) {
  VisitRRO(this, node, kSSEFloat64Max);
}
    1516             : 
// Float64 minimum; RRO shape (destination aliases the first operand).
void InstructionSelector::VisitFloat64Min(Node* node) {
  VisitRRO(this, node, kSSEFloat64Min);
}
    1520             : 
// Float64 absolute value via the shared AVX/SSE unop helper.
void InstructionSelector::VisitFloat64Abs(Node* node) {
  VisitFloatUnop(this, node, node->InputAt(0), kAVXFloat64Abs, kSSEFloat64Abs);
}
    1524             : 
// Round-ties-away is not selected on x64 (the machine-operator builder does
// not advertise it for this architecture), so reaching here is a bug.
void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
  UNREACHABLE();
}
    1528             : 
// Float32 negation via the shared AVX/SSE unop helper.
void InstructionSelector::VisitFloat32Neg(Node* node) {
  VisitFloatUnop(this, node, node->InputAt(0), kAVXFloat32Neg, kSSEFloat32Neg);
}
    1532             : 
// Float64 negation via the shared AVX/SSE unop helper.
void InstructionSelector::VisitFloat64Neg(Node* node) {
  VisitFloatUnop(this, node, node->InputAt(0), kAVXFloat64Neg, kSSEFloat64Neg);
}
    1536             : 
// Binary IEEE-754 math operation (e.g. pow, atan2) implemented as a call to
// a C stub: inputs are fixed to xmm0/xmm1, the result to xmm0, and the
// instruction is marked as a call so the register allocator spills across it.
void InstructionSelector::VisitFloat64Ieee754Binop(Node* node,
                                                   InstructionCode opcode) {
  X64OperandGenerator g(this);
  Emit(opcode, g.DefineAsFixed(node, xmm0), g.UseFixed(node->InputAt(0), xmm0),
       g.UseFixed(node->InputAt(1), xmm1))
      ->MarkAsCall();
}
    1544             : 
// Unary IEEE-754 math operation (e.g. sin, log) implemented as a call to a
// C stub: input and result fixed to xmm0, instruction marked as a call.
void InstructionSelector::VisitFloat64Ieee754Unop(Node* node,
                                                  InstructionCode opcode) {
  X64OperandGenerator g(this);
  Emit(opcode, g.DefineAsFixed(node, xmm0), g.UseFixed(node->InputAt(0), xmm0))
      ->MarkAsCall();
}
    1551             : 
// Emits the stack-argument setup preceding a call. C calls reserve the slots
// up front and poke arguments into place; JS/stub calls push arguments in
// reverse order, choosing the cheapest push form per argument (immediate,
// memory operand, register, or any).
void InstructionSelector::EmitPrepareArguments(
    ZoneVector<PushParameter>* arguments, const CallDescriptor* call_descriptor,
    Node* node) {
  X64OperandGenerator g(this);

  // Prepare for C function call.
  if (call_descriptor->IsCFunctionCall()) {
    Emit(kArchPrepareCallCFunction | MiscField::encode(static_cast<int>(
                                         call_descriptor->ParameterCount())),
         0, nullptr, 0, nullptr);

    // Poke any stack arguments.
    for (size_t n = 0; n < arguments->size(); ++n) {
      PushParameter input = (*arguments)[n];
      if (input.node) {
        int slot = static_cast<int>(n);
        InstructionOperand value = g.CanBeImmediate(input.node)
                                       ? g.UseImmediate(input.node)
                                       : g.UseRegister(input.node);
        Emit(kX64Poke | MiscField::encode(slot), g.NoOutput(), value);
      }
    }
  } else {
    // Push any stack arguments.
    int effect_level = GetEffectLevel(node);
    for (PushParameter input : base::Reversed(*arguments)) {
      // Skip any alignment holes in pushed nodes. We may have one in case of a
      // Simd128 stack argument.
      if (input.node == nullptr) continue;
      if (g.CanBeImmediate(input.node)) {
        // Cheapest form: push an immediate directly.
        Emit(kX64Push, g.NoOutput(), g.UseImmediate(input.node));
      } else if (IsSupported(ATOM) ||
                 sequence()->IsFP(GetVirtualRegister(input.node))) {
        // TODO(titzer): X64Push cannot handle stack->stack double moves
        // because there is no way to encode fixed double slots.
        Emit(kX64Push, g.NoOutput(), g.UseRegister(input.node));
      } else if (g.CanBeMemoryOperand(kX64Push, node, input.node,
                                      effect_level)) {
        // Push straight from memory, folding the load into the push.
        InstructionOperand outputs[1];
        InstructionOperand inputs[4];
        size_t input_count = 0;
        InstructionCode opcode = kX64Push;
        AddressingMode mode = g.GetEffectiveAddressMemoryOperand(
            input.node, inputs, &input_count);
        opcode |= AddressingModeField::encode(mode);
        Emit(opcode, 0, outputs, input_count, inputs);
      } else {
        // Fallback: let the register allocator pick any location.
        Emit(kX64Push, g.NoOutput(), g.UseAny(input.node));
      }
    }
  }
}
    1604             : 
// Emits kX64Peek instructions to read call results that are returned on the
// stack (caller frame slots) into registers, tagging float results with the
// proper representation.
void InstructionSelector::EmitPrepareResults(
    ZoneVector<PushParameter>* results, const CallDescriptor* call_descriptor,
    Node* node) {
  X64OperandGenerator g(this);

  int reverse_slot = 0;
  for (PushParameter output : *results) {
    if (!output.location.IsCallerFrameSlot()) continue;
    reverse_slot += output.location.GetSizeInPointers();
    // Skip any alignment holes in nodes.
    if (output.node == nullptr) continue;
    DCHECK(!call_descriptor->IsCFunctionCall());
    if (output.location.GetType() == MachineType::Float32()) {
      MarkAsFloat32(output.node);
    } else if (output.location.GetType() == MachineType::Float64()) {
      MarkAsFloat64(output.node);
    }
    InstructionOperand result = g.DefineAsRegister(output.node);
    InstructionOperand slot = g.UseImmediate(reverse_slot);
    Emit(kX64Peek, 1, &result, 1, &slot);
  }
}
    1627             : 
// On x64, tail-call targets can be encoded as immediates.
bool InstructionSelector::IsTailCallAddressImmediate() { return true; }
    1629             : 
// Number of temporary registers needed to lower a tail call out of a
// JSFunction frame on x64.
int InstructionSelector::GetTempsCountForTailCallFromJSFunction() { return 3; }
    1631             : 
    1632             : namespace {
    1633             : 
    1634     1083819 : void VisitCompareWithMemoryOperand(InstructionSelector* selector,
    1635             :                                    InstructionCode opcode, Node* left,
    1636             :                                    InstructionOperand right,
    1637             :                                    FlagsContinuation* cont) {
    1638             :   DCHECK_EQ(IrOpcode::kLoad, left->opcode());
    1639             :   X64OperandGenerator g(selector);
    1640     1083819 :   size_t input_count = 0;
    1641     9754275 :   InstructionOperand inputs[4];
    1642             :   AddressingMode addressing_mode =
    1643     1083819 :       g.GetEffectiveAddressMemoryOperand(left, inputs, &input_count);
    1644     1083884 :   opcode |= AddressingModeField::encode(addressing_mode);
    1645     1083884 :   inputs[input_count++] = right;
    1646             : 
    1647     1083884 :   selector->EmitWithContinuation(opcode, 0, nullptr, input_count, inputs, cont);
    1648     1083977 : }
    1649             : 
// Shared routine for multiple compare operations.
// Emits |opcode| over two already-converted instruction operands and
// attaches the flags continuation |cont|.
void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
                  InstructionOperand left, InstructionOperand right,
                  FlagsContinuation* cont) {
  selector->EmitWithContinuation(opcode, left, right, cont);
}
    1656             : 
    1657             : // Shared routine for multiple compare operations.
    1658      885427 : void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
    1659             :                   Node* left, Node* right, FlagsContinuation* cont,
    1660             :                   bool commutative) {
    1661             :   X64OperandGenerator g(selector);
    1662     1184133 :   if (commutative && g.CanBeBetterLeftOperand(right)) {
    1663             :     std::swap(left, right);
    1664             :   }
    1665      885439 :   VisitCompare(selector, opcode, g.UseRegister(left), g.Use(right), cont);
    1666      885511 : }
    1667             : 
// Computes the MachineType to assume for |node| when comparing it against
// |hint_node|. If |hint_node| is a load and |node| is a constant that fits in
// the loaded representation, the load's type is returned as a hint so the
// comparison can later be narrowed to the memory width. Otherwise the result
// is |node|'s own load representation, or MachineType::None().
MachineType MachineTypeForNarrow(Node* node, Node* hint_node) {
  if (hint_node->opcode() == IrOpcode::kLoad) {
    MachineType hint = LoadRepresentationOf(hint_node->op());
    if (node->opcode() == IrOpcode::kInt32Constant ||
        node->opcode() == IrOpcode::kInt64Constant) {
      // Widen either constant kind to int64 for uniform range checks.
      int64_t constant = node->opcode() == IrOpcode::kInt32Constant
                             ? OpParameter<int32_t>(node->op())
                             : OpParameter<int64_t>(node->op());
      if (hint == MachineType::Int8()) {
        if (constant >= std::numeric_limits<int8_t>::min() &&
            constant <= std::numeric_limits<int8_t>::max()) {
          return hint;
        }
      } else if (hint == MachineType::Uint8()) {
        if (constant >= std::numeric_limits<uint8_t>::min() &&
            constant <= std::numeric_limits<uint8_t>::max()) {
          return hint;
        }
      } else if (hint == MachineType::Int16()) {
        if (constant >= std::numeric_limits<int16_t>::min() &&
            constant <= std::numeric_limits<int16_t>::max()) {
          return hint;
        }
      } else if (hint == MachineType::Uint16()) {
        if (constant >= std::numeric_limits<uint16_t>::min() &&
            constant <= std::numeric_limits<uint16_t>::max()) {
          return hint;
        }
      } else if (hint == MachineType::Int32()) {
        // NOTE(review): unlike the narrower cases above, no range check is
        // performed here, so a kInt64Constant outside int32 range would still
        // adopt the 32-bit hint — presumably callers guarantee the range;
        // confirm.
        return hint;
      } else if (hint == MachineType::Uint32()) {
        // NOTE(review): only non-negativity is checked, not an upper bound of
        // uint32 max for 64-bit constants — TODO confirm this is intended.
        if (constant >= 0) return hint;
      }
    }
  }
  return node->opcode() == IrOpcode::kLoad ? LoadRepresentationOf(node->op())
                                           : MachineType::None();
}
    1706             : 
    1707             : // Tries to match the size of the given opcode to that of the operands, if
    1708             : // possible.
// Tries to match the size of the given opcode to that of the operands, if
// possible. Returns a narrower compare/test opcode (8/16-bit) when both
// operands agree on a narrower machine type; otherwise returns |opcode|
// unchanged. May rewrite the condition in |cont| from signed to unsigned
// when narrowing to an unsigned width.
InstructionCode TryNarrowOpcodeSize(InstructionCode opcode, Node* left,
                                    Node* right, FlagsContinuation* cont) {
  // TODO(epertoso): we can probably get some size information out phi nodes.
  // If the load representations don't match, both operands will be
  // zero/sign-extended to 32bit.
  MachineType left_type = MachineTypeForNarrow(left, right);
  MachineType right_type = MachineTypeForNarrow(right, left);
  if (left_type == right_type) {
    switch (left_type.representation()) {
      case MachineRepresentation::kBit:
      case MachineRepresentation::kWord8: {
        if (opcode == kX64Test32) return kX64Test8;
        if (opcode == kX64Cmp32) {
          if (left_type.semantic() == MachineSemantic::kUint32) {
            // Operands are unsigned at this width, so rewrite any signed
            // condition as its unsigned counterpart.
            cont->OverwriteUnsignedIfSigned();
          } else {
            CHECK_EQ(MachineSemantic::kInt32, left_type.semantic());
          }
          return kX64Cmp8;
        }
        break;
      }
      case MachineRepresentation::kWord16:
        if (opcode == kX64Test32) return kX64Test16;
        if (opcode == kX64Cmp32) {
          if (left_type.semantic() == MachineSemantic::kUint32) {
            cont->OverwriteUnsignedIfSigned();
          } else {
            CHECK_EQ(MachineSemantic::kInt32, left_type.semantic());
          }
          return kX64Cmp16;
        }
        break;
#ifdef V8_COMPRESS_POINTERS
      case MachineRepresentation::kTaggedSigned:
      case MachineRepresentation::kTaggedPointer:
      case MachineRepresentation::kTagged:
        // When pointer compression is enabled the lower 32-bits uniquely
        // identify tagged value.
        if (opcode == kX64Cmp) return kX64Cmp32;
        break;
#endif
      default:
        break;
    }
  }
  return opcode;
}
    1757             : 
    1758             : // Shared routine for multiple word compare operations.
// Shared routine for multiple word compare operations. Canonicalizes the
// operand order (immediates on the right, memory operands on the left),
// narrows the compare width where possible, and emits the comparison with
// the flags continuation |cont|.
void VisitWordCompare(InstructionSelector* selector, Node* node,
                      InstructionCode opcode, FlagsContinuation* cont) {
  X64OperandGenerator g(selector);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);

  // The 32-bit comparisons automatically truncate Word64
  // values to Word32 range, no need to do that explicitly.
  if (opcode == kX64Cmp32 || opcode == kX64Test32) {
    if (left->opcode() == IrOpcode::kTruncateInt64ToInt32 &&
        selector->CanCover(node, left)) {
      left = left->InputAt(0);
    }

    if (right->opcode() == IrOpcode::kTruncateInt64ToInt32 &&
        selector->CanCover(node, right)) {
      right = right->InputAt(0);
    }
  }

  // Possibly narrow to an 8/16-bit compare; may flip signedness in |cont|.
  opcode = TryNarrowOpcodeSize(opcode, left, right, cont);

  // If one of the two inputs is an immediate, make sure it's on the right, or
  // if one of the two inputs is a memory operand, make sure it's on the left.
  int effect_level = selector->GetEffectLevel(node);
  if (cont->IsBranch()) {
    // NOTE(review): for branches the effect level is taken from the branch's
    // control input rather than the compare node — presumably to allow
    // folding loads that remain valid up to the branch; confirm.
    effect_level = selector->GetEffectLevel(
        cont->true_block()->PredecessorAt(0)->control_input());
  }

  if ((!g.CanBeImmediate(right) && g.CanBeImmediate(left)) ||
      (g.CanBeMemoryOperand(opcode, node, right, effect_level) &&
       !g.CanBeMemoryOperand(opcode, node, left, effect_level))) {
    // Swapping the operands flips the condition unless the op commutes.
    if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
    std::swap(left, right);
  }

  // Match immediates on right side of comparison.
  if (g.CanBeImmediate(right)) {
    if (g.CanBeMemoryOperand(opcode, node, left, effect_level)) {
      return VisitCompareWithMemoryOperand(selector, opcode, left,
                                           g.UseImmediate(right), cont);
    }
    return VisitCompare(selector, opcode, g.Use(left), g.UseImmediate(right),
                        cont);
  }

  // Match memory operands on left side of comparison.
  if (g.CanBeMemoryOperand(opcode, node, left, effect_level)) {
    return VisitCompareWithMemoryOperand(selector, opcode, left,
                                         g.UseRegister(right), cont);
  }

  return VisitCompare(selector, opcode, left, right, cont,
                      node->op()->HasProperty(Operator::kCommutative));
}
    1815             : 
    1816             : // Shared routine for 64-bit word comparison operations.
// Shared routine for 64-bit word comparison operations. Recognizes three
// special patterns before falling back to the generic word compare:
// comparisons against roots-table entries (emitted root-register relative),
// JS stack checks, and wasm stack checks.
void VisitWord64Compare(InstructionSelector* selector, Node* node,
                        FlagsContinuation* cont) {
  X64OperandGenerator g(selector);
  if (selector->CanUseRootsRegister()) {
    const RootsTable& roots_table = selector->isolate()->roots_table();
    RootIndex root_index;
    HeapObjectBinopMatcher m(node);
    // Compare against a root via a root-register-relative memory operand,
    // with the root on the left (so the condition is commuted if needed).
    if (m.right().HasValue() &&
        roots_table.IsRootHandle(m.right().Value(), &root_index)) {
      if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
      InstructionCode opcode =
          kX64Cmp | AddressingModeField::encode(kMode_Root);
      return VisitCompare(
          selector, opcode,
          g.TempImmediate(
              TurboAssemblerBase::RootRegisterOffsetForRootIndex(root_index)),
          g.UseRegister(m.left().node()), cont);
    } else if (m.left().HasValue() &&
               roots_table.IsRootHandle(m.left().Value(), &root_index)) {
      InstructionCode opcode =
          kX64Cmp | AddressingModeField::encode(kMode_Root);
      return VisitCompare(
          selector, opcode,
          g.TempImmediate(
              TurboAssemblerBase::RootRegisterOffsetForRootIndex(root_index)),
          g.UseRegister(m.right().node()), cont);
    }
  }
  if (selector->isolate() != nullptr) {
    StackCheckMatcher<Int64BinopMatcher, IrOpcode::kUint64LessThan> m(
        selector->isolate(), node);
    if (m.Matched()) {
      // Compare(Load(js_stack_limit), LoadStackPointer)
      if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
      InstructionCode opcode = cont->Encode(kX64StackCheck);
      CHECK(cont->IsBranch());
      selector->EmitWithContinuation(opcode, cont);
      return;
    }
  }
  WasmStackCheckMatcher<Int64BinopMatcher, IrOpcode::kUint64LessThan> wasm_m(
      node);
  if (wasm_m.Matched()) {
    // This is a wasm stack check. By structure, we know that we can use the
    // stack pointer directly, as wasm code does not modify the stack at points
    // where stack checks are performed.
    Node* left = node->InputAt(0);
    LocationOperand rsp(InstructionOperand::EXPLICIT, LocationOperand::REGISTER,
                        InstructionSequence::DefaultRepresentation(),
                        RegisterCode::kRegCode_rsp);
    return VisitCompareWithMemoryOperand(selector, kX64Cmp, left, rsp, cont);
  }
  VisitWordCompare(selector, node, kX64Cmp, cont);
}
    1871             : 
    1872             : // Shared routine for comparison with zero.
// Shared routine for comparison with zero. For equal/not-equal branches it
// first tries to combine the compare with a flags-setting binop or shift
// that |node| itself represents; otherwise it emits an explicit compare or
// test against zero, folding a load into the compare when legal.
void VisitCompareZero(InstructionSelector* selector, Node* user, Node* node,
                      InstructionCode opcode, FlagsContinuation* cont) {
  X64OperandGenerator g(selector);
  // Only ==0 / !=0 branches can reuse the flags set by an arithmetic op.
  if (cont->IsBranch() &&
      (cont->condition() == kNotEqual || cont->condition() == kEqual)) {
    switch (node->opcode()) {
#define FLAGS_SET_BINOP_LIST(V)        \
  V(kInt32Add, VisitBinop, kX64Add32)  \
  V(kInt32Sub, VisitBinop, kX64Sub32)  \
  V(kWord32And, VisitBinop, kX64And32) \
  V(kWord32Or, VisitBinop, kX64Or32)   \
  V(kInt64Add, VisitBinop, kX64Add)    \
  V(kInt64Sub, VisitBinop, kX64Sub)    \
  V(kWord64And, VisitBinop, kX64And)   \
  V(kWord64Or, VisitBinop, kX64Or)
#define FLAGS_SET_BINOP(opcode, Visit, archOpcode)           \
  case IrOpcode::opcode:                                     \
    if (selector->IsOnlyUserOfNodeInSameBlock(user, node)) { \
      return Visit(selector, node, archOpcode, cont);        \
    }                                                        \
    break;
      FLAGS_SET_BINOP_LIST(FLAGS_SET_BINOP)
#undef FLAGS_SET_BINOP_LIST
#undef FLAGS_SET_BINOP

#define TRY_VISIT_WORD32_SHIFT TryVisitWordShift<Int32BinopMatcher, 32>
#define TRY_VISIT_WORD64_SHIFT TryVisitWordShift<Int64BinopMatcher, 64>
// Skip Word64Sar/Word32Sar since no instruction reduction in most cases.
#define FLAGS_SET_SHIFT_LIST(V)                    \
  V(kWord32Shl, TRY_VISIT_WORD32_SHIFT, kX64Shl32) \
  V(kWord32Shr, TRY_VISIT_WORD32_SHIFT, kX64Shr32) \
  V(kWord64Shl, TRY_VISIT_WORD64_SHIFT, kX64Shl)   \
  V(kWord64Shr, TRY_VISIT_WORD64_SHIFT, kX64Shr)
#define FLAGS_SET_SHIFT(opcode, TryVisit, archOpcode)         \
  case IrOpcode::opcode:                                      \
    if (selector->IsOnlyUserOfNodeInSameBlock(user, node)) {  \
      if (TryVisit(selector, node, archOpcode, cont)) return; \
    }                                                         \
    break;
      FLAGS_SET_SHIFT_LIST(FLAGS_SET_SHIFT)
#undef TRY_VISIT_WORD32_SHIFT
#undef TRY_VISIT_WORD64_SHIFT
#undef FLAGS_SET_SHIFT_LIST
#undef FLAGS_SET_SHIFT
      default:
        break;
    }
  }
  int effect_level = selector->GetEffectLevel(node);
  if (cont->IsBranch()) {
    // NOTE(review): for branches the effect level comes from the branch's
    // control input rather than |node| — matches VisitWordCompare; confirm
    // the rationale (load folding legal up to the branch).
    effect_level = selector->GetEffectLevel(
        cont->true_block()->PredecessorAt(0)->control_input());
  }
  // Narrow the zero-compare to the loaded width so the load can be folded.
  if (node->opcode() == IrOpcode::kLoad) {
    switch (LoadRepresentationOf(node->op()).representation()) {
      case MachineRepresentation::kWord8:
        if (opcode == kX64Cmp32) {
          opcode = kX64Cmp8;
        } else if (opcode == kX64Test32) {
          opcode = kX64Test8;
        }
        break;
      case MachineRepresentation::kWord16:
        if (opcode == kX64Cmp32) {
          opcode = kX64Cmp16;
        } else if (opcode == kX64Test32) {
          opcode = kX64Test16;
        }
        break;
      default:
        break;
    }
  }
  if (g.CanBeMemoryOperand(opcode, user, node, effect_level)) {
    VisitCompareWithMemoryOperand(selector, opcode, node, g.TempImmediate(0),
                                  cont);
  } else {
    VisitCompare(selector, opcode, g.Use(node), g.TempImmediate(0), cont);
  }
}
    1953             : 
    1954             : // Shared routine for multiple float32 compare operations (inputs commuted).
    1955        1542 : void VisitFloat32Compare(InstructionSelector* selector, Node* node,
    1956             :                          FlagsContinuation* cont) {
    1957             :   Node* const left = node->InputAt(0);
    1958             :   Node* const right = node->InputAt(1);
    1959             :   InstructionCode const opcode =
    1960        1542 :       selector->IsSupported(AVX) ? kAVXFloat32Cmp : kSSEFloat32Cmp;
    1961        1542 :   VisitCompare(selector, opcode, right, left, cont, false);
    1962        1545 : }
    1963             : 
    1964             : // Shared routine for multiple float64 compare operations (inputs commuted).
    1965      205129 : void VisitFloat64Compare(InstructionSelector* selector, Node* node,
    1966             :                          FlagsContinuation* cont) {
    1967             :   Node* const left = node->InputAt(0);
    1968             :   Node* const right = node->InputAt(1);
    1969             :   InstructionCode const opcode =
    1970      205129 :       selector->IsSupported(AVX) ? kAVXFloat64Cmp : kSSEFloat64Cmp;
    1971      205129 :   VisitCompare(selector, opcode, right, left, cont, false);
    1972      205132 : }
    1973             : 
    1974             : // Shared routine for Word32/Word64 Atomic Binops
    1975       24866 : void VisitAtomicBinop(InstructionSelector* selector, Node* node,
    1976             :                       ArchOpcode opcode) {
    1977             :   X64OperandGenerator g(selector);
    1978             :   Node* base = node->InputAt(0);
    1979             :   Node* index = node->InputAt(1);
    1980             :   Node* value = node->InputAt(2);
    1981             :   AddressingMode addressing_mode;
    1982             :   InstructionOperand inputs[] = {
    1983             :       g.UseUniqueRegister(value), g.UseUniqueRegister(base),
    1984       24866 :       g.GetEffectiveIndexOperand(index, &addressing_mode)};
    1985       24866 :   InstructionOperand outputs[] = {g.DefineAsFixed(node, rax)};
    1986       24892 :   InstructionOperand temps[] = {g.TempRegister()};
    1987       49784 :   InstructionCode code = opcode | AddressingModeField::encode(addressing_mode);
    1988             :   selector->Emit(code, arraysize(outputs), outputs, arraysize(inputs), inputs,
    1989       24892 :                  arraysize(temps), temps);
    1990       24992 : }
    1991             : 
    1992             : // Shared routine for Word32/Word64 Atomic CmpExchg
    1993        1109 : void VisitAtomicCompareExchange(InstructionSelector* selector, Node* node,
    1994             :                                 ArchOpcode opcode) {
    1995             :   X64OperandGenerator g(selector);
    1996             :   Node* base = node->InputAt(0);
    1997             :   Node* index = node->InputAt(1);
    1998             :   Node* old_value = node->InputAt(2);
    1999             :   Node* new_value = node->InputAt(3);
    2000             :   AddressingMode addressing_mode;
    2001             :   InstructionOperand inputs[] = {
    2002             :       g.UseFixed(old_value, rax), g.UseUniqueRegister(new_value),
    2003             :       g.UseUniqueRegister(base),
    2004        1109 :       g.GetEffectiveIndexOperand(index, &addressing_mode)};
    2005        1109 :   InstructionOperand outputs[] = {g.DefineAsFixed(node, rax)};
    2006        2218 :   InstructionCode code = opcode | AddressingModeField::encode(addressing_mode);
    2007        1109 :   selector->Emit(code, arraysize(outputs), outputs, arraysize(inputs), inputs);
    2008        1109 : }
    2009             : 
    2010             : // Shared routine for Word32/Word64 Atomic Exchange
    2011        8766 : void VisitAtomicExchange(InstructionSelector* selector, Node* node,
    2012             :                          ArchOpcode opcode) {
    2013             :   X64OperandGenerator g(selector);
    2014             :   Node* base = node->InputAt(0);
    2015             :   Node* index = node->InputAt(1);
    2016             :   Node* value = node->InputAt(2);
    2017             :   AddressingMode addressing_mode;
    2018             :   InstructionOperand inputs[] = {
    2019             :       g.UseUniqueRegister(value), g.UseUniqueRegister(base),
    2020        8766 :       g.GetEffectiveIndexOperand(index, &addressing_mode)};
    2021        8758 :   InstructionOperand outputs[] = {g.DefineSameAsFirst(node)};
    2022       17528 :   InstructionCode code = opcode | AddressingModeField::encode(addressing_mode);
    2023        8764 :   selector->Emit(code, arraysize(outputs), outputs, arraysize(inputs), inputs);
    2024        8762 : }
    2025             : 
    2026             : }  // namespace
    2027             : 
    2028             : // Shared routine for word comparison against zero.
    2029     5889836 : void InstructionSelector::VisitWordCompareZero(Node* user, Node* value,
    2030             :                                                FlagsContinuation* cont) {
    2031             :   // Try to combine with comparisons against 0 by simply inverting the branch.
    2032     7930096 :   while (value->opcode() == IrOpcode::kWord32Equal && CanCover(user, value)) {
    2033     1524272 :     Int32BinopMatcher m(value);
    2034     1524289 :     if (!m.right().Is(0)) break;
    2035             : 
    2036             :     user = value;
    2037             :     value = m.left().node();
    2038             :     cont->Negate();
    2039             :   }
    2040             : 
    2041     5889932 :   if (CanCover(user, value)) {
    2042     5532529 :     switch (value->opcode()) {
    2043             :       case IrOpcode::kWord32Equal:
    2044             :         cont->OverwriteAndNegateIfEqual(kEqual);
    2045      504141 :         return VisitWordCompare(this, value, kX64Cmp32, cont);
    2046             :       case IrOpcode::kInt32LessThan:
    2047             :         cont->OverwriteAndNegateIfEqual(kSignedLessThan);
    2048      218923 :         return VisitWordCompare(this, value, kX64Cmp32, cont);
    2049             :       case IrOpcode::kInt32LessThanOrEqual:
    2050             :         cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
    2051       54486 :         return VisitWordCompare(this, value, kX64Cmp32, cont);
    2052             :       case IrOpcode::kUint32LessThan:
    2053             :         cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
    2054      138929 :         return VisitWordCompare(this, value, kX64Cmp32, cont);
    2055             :       case IrOpcode::kUint32LessThanOrEqual:
    2056             :         cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
    2057       55542 :         return VisitWordCompare(this, value, kX64Cmp32, cont);
    2058             :       case IrOpcode::kWord64Equal: {
    2059             :         cont->OverwriteAndNegateIfEqual(kEqual);
    2060     2404746 :         Int64BinopMatcher m(value);
    2061     2404742 :         if (m.right().Is(0)) {
    2062             :           // Try to combine the branch with a comparison.
    2063             :           Node* const user = m.node();
    2064             :           Node* const value = m.left().node();
    2065      791840 :           if (CanCover(user, value)) {
    2066      703269 :             switch (value->opcode()) {
    2067             :               case IrOpcode::kInt64Sub:
    2068          12 :                 return VisitWord64Compare(this, value, cont);
    2069             :               case IrOpcode::kWord64And:
    2070      665768 :                 return VisitWordCompare(this, value, kX64Test, cont);
    2071             :               default:
    2072             :                 break;
    2073             :             }
    2074             :           }
    2075      126067 :           return VisitCompareZero(this, user, value, kX64Cmp, cont);
    2076             :         }
    2077     1612902 :         return VisitWord64Compare(this, value, cont);
    2078             :       }
    2079             :       case IrOpcode::kInt64LessThan:
    2080             :         cont->OverwriteAndNegateIfEqual(kSignedLessThan);
    2081       92612 :         return VisitWord64Compare(this, value, cont);
    2082             :       case IrOpcode::kInt64LessThanOrEqual:
    2083             :         cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
    2084       29685 :         return VisitWord64Compare(this, value, cont);
    2085             :       case IrOpcode::kUint64LessThan:
    2086             :         cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
    2087     1148796 :         return VisitWord64Compare(this, value, cont);
    2088             :       case IrOpcode::kUint64LessThanOrEqual:
    2089             :         cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
    2090       85349 :         return VisitWord64Compare(this, value, cont);
    2091             :       case IrOpcode::kFloat32Equal:
    2092             :         cont->OverwriteAndNegateIfEqual(kUnorderedEqual);
    2093         490 :         return VisitFloat32Compare(this, value, cont);
    2094             :       case IrOpcode::kFloat32LessThan:
    2095             :         cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThan);
    2096         545 :         return VisitFloat32Compare(this, value, cont);
    2097             :       case IrOpcode::kFloat32LessThanOrEqual:
    2098             :         cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThanOrEqual);
    2099         182 :         return VisitFloat32Compare(this, value, cont);
    2100             :       case IrOpcode::kFloat64Equal:
    2101             :         cont->OverwriteAndNegateIfEqual(kUnorderedEqual);
    2102      168765 :         return VisitFloat64Compare(this, value, cont);
    2103             :       case IrOpcode::kFloat64LessThan: {
    2104       74457 :         Float64BinopMatcher m(value);
    2105      129743 :         if (m.left().Is(0.0) && m.right().IsFloat64Abs()) {
    2106             :           // This matches the pattern
    2107             :           //
    2108             :           //   Float64LessThan(#0.0, Float64Abs(x))
    2109             :           //
    2110             :           // which TurboFan generates for NumberToBoolean in the general case,
    2111             :           // and which evaluates to false if x is 0, -0 or NaN. We can compile
    2112             :           // this to a simple (v)ucomisd using not_equal flags condition, which
    2113             :           // avoids the costly Float64Abs.
    2114             :           cont->OverwriteAndNegateIfEqual(kNotEqual);
    2115             :           InstructionCode const opcode =
    2116       54673 :               IsSupported(AVX) ? kAVXFloat64Cmp : kSSEFloat64Cmp;
    2117             :           return VisitCompare(this, opcode, m.left().node(),
    2118       54673 :                               m.right().InputAt(0), cont, false);
    2119             :         }
    2120             :         cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThan);
    2121       19784 :         return VisitFloat64Compare(this, value, cont);
    2122             :       }
    2123             :       case IrOpcode::kFloat64LessThanOrEqual:
    2124             :         cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThanOrEqual);
    2125       11382 :         return VisitFloat64Compare(this, value, cont);
    2126             :       case IrOpcode::kProjection:
    2127             :         // Check if this is the overflow output projection of an
    2128             :         // <Operation>WithOverflow node.
    2129       85346 :         if (ProjectionIndexOf(value->op()) == 1u) {
    2130             :           // We cannot combine the <Operation>WithOverflow with this branch
    2131             :           // unless the 0th projection (the use of the actual value of the
    2132             :           // <Operation> is either nullptr, which means there's no use of the
    2133             :           // actual value, or was already defined, which means it is scheduled
    2134             :           // *AFTER* this branch).
    2135             :           Node* const node = value->InputAt(0);
    2136       85316 :           Node* const result = NodeProperties::FindProjection(node, 0);
    2137       85313 :           if (result == nullptr || IsDefined(result)) {
    2138       85313 :             switch (node->opcode()) {
    2139             :               case IrOpcode::kInt32AddWithOverflow:
    2140             :                 cont->OverwriteAndNegateIfEqual(kOverflow);
    2141       56008 :                 return VisitBinop(this, node, kX64Add32, cont);
    2142             :               case IrOpcode::kInt32SubWithOverflow:
    2143             :                 cont->OverwriteAndNegateIfEqual(kOverflow);
    2144       10811 :                 return VisitBinop(this, node, kX64Sub32, cont);
    2145             :               case IrOpcode::kInt32MulWithOverflow:
    2146             :                 cont->OverwriteAndNegateIfEqual(kOverflow);
    2147        3023 :                 return VisitBinop(this, node, kX64Imul32, cont);
    2148             :               case IrOpcode::kInt64AddWithOverflow:
    2149             :                 cont->OverwriteAndNegateIfEqual(kOverflow);
    2150        9640 :                 return VisitBinop(this, node, kX64Add, cont);
    2151             :               case IrOpcode::kInt64SubWithOverflow:
    2152             :                 cont->OverwriteAndNegateIfEqual(kOverflow);
    2153        5832 :                 return VisitBinop(this, node, kX64Sub, cont);
    2154             :               default:
    2155             :                 break;
    2156             :             }
    2157             :           }
    2158             :         }
    2159             :         break;
    2160             :       case IrOpcode::kInt32Sub:
    2161         968 :         return VisitWordCompare(this, value, kX64Cmp32, cont);
    2162             :       case IrOpcode::kWord32And:
    2163      268519 :         return VisitWordCompare(this, value, kX64Test32, cont);
    2164             :       default:
    2165             :         break;
    2166             :     }
    2167             :   }
    2168             : 
    2169             :   // Branch could not be combined with a compare, emit compare against 0.
    2170      546179 :   VisitCompareZero(this, user, value, kX64Cmp32, cont);
    2171             : }
    2172             : 
// Lowers a multi-way switch to either a jump table (ArchTableSwitch) or a
// binary-search tree of conditional jumps (ArchLookupSwitch), picking
// whichever the simple size/speed cost model below favors.
void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
  X64OperandGenerator g(this);
  InstructionOperand value_operand = g.UseRegister(node->InputAt(0));

  // Emit either ArchTableSwitch or ArchLookupSwitch.
  if (enable_switch_jump_table_ == kEnableSwitchJumpTable) {
    // Cap on the span of case values a jump table may cover (2^17 entries).
    static const size_t kMaxTableSwitchValueRange = 2 << 16;
    // Rough instruction/entry counts for the two strategies; the weights
    // (3x on time) bias the choice toward the faster table dispatch.
    size_t table_space_cost = 4 + sw.value_range();
    size_t table_time_cost = 3;
    size_t lookup_space_cost = 3 + 2 * sw.case_count();
    size_t lookup_time_cost = sw.case_count();
    if (sw.case_count() > 4 &&
        table_space_cost + 3 * table_time_cost <=
            lookup_space_cost + 3 * lookup_time_cost &&
        // min_value() is negated below, so INT32_MIN itself must be excluded
        // to avoid signed overflow.
        sw.min_value() > std::numeric_limits<int32_t>::min() &&
        sw.value_range() <= kMaxTableSwitchValueRange) {
      InstructionOperand index_operand = g.TempRegister();
      if (sw.min_value()) {
        // The leal automatically zero extends, so result is a valid 64-bit
        // index.
        Emit(kX64Lea32 | AddressingModeField::encode(kMode_MRI), index_operand,
             value_operand, g.TempImmediate(-sw.min_value()));
      } else {
        // Zero extend, because we use it as 64-bit index into the jump table.
        Emit(kX64Movl, index_operand, value_operand);
      }
      // Generate a table lookup.
      return EmitTableSwitch(sw, index_operand);
    }
  }

  // Generate a tree of conditional jumps.
  return EmitBinarySearchSwitch(sw, value_operand);
}
    2207             : 
    2208      108135 : void InstructionSelector::VisitWord32Equal(Node* const node) {
    2209             :   Node* user = node;
    2210             :   FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
    2211      108139 :   Int32BinopMatcher m(user);
    2212      108137 :   if (m.right().Is(0)) {
    2213       48237 :     return VisitWordCompareZero(m.node(), m.left().node(), &cont);
    2214             :   }
    2215       59900 :   VisitWordCompare(this, node, kX64Cmp32, &cont);
    2216             : }
    2217             : 
    2218       28119 : void InstructionSelector::VisitInt32LessThan(Node* node) {
    2219             :   FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
    2220       28119 :   VisitWordCompare(this, node, kX64Cmp32, &cont);
    2221       28119 : }
    2222             : 
    2223       27760 : void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
    2224             :   FlagsContinuation cont =
    2225             :       FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
    2226       27760 :   VisitWordCompare(this, node, kX64Cmp32, &cont);
    2227       27760 : }
    2228             : 
    2229       33215 : void InstructionSelector::VisitUint32LessThan(Node* node) {
    2230             :   FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
    2231       33215 :   VisitWordCompare(this, node, kX64Cmp32, &cont);
    2232       33215 : }
    2233             : 
    2234       27927 : void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
    2235             :   FlagsContinuation cont =
    2236             :       FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
    2237       27927 :   VisitWordCompare(this, node, kX64Cmp32, &cont);
    2238       27927 : }
    2239             : 
    2240       38264 : void InstructionSelector::VisitWord64Equal(Node* const node) {
    2241             :   FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
    2242       38264 :   Int64BinopMatcher m(node);
    2243       38264 :   if (m.right().Is(0)) {
    2244             :     // Try to combine the equality check with a comparison.
    2245             :     Node* const user = m.node();
    2246             :     Node* const value = m.left().node();
    2247        5140 :     if (CanCover(user, value)) {
    2248        4632 :       switch (value->opcode()) {
    2249             :         case IrOpcode::kInt64Sub:
    2250        4456 :           return VisitWord64Compare(this, value, &cont);
    2251             :         case IrOpcode::kWord64And:
    2252        4456 :           return VisitWordCompare(this, value, kX64Test, &cont);
    2253             :         default:
    2254             :           break;
    2255             :       }
    2256             :     }
    2257             :   }
    2258       33808 :   VisitWord64Compare(this, node, &cont);
    2259             : }
    2260             : 
    2261       13924 : void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
    2262       13924 :   if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    2263             :     FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    2264       13924 :     return VisitBinop(this, node, kX64Add32, &cont);
    2265             :   }
    2266             :   FlagsContinuation cont;
    2267           0 :   VisitBinop(this, node, kX64Add32, &cont);
    2268             : }
    2269             : 
    2270       13924 : void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
    2271       13924 :   if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    2272             :     FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    2273       13924 :     return VisitBinop(this, node, kX64Sub32, &cont);
    2274             :   }
    2275             :   FlagsContinuation cont;
    2276           0 :   VisitBinop(this, node, kX64Sub32, &cont);
    2277             : }
    2278             : 
    2279        1365 : void InstructionSelector::VisitInt64LessThan(Node* node) {
    2280             :   FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
    2281        1365 :   VisitWord64Compare(this, node, &cont);
    2282        1365 : }
    2283             : 
    2284         827 : void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) {
    2285             :   FlagsContinuation cont =
    2286             :       FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
    2287         827 :   VisitWord64Compare(this, node, &cont);
    2288         827 : }
    2289             : 
    2290        5727 : void InstructionSelector::VisitUint64LessThan(Node* node) {
    2291             :   FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
    2292        5727 :   VisitWord64Compare(this, node, &cont);
    2293        5730 : }
    2294             : 
    2295         408 : void InstructionSelector::VisitUint64LessThanOrEqual(Node* node) {
    2296             :   FlagsContinuation cont =
    2297             :       FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
    2298         408 :   VisitWord64Compare(this, node, &cont);
    2299         408 : }
    2300             : 
    2301         114 : void InstructionSelector::VisitFloat32Equal(Node* node) {
    2302             :   FlagsContinuation cont = FlagsContinuation::ForSet(kUnorderedEqual, node);
    2303         114 :   VisitFloat32Compare(this, node, &cont);
    2304         114 : }
    2305             : 
    2306         106 : void InstructionSelector::VisitFloat32LessThan(Node* node) {
    2307             :   FlagsContinuation cont =
    2308             :       FlagsContinuation::ForSet(kUnsignedGreaterThan, node);
    2309         106 :   VisitFloat32Compare(this, node, &cont);
    2310         106 : }
    2311             : 
    2312         111 : void InstructionSelector::VisitFloat32LessThanOrEqual(Node* node) {
    2313             :   FlagsContinuation cont =
    2314             :       FlagsContinuation::ForSet(kUnsignedGreaterThanOrEqual, node);
    2315         111 :   VisitFloat32Compare(this, node, &cont);
    2316         111 : }
    2317             : 
    2318        2773 : void InstructionSelector::VisitFloat64Equal(Node* node) {
    2319             :   FlagsContinuation cont = FlagsContinuation::ForSet(kUnorderedEqual, node);
    2320        2773 :   VisitFloat64Compare(this, node, &cont);
    2321        2773 : }
    2322             : 
    2323        4907 : void InstructionSelector::VisitFloat64LessThan(Node* node) {
    2324        4907 :   Float64BinopMatcher m(node);
    2325        8629 :   if (m.left().Is(0.0) && m.right().IsFloat64Abs()) {
    2326             :     // This matches the pattern
    2327             :     //
    2328             :     //   Float64LessThan(#0.0, Float64Abs(x))
    2329             :     //
    2330             :     // which TurboFan generates for NumberToBoolean in the general case,
    2331             :     // and which evaluates to false if x is 0, -0 or NaN. We can compile
    2332             :     // this to a simple (v)ucomisd using not_equal flags condition, which
    2333             :     // avoids the costly Float64Abs.
    2334             :     FlagsContinuation cont = FlagsContinuation::ForSet(kNotEqual, node);
    2335             :     InstructionCode const opcode =
    2336        3625 :         IsSupported(AVX) ? kAVXFloat64Cmp : kSSEFloat64Cmp;
    2337             :     return VisitCompare(this, opcode, m.left().node(), m.right().InputAt(0),
    2338        3625 :                         &cont, false);
    2339             :   }
    2340             :   FlagsContinuation cont =
    2341             :       FlagsContinuation::ForSet(kUnsignedGreaterThan, node);
    2342        1282 :   VisitFloat64Compare(this, node, &cont);
    2343             : }
    2344             : 
    2345        1148 : void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
    2346             :   FlagsContinuation cont =
    2347             :       FlagsContinuation::ForSet(kUnsignedGreaterThanOrEqual, node);
    2348        1148 :   VisitFloat64Compare(this, node, &cont);
    2349        1148 : }
    2350             : 
    2351         116 : void InstructionSelector::VisitFloat64InsertLowWord32(Node* node) {
    2352             :   X64OperandGenerator g(this);
    2353             :   Node* left = node->InputAt(0);
    2354             :   Node* right = node->InputAt(1);
    2355             :   Float64Matcher mleft(left);
    2356         116 :   if (mleft.HasValue() && (bit_cast<uint64_t>(mleft.Value()) >> 32) == 0u) {
    2357         112 :     Emit(kSSEFloat64LoadLowWord32, g.DefineAsRegister(node), g.Use(right));
    2358         112 :     return;
    2359             :   }
    2360           4 :   Emit(kSSEFloat64InsertLowWord32, g.DefineSameAsFirst(node),
    2361           4 :        g.UseRegister(left), g.Use(right));
    2362             : }
    2363             : 
    2364         116 : void InstructionSelector::VisitFloat64InsertHighWord32(Node* node) {
    2365             :   X64OperandGenerator g(this);
    2366             :   Node* left = node->InputAt(0);
    2367             :   Node* right = node->InputAt(1);
    2368         116 :   Emit(kSSEFloat64InsertHighWord32, g.DefineSameAsFirst(node),
    2369         116 :        g.UseRegister(left), g.Use(right));
    2370         116 : }
    2371             : 
    2372        6464 : void InstructionSelector::VisitFloat64SilenceNaN(Node* node) {
    2373             :   X64OperandGenerator g(this);
    2374        6464 :   Emit(kSSEFloat64SilenceNaN, g.DefineSameAsFirst(node),
    2375        6464 :        g.UseRegister(node->InputAt(0)));
    2376        6464 : }
    2377             : 
    2378        1049 : void InstructionSelector::VisitWord32AtomicLoad(Node* node) {
    2379        1049 :   LoadRepresentation load_rep = LoadRepresentationOf(node->op());
    2380             :   DCHECK(load_rep.representation() == MachineRepresentation::kWord8 ||
    2381             :          load_rep.representation() == MachineRepresentation::kWord16 ||
    2382             :          load_rep.representation() == MachineRepresentation::kWord32);
    2383             :   USE(load_rep);
    2384        1050 :   VisitLoad(node);
    2385        1049 : }
    2386             : 
    2387         580 : void InstructionSelector::VisitWord64AtomicLoad(Node* node) {
    2388         580 :   LoadRepresentation load_rep = LoadRepresentationOf(node->op());
    2389             :   USE(load_rep);
    2390         580 :   VisitLoad(node);
    2391         580 : }
    2392             : 
    2393        2076 : void InstructionSelector::VisitWord32AtomicStore(Node* node) {
    2394        2076 :   MachineRepresentation rep = AtomicStoreRepresentationOf(node->op());
    2395             :   ArchOpcode opcode = kArchNop;
    2396        2075 :   switch (rep) {
    2397             :     case MachineRepresentation::kWord8:
    2398             :       opcode = kWord32AtomicExchangeInt8;
    2399             :       break;
    2400             :     case MachineRepresentation::kWord16:
    2401             :       opcode = kWord32AtomicExchangeInt16;
    2402         722 :       break;
    2403             :     case MachineRepresentation::kWord32:
    2404             :       opcode = kWord32AtomicExchangeWord32;
    2405         667 :       break;
    2406             :     default:
    2407           0 :       UNREACHABLE();
    2408             :       return;
    2409             :   }
    2410        2075 :   VisitAtomicExchange(this, node, opcode);
    2411        2078 : }
    2412             : 
    2413        2183 : void InstructionSelector::VisitWord64AtomicStore(Node* node) {
    2414        2183 :   MachineRepresentation rep = AtomicStoreRepresentationOf(node->op());
    2415             :   ArchOpcode opcode = kArchNop;
    2416        2184 :   switch (rep) {
    2417             :     case MachineRepresentation::kWord8:
    2418             :       opcode = kX64Word64AtomicExchangeUint8;
    2419             :       break;
    2420             :     case MachineRepresentation::kWord16:
    2421             :       opcode = kX64Word64AtomicExchangeUint16;
    2422         646 :       break;
    2423             :     case MachineRepresentation::kWord32:
    2424             :       opcode = kX64Word64AtomicExchangeUint32;
    2425         420 :       break;
    2426             :     case MachineRepresentation::kWord64:
    2427             :       opcode = kX64Word64AtomicExchangeUint64;
    2428         414 :       break;
    2429             :     default:
    2430           0 :       UNREACHABLE();
    2431             :       return;
    2432             :   }
    2433        2184 :   VisitAtomicExchange(this, node, opcode);
    2434        2183 : }
    2435             : 
    2436        2279 : void InstructionSelector::VisitWord32AtomicExchange(Node* node) {
    2437        2279 :   MachineType type = AtomicOpType(node->op());
    2438             :   ArchOpcode opcode = kArchNop;
    2439        2281 :   if (type == MachineType::Int8()) {
    2440             :     opcode = kWord32AtomicExchangeInt8;
    2441        2171 :   } else if (type == MachineType::Uint8()) {
    2442             :     opcode = kWord32AtomicExchangeUint8;
    2443        1524 :   } else if (type == MachineType::Int16()) {
    2444             :     opcode = kWord32AtomicExchangeInt16;
    2445        1412 :   } else if (type == MachineType::Uint16()) {
    2446             :     opcode = kWord32AtomicExchangeUint16;
    2447        1110 :   } else if (type == MachineType::Int32() || type == MachineType::Uint32()) {
    2448             :     opcode = kWord32AtomicExchangeWord32;
    2449             :   } else {
    2450           0 :     UNREACHABLE();
    2451             :     return;
    2452             :   }
    2453        2281 :   VisitAtomicExchange(this, node, opcode);
    2454        2282 : }
    2455             : 
    2456        2226 : void InstructionSelector::VisitWord64AtomicExchange(Node* node) {
    2457        2226 :   MachineType type = AtomicOpType(node->op());
    2458             :   ArchOpcode opcode = kArchNop;
    2459        2226 :   if (type == MachineType::Uint8()) {
    2460             :     opcode = kX64Word64AtomicExchangeUint8;
    2461        1478 :   } else if (type == MachineType::Uint16()) {
    2462             :     opcode = kX64Word64AtomicExchangeUint16;
    2463         844 :   } else if (type == MachineType::Uint32()) {
    2464             :     opcode = kX64Word64AtomicExchangeUint32;
    2465         550 :   } else if (type == MachineType::Uint64()) {
    2466             :     opcode = kX64Word64AtomicExchangeUint64;
    2467             :   } else {
    2468           0 :     UNREACHABLE();
    2469             :     return;
    2470             :   }
    2471        2226 :   VisitAtomicExchange(this, node, opcode);
    2472        2228 : }
    2473             : 
    2474         754 : void InstructionSelector::VisitWord32AtomicCompareExchange(Node* node) {
    2475         754 :   MachineType type = AtomicOpType(node->op());
    2476             :   ArchOpcode opcode = kArchNop;
    2477         753 :   if (type == MachineType::Int8()) {
    2478             :     opcode = kWord32AtomicCompareExchangeInt8;
    2479         641 :   } else if (type == MachineType::Uint8()) {
    2480             :     opcode = kWord32AtomicCompareExchangeUint8;
    2481         508 :   } else if (type == MachineType::Int16()) {
    2482             :     opcode = kWord32AtomicCompareExchangeInt16;
    2483         396 :   } else if (type == MachineType::Uint16()) {
    2484             :     opcode = kWord32AtomicCompareExchangeUint16;
    2485         416 :   } else if (type == MachineType::Int32() || type == MachineType::Uint32()) {
    2486             :     opcode = kWord32AtomicCompareExchangeWord32;
    2487             :   } else {
    2488           0 :     UNREACHABLE();
    2489             :     return;
    2490             :   }
    2491         753 :   VisitAtomicCompareExchange(this, node, opcode);
    2492         753 : }
    2493             : 
    2494         356 : void InstructionSelector::VisitWord64AtomicCompareExchange(Node* node) {
    2495         356 :   MachineType type = AtomicOpType(node->op());
    2496             :   ArchOpcode opcode = kArchNop;
    2497         356 :   if (type == MachineType::Uint8()) {
    2498             :     opcode = kX64Word64AtomicCompareExchangeUint8;
    2499         335 :   } else if (type == MachineType::Uint16()) {
    2500             :     opcode = kX64Word64AtomicCompareExchangeUint16;
    2501         306 :   } else if (type == MachineType::Uint32()) {
    2502             :     opcode = kX64Word64AtomicCompareExchangeUint32;
    2503         277 :   } else if (type == MachineType::Uint64()) {
    2504             :     opcode = kX64Word64AtomicCompareExchangeUint64;
    2505             :   } else {
    2506           0 :     UNREACHABLE();
    2507             :     return;
    2508             :   }
    2509         356 :   VisitAtomicCompareExchange(this, node, opcode);
    2510         356 : }
    2511             : 
    2512       13193 : void InstructionSelector::VisitWord32AtomicBinaryOperation(
    2513             :     Node* node, ArchOpcode int8_op, ArchOpcode uint8_op, ArchOpcode int16_op,
    2514             :     ArchOpcode uint16_op, ArchOpcode word32_op) {
    2515       13193 :   MachineType type = AtomicOpType(node->op());
    2516             :   ArchOpcode opcode = kArchNop;
    2517       13206 :   if (type == MachineType::Int8()) {
    2518             :     opcode = int8_op;
    2519       12665 :   } else if (type == MachineType::Uint8()) {
    2520             :     opcode = uint8_op;
    2521        7734 :   } else if (type == MachineType::Int16()) {
    2522             :     opcode = int16_op;
    2523        7174 :   } else if (type == MachineType::Uint16()) {
    2524             :     opcode = uint16_op;
    2525        6198 :   } else if (type == MachineType::Int32() || type == MachineType::Uint32()) {
    2526             :     opcode = word32_op;
    2527             :   } else {
    2528           0 :     UNREACHABLE();
    2529             :     return;
    2530             :   }
    2531       13206 :   VisitAtomicBinop(this, node, opcode);
    2532       13276 : }
    2533             : 
// Instantiates VisitWord32Atomic{Add,Sub,And,Or,Xor}, each forwarding to
// VisitWord32AtomicBinaryOperation with the matching per-width opcodes.
#define VISIT_ATOMIC_BINOP(op)                                   \
  void InstructionSelector::VisitWord32Atomic##op(Node* node) {  \
    VisitWord32AtomicBinaryOperation(                            \
        node, kWord32Atomic##op##Int8, kWord32Atomic##op##Uint8, \
        kWord32Atomic##op##Int16, kWord32Atomic##op##Uint16,     \
        kWord32Atomic##op##Word32);                              \
  }
VISIT_ATOMIC_BINOP(Add)
VISIT_ATOMIC_BINOP(Sub)
VISIT_ATOMIC_BINOP(And)
VISIT_ATOMIC_BINOP(Or)
VISIT_ATOMIC_BINOP(Xor)
#undef VISIT_ATOMIC_BINOP
    2547             : 
    2548       11658 : void InstructionSelector::VisitWord64AtomicBinaryOperation(
    2549             :     Node* node, ArchOpcode uint8_op, ArchOpcode uint16_op, ArchOpcode uint32_op,
    2550             :     ArchOpcode word64_op) {
    2551       11658 :   MachineType type = AtomicOpType(node->op());
    2552             :   ArchOpcode opcode = kArchNop;
    2553       11661 :   if (type == MachineType::Uint8()) {
    2554             :     opcode = uint8_op;
    2555        7928 :   } else if (type == MachineType::Uint16()) {
    2556             :     opcode = uint16_op;
    2557        4733 :   } else if (type == MachineType::Uint32()) {
    2558             :     opcode = uint32_op;
    2559        2588 :   } else if (type == MachineType::Uint64()) {
    2560             :     opcode = word64_op;
    2561             :   } else {
    2562           0 :     UNREACHABLE();
    2563             :     return;
    2564             :   }
    2565       11661 :   VisitAtomicBinop(this, node, opcode);
    2566       11709 : }
    2567             : 
// Instantiates VisitWord64Atomic{Add,Sub,And,Or,Xor}, each forwarding to
// VisitWord64AtomicBinaryOperation with the matching per-width opcodes.
#define VISIT_ATOMIC_BINOP(op)                                           \
  void InstructionSelector::VisitWord64Atomic##op(Node* node) {          \
    VisitWord64AtomicBinaryOperation(                                    \
        node, kX64Word64Atomic##op##Uint8, kX64Word64Atomic##op##Uint16, \
        kX64Word64Atomic##op##Uint32, kX64Word64Atomic##op##Uint64);     \
  }
VISIT_ATOMIC_BINOP(Add)
VISIT_ATOMIC_BINOP(Sub)
VISIT_ATOMIC_BINOP(And)
VISIT_ATOMIC_BINOP(Or)
VISIT_ATOMIC_BINOP(Xor)
#undef VISIT_ATOMIC_BINOP
    2580             : 
    2581             : #define SIMD_TYPES(V) \
    2582             :   V(F32x4)            \
    2583             :   V(I32x4)            \
    2584             :   V(I16x8)            \
    2585             :   V(I8x16)
    2586             : 
    2587             : #define SIMD_BINOP_LIST(V) \
    2588             :   V(F32x4Add)              \
    2589             :   V(F32x4AddHoriz)         \
    2590             :   V(F32x4Sub)              \
    2591             :   V(F32x4Mul)              \
    2592             :   V(F32x4Min)              \
    2593             :   V(F32x4Max)              \
    2594             :   V(F32x4Eq)               \
    2595             :   V(F32x4Ne)               \
    2596             :   V(F32x4Lt)               \
    2597             :   V(F32x4Le)               \
    2598             :   V(I32x4Add)              \
    2599             :   V(I32x4AddHoriz)         \
    2600             :   V(I32x4Sub)              \
    2601             :   V(I32x4Mul)              \
    2602             :   V(I32x4MinS)             \
    2603             :   V(I32x4MaxS)             \
    2604             :   V(I32x4Eq)               \
    2605             :   V(I32x4Ne)               \
    2606             :   V(I32x4GtS)              \
    2607             :   V(I32x4GeS)              \
    2608             :   V(I32x4MinU)             \
    2609             :   V(I32x4MaxU)             \
    2610             :   V(I32x4GtU)              \
    2611             :   V(I32x4GeU)              \
    2612             :   V(I16x8SConvertI32x4)    \
    2613             :   V(I16x8Add)              \
    2614             :   V(I16x8AddSaturateS)     \
    2615             :   V(I16x8AddHoriz)         \
    2616             :   V(I16x8Sub)              \
    2617             :   V(I16x8SubSaturateS)     \
    2618             :   V(I16x8Mul)              \
    2619             :   V(I16x8MinS)             \
    2620             :   V(I16x8MaxS)             \
    2621             :   V(I16x8Eq)               \
    2622             :   V(I16x8Ne)               \
    2623             :   V(I16x8GtS)              \
    2624             :   V(I16x8GeS)              \
    2625             :   V(I16x8AddSaturateU)     \
    2626             :   V(I16x8SubSaturateU)     \
    2627             :   V(I16x8MinU)             \
    2628             :   V(I16x8MaxU)             \
    2629             :   V(I16x8GtU)              \
    2630             :   V(I16x8GeU)              \
    2631             :   V(I8x16SConvertI16x8)    \
    2632             :   V(I8x16Add)              \
    2633             :   V(I8x16AddSaturateS)     \
    2634             :   V(I8x16Sub)              \
    2635             :   V(I8x16SubSaturateS)     \
    2636             :   V(I8x16MinS)             \
    2637             :   V(I8x16MaxS)             \
    2638             :   V(I8x16Eq)               \
    2639             :   V(I8x16Ne)               \
    2640             :   V(I8x16GtS)              \
    2641             :   V(I8x16GeS)              \
    2642             :   V(I8x16AddSaturateU)     \
    2643             :   V(I8x16SubSaturateU)     \
    2644             :   V(I8x16MinU)             \
    2645             :   V(I8x16MaxU)             \
    2646             :   V(I8x16GtU)              \
    2647             :   V(I8x16GeU)              \
    2648             :   V(S128And)               \
    2649             :   V(S128Or)                \
    2650             :   V(S128Xor)
    2651             : 
// Unary SIMD operations handled uniformly by the generic VISIT_SIMD_UNOP
// visitor in this file (register destination, register input).
#define SIMD_UNOP_LIST(V)   \
  V(F32x4SConvertI32x4)     \
  V(F32x4Abs)               \
  V(F32x4Neg)               \
  V(F32x4RecipApprox)       \
  V(F32x4RecipSqrtApprox)   \
  V(I32x4SConvertI16x8Low)  \
  V(I32x4SConvertI16x8High) \
  V(I32x4Neg)               \
  V(I32x4UConvertI16x8Low)  \
  V(I32x4UConvertI16x8High) \
  V(I16x8SConvertI8x16Low)  \
  V(I16x8SConvertI8x16High) \
  V(I16x8Neg)               \
  V(I16x8UConvertI8x16Low)  \
  V(I16x8UConvertI8x16High) \
  V(I8x16Neg)               \
  V(S128Not)
    2670             : 
// SIMD shifts whose shift amount comes from the operator as an immediate
// (see VISIT_SIMD_SHIFT in this file).
#define SIMD_SHIFT_OPCODES(V) \
  V(I32x4Shl)                 \
  V(I32x4ShrS)                \
  V(I32x4ShrU)                \
  V(I16x8Shl)                 \
  V(I16x8ShrS)                \
  V(I16x8ShrU)                \
  V(I8x16Shl)                 \
  V(I8x16ShrS)                \
  V(I8x16ShrU)
    2681             : 
// "Any lane true" reductions, one per boolean-vector width.
#define SIMD_ANYTRUE_LIST(V) \
  V(S1x4AnyTrue)             \
  V(S1x8AnyTrue)             \
  V(S1x16AnyTrue)
    2686             : 
// "All lanes true" reductions, one per boolean-vector width.
#define SIMD_ALLTRUE_LIST(V) \
  V(S1x4AllTrue)             \
  V(S1x8AllTrue)             \
  V(S1x16AllTrue)
    2691             : 
    2692          22 : void InstructionSelector::VisitS128Zero(Node* node) {
    2693             :   X64OperandGenerator g(this);
    2694          22 :   Emit(kX64S128Zero, g.DefineAsRegister(node));
    2695          22 : }
    2696             : 
// Defines a visitor per SIMD type that broadcasts a scalar input into every
// lane of the result. The input may be any operand kind (g.Use).
#define VISIT_SIMD_SPLAT(Type)                               \
  void InstructionSelector::Visit##Type##Splat(Node* node) { \
    X64OperandGenerator g(this);                             \
    Emit(kX64##Type##Splat, g.DefineAsRegister(node),        \
         g.Use(node->InputAt(0)));                           \
  }
SIMD_TYPES(VISIT_SIMD_SPLAT)
#undef VISIT_SIMD_SPLAT
    2705             : 
// Defines a visitor per SIMD type that extracts one lane into a scalar.
// The lane index is a static operator parameter, emitted as an immediate.
#define VISIT_SIMD_EXTRACT_LANE(Type)                              \
  void InstructionSelector::Visit##Type##ExtractLane(Node* node) { \
    X64OperandGenerator g(this);                                   \
    int32_t lane = OpParameter<int32_t>(node->op());               \
    Emit(kX64##Type##ExtractLane, g.DefineAsRegister(node),        \
         g.UseRegister(node->InputAt(0)), g.UseImmediate(lane));   \
  }
SIMD_TYPES(VISIT_SIMD_EXTRACT_LANE)
#undef VISIT_SIMD_EXTRACT_LANE
    2715             : 
// Defines a visitor per SIMD type that replaces one lane of input 0 with the
// scalar in input 1. The result is tied to input 0 (DefineSameAsFirst); the
// lane index is a static operator parameter emitted as an immediate.
#define VISIT_SIMD_REPLACE_LANE(Type)                              \
  void InstructionSelector::Visit##Type##ReplaceLane(Node* node) { \
    X64OperandGenerator g(this);                                   \
    int32_t lane = OpParameter<int32_t>(node->op());               \
    Emit(kX64##Type##ReplaceLane, g.DefineSameAsFirst(node),       \
         g.UseRegister(node->InputAt(0)), g.UseImmediate(lane),    \
         g.Use(node->InputAt(1)));                                 \
  }
SIMD_TYPES(VISIT_SIMD_REPLACE_LANE)
#undef VISIT_SIMD_REPLACE_LANE
    2726             : 
// Defines a visitor per shift opcode. The shift amount is a static operator
// parameter (not a node input) and is emitted as an immediate; the result is
// tied to the shifted input.
#define VISIT_SIMD_SHIFT(Opcode)                                  \
  void InstructionSelector::Visit##Opcode(Node* node) {           \
    X64OperandGenerator g(this);                                  \
    int32_t value = OpParameter<int32_t>(node->op());             \
    Emit(kX64##Opcode, g.DefineSameAsFirst(node),                 \
         g.UseRegister(node->InputAt(0)), g.UseImmediate(value)); \
  }
SIMD_SHIFT_OPCODES(VISIT_SIMD_SHIFT)
#undef VISIT_SIMD_SHIFT
#undef SIMD_SHIFT_OPCODES
    2737             : 
// Generic unary SIMD visitor: fresh register destination, register input.
#define VISIT_SIMD_UNOP(Opcode)                         \
  void InstructionSelector::Visit##Opcode(Node* node) { \
    X64OperandGenerator g(this);                        \
    Emit(kX64##Opcode, g.DefineAsRegister(node),        \
         g.UseRegister(node->InputAt(0)));              \
  }
SIMD_UNOP_LIST(VISIT_SIMD_UNOP)
#undef VISIT_SIMD_UNOP
#undef SIMD_UNOP_LIST
    2747             : 
// Generic binary SIMD visitor: the result is tied to the first input
// (SSE-style destructive two-operand form), second input in a register.
#define VISIT_SIMD_BINOP(Opcode)                                            \
  void InstructionSelector::Visit##Opcode(Node* node) {                     \
    X64OperandGenerator g(this);                                            \
    Emit(kX64##Opcode, g.DefineSameAsFirst(node),                           \
         g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1))); \
  }
SIMD_BINOP_LIST(VISIT_SIMD_BINOP)
#undef VISIT_SIMD_BINOP
#undef SIMD_BINOP_LIST
    2757             : 
// "Any lane true" reduction visitor. A general-purpose temp register is
// allocated for the code generator, and the input is allocated with
// UseUniqueRegister so it does not share a register with the output/temp.
#define VISIT_SIMD_ANYTRUE(Opcode)                                        \
  void InstructionSelector::Visit##Opcode(Node* node) {                   \
    X64OperandGenerator g(this);                                          \
    InstructionOperand temps[] = {g.TempRegister()};                      \
    Emit(kX64##Opcode, g.DefineAsRegister(node),                          \
         g.UseUniqueRegister(node->InputAt(0)), arraysize(temps), temps); \
  }
SIMD_ANYTRUE_LIST(VISIT_SIMD_ANYTRUE)
#undef VISIT_SIMD_ANYTRUE
#undef SIMD_ANYTRUE_LIST
    2768             : 
// "All lanes true" reduction visitor. Same operand constraints as the
// any-true case: a GP temp register plus a unique-register input.
#define VISIT_SIMD_ALLTRUE(Opcode)                                        \
  void InstructionSelector::Visit##Opcode(Node* node) {                   \
    X64OperandGenerator g(this);                                          \
    InstructionOperand temps[] = {g.TempRegister()};                      \
    Emit(kX64##Opcode, g.DefineAsRegister(node),                          \
         g.UseUniqueRegister(node->InputAt(0)), arraysize(temps), temps); \
  }
SIMD_ALLTRUE_LIST(VISIT_SIMD_ALLTRUE)
#undef VISIT_SIMD_ALLTRUE
#undef SIMD_ALLTRUE_LIST
#undef SIMD_TYPES
    2780             : 
    2781          28 : void InstructionSelector::VisitS128Select(Node* node) {
    2782             :   X64OperandGenerator g(this);
    2783          84 :   Emit(kX64S128Select, g.DefineSameAsFirst(node),
    2784             :        g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)),
    2785          28 :        g.UseRegister(node->InputAt(2)));
    2786          28 : }
    2787             : 
    2788           4 : void InstructionSelector::VisitF32x4UConvertI32x4(Node* node) {
    2789             :   X64OperandGenerator g(this);
    2790           4 :   Emit(kX64F32x4UConvertI32x4, g.DefineSameAsFirst(node),
    2791           4 :        g.UseRegister(node->InputAt(0)));
    2792           4 : }
    2793             : 
    2794           4 : void InstructionSelector::VisitI32x4SConvertF32x4(Node* node) {
    2795             :   X64OperandGenerator g(this);
    2796           4 :   Emit(kX64I32x4SConvertF32x4, g.DefineSameAsFirst(node),
    2797           4 :        g.UseRegister(node->InputAt(0)));
    2798           4 : }
    2799             : 
    2800           4 : void InstructionSelector::VisitI32x4UConvertF32x4(Node* node) {
    2801             :   X64OperandGenerator g(this);
    2802           4 :   InstructionOperand temps[] = {g.TempSimd128Register()};
    2803           4 :   Emit(kX64I32x4UConvertF32x4, g.DefineSameAsFirst(node),
    2804           4 :        g.UseRegister(node->InputAt(0)), arraysize(temps), temps);
    2805           4 : }
    2806             : 
    2807           4 : void InstructionSelector::VisitI16x8UConvertI32x4(Node* node) {
    2808             :   X64OperandGenerator g(this);
    2809           8 :   Emit(kX64I16x8UConvertI32x4, g.DefineSameAsFirst(node),
    2810           4 :        g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)));
    2811           4 : }
    2812             : 
    2813           4 : void InstructionSelector::VisitI8x16UConvertI16x8(Node* node) {
    2814             :   X64OperandGenerator g(this);
    2815           8 :   Emit(kX64I8x16UConvertI16x8, g.DefineSameAsFirst(node),
    2816           4 :        g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)));
    2817           4 : }
    2818             : 
    2819           4 : void InstructionSelector::VisitI8x16Mul(Node* node) {
    2820             :   X64OperandGenerator g(this);
    2821           4 :   InstructionOperand temps[] = {g.TempSimd128Register()};
    2822           8 :   Emit(kX64I8x16Mul, g.DefineSameAsFirst(node),
    2823             :        g.UseUniqueRegister(node->InputAt(0)),
    2824           4 :        g.UseUniqueRegister(node->InputAt(1)), arraysize(temps), temps);
    2825           4 : }
    2826             : 
// Int32AbsWithOverflow is not advertised in SupportedMachineOperatorFlags()
// for x64, so the selector should never see this node.
void InstructionSelector::VisitInt32AbsWithOverflow(Node* node) {
  UNREACHABLE();
}
    2830             : 
// Int64AbsWithOverflow is not advertised in SupportedMachineOperatorFlags()
// for x64, so the selector should never see this node.
void InstructionSelector::VisitInt64AbsWithOverflow(Node* node) {
  UNREACHABLE();
}
    2834             : 
    2835             : namespace {
    2836             : 
    2837             : // Packs a 4 lane shuffle into a single imm8 suitable for use by pshufd,
    2838             : // pshuflw, and pshufhw.
    2839             : uint8_t PackShuffle4(uint8_t* shuffle) {
    2840        5448 :   return (shuffle[0] & 3) | ((shuffle[1] & 3) << 2) | ((shuffle[2] & 3) << 4) |
    2841        5448 :          ((shuffle[3] & 3) << 6);
    2842             : }
    2843             : 
    2844             : // Gets an 8 bit lane mask suitable for 16x8 pblendw.
    2845             : uint8_t PackBlend8(const uint8_t* shuffle16x8) {
    2846             :   int8_t result = 0;
    2847         544 :   for (int i = 0; i < 8; ++i) {
    2848         256 :     result |= (shuffle16x8[i] >= 8 ? 1 : 0) << i;
    2849             :   }
    2850          32 :   return result;
    2851             : }
    2852             : 
    2853             : // Gets an 8 bit lane mask suitable for 32x4 pblendw.
    2854             : uint8_t PackBlend4(const uint8_t* shuffle32x4) {
    2855             :   int8_t result = 0;
    2856        4392 :   for (int i = 0; i < 4; ++i) {
    2857        1952 :     result |= (shuffle32x4[i] >= 4 ? 0x3 : 0) << (i * 2);
    2858             :   }
    2859          24 :   return result;
    2860             : }
    2861             : 
    2862             : // Returns true if shuffle can be decomposed into two 16x4 half shuffles
    2863             : // followed by a 16x8 blend.
    2864             : // E.g. [3 2 1 0 15 14 13 12].
    2865             : bool TryMatch16x8HalfShuffle(uint8_t* shuffle16x8, uint8_t* blend_mask) {
    2866             :   *blend_mask = 0;
    2867        9356 :   for (int i = 0; i < 8; i++) {
    2868        4632 :     if ((shuffle16x8[i] & 0x4) != (i & 0x4)) return false;
    2869        4320 :     *blend_mask |= (shuffle16x8[i] > 7 ? 1 : 0) << i;
    2870             :   }
    2871             :   return true;
    2872             : }
    2873             : 
// One row of the architecture-specific shuffle table below: the canonical
// 16-byte shuffle pattern, the instruction it maps to, and whether each
// source must be forced into a register (g.UseRegister) rather than taking
// any operand kind (g.Use).
struct ShuffleEntry {
  uint8_t shuffle[kSimd128Size];  // byte-lane indices; >= 16 selects source 1
  ArchOpcode opcode;
  bool src0_needs_reg;
  bool src1_needs_reg;
};
    2880             : 
// Shuffles that map to architecture-specific instruction sequences. These are
// matched very early, so we shouldn't include shuffles that match better in
// later tests, like 32x4 and 16x8 shuffles. In general, these patterns should
// map to either a single instruction, or be finer grained, such as zip/unzip or
// transpose patterns.
// NOTE: order matters — TryMatchArchShuffle scans this table front to back.
static const ShuffleEntry arch_shuffles[] = {
    // Unpack (interleave) low/high halves at 64/32/16/8-bit granularity.
    {{0, 1, 2, 3, 4, 5, 6, 7, 16, 17, 18, 19, 20, 21, 22, 23},
     kX64S64x2UnpackLow,
     true,
     false},
    {{8, 9, 10, 11, 12, 13, 14, 15, 24, 25, 26, 27, 28, 29, 30, 31},
     kX64S64x2UnpackHigh,
     true,
     false},
    {{0, 1, 2, 3, 16, 17, 18, 19, 4, 5, 6, 7, 20, 21, 22, 23},
     kX64S32x4UnpackLow,
     true,
     false},
    {{8, 9, 10, 11, 24, 25, 26, 27, 12, 13, 14, 15, 28, 29, 30, 31},
     kX64S32x4UnpackHigh,
     true,
     false},
    {{0, 1, 16, 17, 2, 3, 18, 19, 4, 5, 20, 21, 6, 7, 22, 23},
     kX64S16x8UnpackLow,
     true,
     false},
    {{8, 9, 24, 25, 10, 11, 26, 27, 12, 13, 28, 29, 14, 15, 30, 31},
     kX64S16x8UnpackHigh,
     true,
     false},
    {{0, 16, 1, 17, 2, 18, 3, 19, 4, 20, 5, 21, 6, 22, 7, 23},
     kX64S8x16UnpackLow,
     true,
     false},
    {{8, 24, 9, 25, 10, 26, 11, 27, 12, 28, 13, 29, 14, 30, 15, 31},
     kX64S8x16UnpackHigh,
     true,
     false},

    // Unzip (deinterleave even/odd elements) and transpose patterns.
    {{0, 1, 4, 5, 8, 9, 12, 13, 16, 17, 20, 21, 24, 25, 28, 29},
     kX64S16x8UnzipLow,
     true,
     false},
    {{2, 3, 6, 7, 10, 11, 14, 15, 18, 19, 22, 23, 26, 27, 30, 31},
     kX64S16x8UnzipHigh,
     true,
     true},
    {{0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30},
     kX64S8x16UnzipLow,
     true,
     true},
    {{1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 21, 23, 25, 27, 29, 31},
     kX64S8x16UnzipHigh,
     true,
     true},
    {{0, 16, 2, 18, 4, 20, 6, 22, 8, 24, 10, 26, 12, 28, 14, 30},
     kX64S8x16TransposeLow,
     true,
     true},
    {{1, 17, 3, 19, 5, 21, 7, 23, 9, 25, 11, 27, 13, 29, 15, 31},
     kX64S8x16TransposeHigh,
     true,
     true},

    // Byte reversals within 8/4/2-byte groups.
    {{7, 6, 5, 4, 3, 2, 1, 0, 15, 14, 13, 12, 11, 10, 9, 8},
     kX64S8x8Reverse,
     false,
     false},
    {{3, 2, 1, 0, 7, 6, 5, 4, 11, 10, 9, 8, 15, 14, 13, 12},
     kX64S8x4Reverse,
     false,
     false},
    {{1, 0, 3, 2, 5, 4, 7, 6, 9, 8, 11, 10, 13, 12, 15, 14},
     kX64S8x2Reverse,
     true,
     true}};
    2956             : 
    2957             : bool TryMatchArchShuffle(const uint8_t* shuffle, const ShuffleEntry* table,
    2958             :                          size_t num_entries, bool is_swizzle,
    2959             :                          const ShuffleEntry** arch_shuffle) {
    2960        4768 :   uint8_t mask = is_swizzle ? kSimd128Size - 1 : 2 * kSimd128Size - 1;
    2961      142608 :   for (size_t i = 0; i < num_entries; ++i) {
    2962       70440 :     const ShuffleEntry& entry = table[i];
    2963             :     int j = 0;
    2964      176784 :     for (; j < kSimd128Size; ++j) {
    2965      122092 :       if ((entry.shuffle[j] & mask) != (shuffle[j] & mask)) {
    2966             :         break;
    2967             :       }
    2968             :     }
    2969       70440 :     if (j == kSimd128Size) {
    2970             :       *arch_shuffle = &entry;
    2971             :       return true;
    2972             :     }
    2973             :   }
    2974             :   return false;
    2975             : }
    2976             : 
    2977             : }  // namespace
    2978             : 
// Selects the best x64 instruction for a general 8x16 shuffle by trying
// progressively more specific patterns: concat (palignr), table-driven
// arch shuffles, 32x4 shuffles/blends, 16x8 shuffles/blends/dups, 8x16 dup,
// and finally the fully general pshufb-based kX64S8x16Shuffle. Each match
// records its opcode, immediates, and operand constraints; a single Emit at
// the end builds the instruction.
void InstructionSelector::VisitS8x16Shuffle(Node* node) {
  uint8_t shuffle[kSimd128Size];
  bool is_swizzle;
  CanonicalizeShuffle(node, shuffle, &is_swizzle);

  // Immediates and temps accumulated by whichever pattern matches below.
  int imm_count = 0;
  static const int kMaxImms = 6;
  uint32_t imms[kMaxImms];
  int temp_count = 0;
  static const int kMaxTemps = 2;
  InstructionOperand temps[kMaxTemps];

  X64OperandGenerator g(this);
  // Swizzles don't generally need DefineSameAsFirst to avoid a move.
  bool no_same_as_first = is_swizzle;
  // We generally need UseRegister for input0, Use for input1.
  bool src0_needs_reg = true;
  bool src1_needs_reg = false;
  ArchOpcode opcode = kX64S8x16Shuffle;  // general shuffle is the default

  uint8_t offset;
  uint8_t shuffle32x4[4];
  uint8_t shuffle16x8[8];
  int index;
  const ShuffleEntry* arch_shuffle;
  if (TryMatchConcat(shuffle, &offset)) {
    // Swap inputs from the normal order for (v)palignr.
    SwapShuffleInputs(node);
    is_swizzle = false;        // It's simpler to just handle the general case.
    no_same_as_first = false;  // SSE requires same-as-first.
    opcode = kX64S8x16Alignr;
    // palignr takes a single imm8 offset.
    imms[imm_count++] = offset;
  } else if (TryMatchArchShuffle(shuffle, arch_shuffles,
                                 arraysize(arch_shuffles), is_swizzle,
                                 &arch_shuffle)) {
    opcode = arch_shuffle->opcode;
    src0_needs_reg = arch_shuffle->src0_needs_reg;
    // SSE can't take advantage of both operands in registers and needs
    // same-as-first.
    src1_needs_reg = false;
    no_same_as_first = false;
  } else if (TryMatch32x4Shuffle(shuffle, shuffle32x4)) {
    uint8_t shuffle_mask = PackShuffle4(shuffle32x4);
    if (is_swizzle) {
      if (TryMatchIdentity(shuffle)) {
        // Bypass normal shuffle code generation in this case.
        EmitIdentity(node);
        return;
      } else {
        // pshufd takes a single imm8 shuffle mask.
        opcode = kX64S32x4Swizzle;
        no_same_as_first = true;
        src0_needs_reg = false;
        imms[imm_count++] = shuffle_mask;
      }
    } else {
      // 2 operand shuffle
      // A blend is more efficient than a general 32x4 shuffle; try it first.
      if (TryMatchBlend(shuffle)) {
        opcode = kX64S16x8Blend;
        uint8_t blend_mask = PackBlend4(shuffle32x4);
        imms[imm_count++] = blend_mask;
      } else {
        // General 32x4 shuffle: shuffle mask plus a blend mask to merge the
        // two sources.
        opcode = kX64S32x4Shuffle;
        no_same_as_first = true;
        src0_needs_reg = false;
        imms[imm_count++] = shuffle_mask;
        int8_t blend_mask = PackBlend4(shuffle32x4);
        imms[imm_count++] = blend_mask;
      }
    }
  } else if (TryMatch16x8Shuffle(shuffle, shuffle16x8)) {
    uint8_t blend_mask;
    if (TryMatchBlend(shuffle)) {
      opcode = kX64S16x8Blend;
      blend_mask = PackBlend8(shuffle16x8);
      imms[imm_count++] = blend_mask;
    } else if (TryMatchDup<8>(shuffle, &index)) {
      opcode = kX64S16x8Dup;
      src0_needs_reg = false;
      imms[imm_count++] = index;
    } else if (TryMatch16x8HalfShuffle(shuffle16x8, &blend_mask)) {
      opcode = is_swizzle ? kX64S16x8HalfShuffle1 : kX64S16x8HalfShuffle2;
      // Half-shuffles don't need DefineSameAsFirst or UseRegister(src0).
      no_same_as_first = true;
      src0_needs_reg = false;
      // pshuflw/pshufhw masks for the low and high halves.
      uint8_t mask_lo = PackShuffle4(shuffle16x8);
      uint8_t mask_hi = PackShuffle4(shuffle16x8 + 4);
      imms[imm_count++] = mask_lo;
      imms[imm_count++] = mask_hi;
      if (!is_swizzle) imms[imm_count++] = blend_mask;
    }
  } else if (TryMatchDup<16>(shuffle, &index)) {
    opcode = kX64S8x16Dup;
    no_same_as_first = false;
    src0_needs_reg = true;
    imms[imm_count++] = index;
  }
  if (opcode == kX64S8x16Shuffle) {
    // No pattern matched (or a 16x8 match fell through without setting an
    // opcode): emit the general shuffle.
    // Use same-as-first for general swizzle, but not shuffle.
    no_same_as_first = !is_swizzle;
    src0_needs_reg = !no_same_as_first;
    // The full 16-byte shuffle pattern, packed 4 lanes per immediate.
    imms[imm_count++] = Pack4Lanes(shuffle);
    imms[imm_count++] = Pack4Lanes(shuffle + 4);
    imms[imm_count++] = Pack4Lanes(shuffle + 8);
    imms[imm_count++] = Pack4Lanes(shuffle + 12);
    temps[temp_count++] = g.TempRegister();
  }

  // Use DefineAsRegister(node) and Use(src0) if we can without forcing an extra
  // move instruction in the CodeGenerator.
  Node* input0 = node->InputAt(0);
  InstructionOperand dst =
      no_same_as_first ? g.DefineAsRegister(node) : g.DefineSameAsFirst(node);
  InstructionOperand src0 =
      src0_needs_reg ? g.UseRegister(input0) : g.Use(input0);

  // Assemble the operand list: src0, optional src1 (shuffles only), then any
  // immediates collected above.
  int input_count = 0;
  InstructionOperand inputs[2 + kMaxImms + kMaxTemps];
  inputs[input_count++] = src0;
  if (!is_swizzle) {
    Node* input1 = node->InputAt(1);
    inputs[input_count++] =
        src1_needs_reg ? g.UseRegister(input1) : g.Use(input1);
  }
  for (int i = 0; i < imm_count; ++i) {
    inputs[input_count++] = g.UseImmediate(imms[i]);
  }
  Emit(opcode, 1, &dst, input_count, inputs, temp_count, temps);
}
    3110             : 
    3111             : // static
    3112             : MachineOperatorBuilder::Flags
    3113     5264263 : InstructionSelector::SupportedMachineOperatorFlags() {
    3114             :   MachineOperatorBuilder::Flags flags =
    3115             :       MachineOperatorBuilder::kWord32ShiftIsSafe |
    3116             :       MachineOperatorBuilder::kWord32Ctz | MachineOperatorBuilder::kWord64Ctz;
    3117     5264263 :   if (CpuFeatures::IsSupported(POPCNT)) {
    3118             :     flags |= MachineOperatorBuilder::kWord32Popcnt |
    3119             :              MachineOperatorBuilder::kWord64Popcnt;
    3120             :   }
    3121     5264263 :   if (CpuFeatures::IsSupported(SSE4_1)) {
    3122             :     flags |= MachineOperatorBuilder::kFloat32RoundDown |
    3123             :              MachineOperatorBuilder::kFloat64RoundDown |
    3124             :              MachineOperatorBuilder::kFloat32RoundUp |
    3125             :              MachineOperatorBuilder::kFloat64RoundUp |
    3126             :              MachineOperatorBuilder::kFloat32RoundTruncate |
    3127             :              MachineOperatorBuilder::kFloat64RoundTruncate |
    3128             :              MachineOperatorBuilder::kFloat32RoundTiesEven |
    3129             :              MachineOperatorBuilder::kFloat64RoundTiesEven;
    3130             :   }
    3131     5264263 :   return flags;
    3132             : }
    3133             : 
// static
// x64 supports unaligned loads and stores for all access sizes, so no
// special alignment handling is required.
MachineOperatorBuilder::AlignmentRequirements
InstructionSelector::AlignmentRequirements() {
  return MachineOperatorBuilder::AlignmentRequirements::
      FullUnalignedAccessSupport();
}
    3140             : 
    3141             : }  // namespace compiler
    3142             : }  // namespace internal
    3143      121996 : }  // namespace v8

Generated by: LCOV version 1.10