LCOV - code coverage report
Current view: top level - src/compiler - move-optimizer.cc (source / functions) Hit Total Coverage
Test: app.info Lines: 184 184 100.0 %
Date: 2017-04-26 Functions: 15 15 100.0 %

          Line data    Source code
       1             : // Copyright 2014 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #include "src/compiler/move-optimizer.h"
       6             : 
       7             : namespace v8 {
       8             : namespace internal {
       9             : namespace compiler {
      10             : 
      11             : namespace {
      12             : 
// Identifies a move by its source and destination operands (compared in
// canonicalized form); used as the key type for the move sets/maps below.
struct MoveKey {
  InstructionOperand source;
  InstructionOperand destination;
};
      17             : 
      18             : struct MoveKeyCompare {
      19    47497717 :   bool operator()(const MoveKey& a, const MoveKey& b) const {
      20    94996586 :     if (a.source.EqualsCanonicalized(b.source)) {
      21    18479426 :       return a.destination.CompareCanonicalized(b.destination);
      22             :     }
      23             :     return a.source.CompareCanonicalized(b.source);
      24             :   }
      25             : };
      26             : 
      27             : typedef ZoneMap<MoveKey, unsigned, MoveKeyCompare> MoveMap;
      28             : 
// A small set of InstructionOperands backed by a caller-supplied buffer so
// the underlying storage can be reused across many short-lived sets. On
// platforms with complex FP register aliasing (!kSimpleFPAliasing) it also
// tracks which FP representations have been inserted, so alias queries can
// be skipped entirely when only a single representation is present.
class OperandSet {
 public:
  // Adopts |buffer| as backing storage; any previous contents are discarded.
  explicit OperandSet(ZoneVector<InstructionOperand>* buffer)
      : set_(buffer), fp_reps_(0) {
    buffer->clear();
  }

  // Adds |op| to the set. Duplicates are allowed; Contains only tests
  // membership, so they are harmless.
  void InsertOp(const InstructionOperand& op) {
    set_->push_back(op);

    // Remember the FP representation so ContainsOpOrAlias can later tell
    // whether mixed-representation alias checks are needed at all.
    if (!kSimpleFPAliasing && op.IsFPRegister())
      fp_reps_ |= RepBit(LocationOperand::cast(op).representation());
  }

  // Membership test via canonicalized equality. A linear scan is used; the
  // sets built by the optimizer (instruction operands, gap moves) stay small.
  bool Contains(const InstructionOperand& op) const {
    for (const InstructionOperand& elem : *set_) {
      if (elem.EqualsCanonicalized(op)) return true;
    }
    return false;
  }

  // Like Contains, but on complex-aliasing platforms also returns true when
  // the set holds an FP register of another representation that overlaps
  // |op|.
  bool ContainsOpOrAlias(const InstructionOperand& op) const {
    if (Contains(op)) return true;

    if (!kSimpleFPAliasing && op.IsFPRegister()) {
      // Platforms where FP registers have complex aliasing need extra checks.
      const LocationOperand& loc = LocationOperand::cast(op);
      MachineRepresentation rep = loc.representation();
      // If we haven't encountered mixed-rep FP registers, skip the extra
      // checks.
      if (!HasMixedFPReps(fp_reps_ | RepBit(rep))) return false;

      // Check register against aliasing registers of other FP representations.
      MachineRepresentation other_rep1, other_rep2;
      switch (rep) {
        case MachineRepresentation::kFloat32:
          other_rep1 = MachineRepresentation::kFloat64;
          other_rep2 = MachineRepresentation::kSimd128;
          break;
        case MachineRepresentation::kFloat64:
          other_rep1 = MachineRepresentation::kFloat32;
          other_rep2 = MachineRepresentation::kSimd128;
          break;
        case MachineRepresentation::kSimd128:
          other_rep1 = MachineRepresentation::kFloat32;
          other_rep2 = MachineRepresentation::kFloat64;
          break;
        default:
          UNREACHABLE();
          break;
      }
      const RegisterConfiguration* config = RegisterConfiguration::Turbofan();
      int base = -1;
      // Per the usage below (and the DCHECK), GetAliases yields the count of
      // |other_rep1| registers aliasing |op| and sets |base| to the first
      // overlapping register code.
      int aliases =
          config->GetAliases(rep, loc.register_code(), other_rep1, &base);
      DCHECK(aliases > 0 || (aliases == 0 && base == -1));
      while (aliases--) {
        if (Contains(AllocatedOperand(LocationOperand::REGISTER, other_rep1,
                                      base + aliases))) {
          return true;
        }
      }
      aliases = config->GetAliases(rep, loc.register_code(), other_rep2, &base);
      DCHECK(aliases > 0 || (aliases == 0 && base == -1));
      while (aliases--) {
        if (Contains(AllocatedOperand(LocationOperand::REGISTER, other_rep2,
                                      base + aliases))) {
          return true;
        }
      }
    }
    return false;
  }

 private:
  // One bit per MachineRepresentation; fp_reps_ is a small bitset of these.
  static int RepBit(MachineRepresentation rep) {
    return 1 << static_cast<int>(rep);
  }

  // True when more than one distinct representation bit is set.
  static bool HasMixedFPReps(int reps) {
    return reps && !base::bits::IsPowerOfTwo32(reps);
  }

  ZoneVector<InstructionOperand>* set_;  // Backing storage (not owned).
  int fp_reps_;  // Bitset of FP representations inserted so far.
};
     114             : 
     115    39122944 : int FindFirstNonEmptySlot(const Instruction* instr) {
     116             :   int i = Instruction::FIRST_GAP_POSITION;
     117    94607731 :   for (; i <= Instruction::LAST_GAP_POSITION; i++) {
     118    68486520 :     ParallelMove* moves = instr->parallel_moves()[i];
     119    68486520 :     if (moves == nullptr) continue;
     120    49087036 :     for (MoveOperands* move : *moves) {
     121    21402167 :       if (!move->IsRedundant()) return i;
     122             :       move->Eliminate();
     123             :     }
     124             :     moves->clear();  // Clear this redundant move.
     125             :   }
     126             :   return i;
     127             : }
     128             : 
     129             : }  // namespace
     130             : 
// All scratch state (the local vector and both operand buffers) is allocated
// in |local_zone| and reused across the optimizer's passes; |code| is the
// instruction sequence to be optimized in place.
MoveOptimizer::MoveOptimizer(Zone* local_zone, InstructionSequence* code)
    : local_zone_(local_zone),
      code_(code),
      local_vector_(local_zone),
      operand_buffer1(local_zone),
      operand_buffer2(local_zone) {}
     137             : 
     138     5975387 : void MoveOptimizer::Run() {
     139    79161117 :   for (Instruction* instruction : code()->instructions()) {
     140    39122974 :     CompressGaps(instruction);
     141             :   }
     142    13288368 :   for (InstructionBlock* block : code()->instruction_blocks()) {
     143    11458080 :     CompressBlock(block);
     144             :   }
     145    15754117 :   for (InstructionBlock* block : code()->instruction_blocks()) {
     146    11458113 :     if (block->PredecessorCount() <= 1) continue;
     147     2465801 :     if (!block->IsDeferred()) {
     148             :       bool has_only_deferred = true;
     149     2314978 :       for (RpoNumber& pred_id : block->predecessors()) {
     150     2314939 :         if (!code()->InstructionBlockAt(pred_id)->IsDeferred()) {
     151             :           has_only_deferred = false;
     152             :           break;
     153             :         }
     154             :       }
     155             :       // This would pull down common moves. If the moves occur in deferred
     156             :       // blocks, and the closest common successor is not deferred, we lose the
     157             :       // optimization of just spilling/filling in deferred blocks, when the
     158             :       // current block is not deferred.
     159     2190118 :       if (has_only_deferred) continue;
     160             :     }
     161     2465799 :     OptimizeMerge(block);
     162             :   }
     163    79159543 :   for (Instruction* gap : code()->instructions()) {
     164    39122224 :     FinalizeMoves(gap);
     165             :   }
     166      915095 : }
     167             : 
     168   117783207 : void MoveOptimizer::RemoveClobberedDestinations(Instruction* instruction) {
     169    40042108 :   if (instruction->IsCall()) return;
     170    36536255 :   ParallelMove* moves = instruction->parallel_moves()[0];
     171    36536255 :   if (moves == nullptr) return;
     172             : 
     173             :   DCHECK(instruction->parallel_moves()[1] == nullptr ||
     174             :          instruction->parallel_moves()[1]->empty());
     175             : 
     176    15689702 :   OperandSet outputs(&operand_buffer1);
     177    15689702 :   OperandSet inputs(&operand_buffer2);
     178             : 
     179             :   // Outputs and temps are treated together as potentially clobbering a
     180             :   // destination operand.
     181    44841650 :   for (size_t i = 0; i < instruction->OutputCount(); ++i) {
     182     6731117 :     outputs.InsertOp(*instruction->OutputAt(i));
     183             :   }
     184    16736552 :   for (size_t i = 0; i < instruction->TempCount(); ++i) {
     185      523421 :     outputs.InsertOp(*instruction->TempAt(i));
     186             :   }
     187             : 
     188             :   // Input operands block elisions.
     189    62524579 :   for (size_t i = 0; i < instruction->InputCount(); ++i) {
     190    23417355 :     inputs.InsertOp(*instruction->InputAt(i));
     191             :   }
     192             : 
     193             :   // Elide moves made redundant by the instruction.
     194    51446906 :   for (MoveOperands* move : *moves) {
     195    40912934 :     if (outputs.ContainsOpOrAlias(move->destination()) &&
     196             :         !inputs.ContainsOpOrAlias(move->destination())) {
     197             :       move->Eliminate();
     198             :     }
     199             :   }
     200             : 
     201             :   // The ret instruction makes any assignment before it unnecessary, except for
     202             :   // the one for its input.
     203    30536908 :   if (instruction->IsRet() || instruction->IsTailCall()) {
     204     2688888 :     for (MoveOperands* move : *moves) {
     205     1793619 :       if (!inputs.ContainsOpOrAlias(move->destination())) {
     206             :         move->Eliminate();
     207             :       }
     208             :     }
     209             :   }
     210             : }
     211             : 
// Migrates eligible moves out of |from|'s first gap into |to|'s START gap,
// i.e. pushes them "down" past the |from| instruction. A move may migrate
// only if doing so cannot change observable behavior: its destination must
// not be read by |from|, and its source must not be produced or clobbered by
// |from| (or by a move that stays behind).
void MoveOptimizer::MigrateMoves(Instruction* to, Instruction* from) {
  if (from->IsCall()) return;

  ParallelMove* from_moves = from->parallel_moves()[0];
  if (from_moves == nullptr || from_moves->empty()) return;

  OperandSet dst_cant_be(&operand_buffer1);
  OperandSet src_cant_be(&operand_buffer2);

  // If an operand is an input to the instruction, we cannot move assignments
  // where it appears on the LHS.
  for (size_t i = 0; i < from->InputCount(); ++i) {
    dst_cant_be.InsertOp(*from->InputAt(i));
  }
  // If an operand is output to the instruction, we cannot move assignments
  // where it appears on the RHS, because we would lose its value before the
  // instruction.
  // Same for temp operands.
  // The output can't appear on the LHS because we performed
  // RemoveClobberedDestinations for the "from" instruction.
  for (size_t i = 0; i < from->OutputCount(); ++i) {
    src_cant_be.InsertOp(*from->OutputAt(i));
  }
  for (size_t i = 0; i < from->TempCount(); ++i) {
    src_cant_be.InsertOp(*from->TempAt(i));
  }
  for (MoveOperands* move : *from_moves) {
    if (move->IsRedundant()) continue;
    // Assume dest has a value "V". If we have a "dest = y" move, then we can't
    // move "z = dest", because z would become y rather than "V".
    // We assume CompressMoves has happened before this, which means we don't
    // have more than one assignment to dest.
    src_cant_be.InsertOp(move->destination());
  }

  ZoneSet<MoveKey, MoveKeyCompare> move_candidates(local_zone());
  // Start with all the moves whose destination operand does not conflict;
  // these are the initial candidates for being moved down.
  for (MoveOperands* move : *from_moves) {
    if (move->IsRedundant()) continue;
    if (!dst_cant_be.ContainsOpOrAlias(move->destination())) {
      MoveKey key = {move->source(), move->destination()};
      move_candidates.insert(key);
    }
  }
  if (move_candidates.empty()) return;

  // Stabilize the candidate set: removing a candidate makes its destination
  // a "stays behind" value, which may in turn disqualify candidates that
  // read it, so iterate to a fixed point.
  bool changed = false;
  do {
    changed = false;
    for (auto iter = move_candidates.begin(); iter != move_candidates.end();) {
      auto current = iter;
      ++iter;  // Advance before a possible erase of |current|.
      InstructionOperand src = current->source;
      if (src_cant_be.ContainsOpOrAlias(src)) {
        src_cant_be.InsertOp(current->destination);
        move_candidates.erase(current);
        changed = true;
      }
    }
  } while (changed);

  // Collect the surviving candidates into |to_move| and eliminate them from
  // the source gap.
  ParallelMove to_move(local_zone());
  for (MoveOperands* move : *from_moves) {
    if (move->IsRedundant()) continue;
    MoveKey key = {move->source(), move->destination()};
    if (move_candidates.find(key) != move_candidates.end()) {
      to_move.AddMove(move->source(), move->destination(), code_zone());
      move->Eliminate();
    }
  }
  if (to_move.empty()) return;

  ParallelMove* dest =
      to->GetOrCreateParallelMove(Instruction::GapPosition::START, code_zone());

  // Merge the migrated moves with whatever |to| already had in its gap.
  CompressMoves(&to_move, dest);
  DCHECK(dest->empty());
  for (MoveOperands* m : to_move) {
    dest->push_back(m);
  }
}
     295             : 
// Merges the moves in |right| into |left|, leaving |right| empty. Moves in
// |right| are first adjusted in place (via PrepareInsertAfter) so they remain
// correct when executed after |left|; moves in |left| that |right| kills are
// eliminated, then the survivors of |right| are appended to |left|.
void MoveOptimizer::CompressMoves(ParallelMove* left, MoveOpVector* right) {
  if (right == nullptr) return;

  // Scratch vector shared across passes; must start and end empty.
  MoveOpVector& eliminated = local_vector();
  DCHECK(eliminated.empty());

  if (!left->empty()) {
    // Modify the right moves in place and collect moves that will be killed by
    // merging the two gaps.
    for (MoveOperands* move : *right) {
      if (move->IsRedundant()) continue;
      left->PrepareInsertAfter(move, &eliminated);
    }
    // Eliminate dead moves.
    for (MoveOperands* to_eliminate : eliminated) {
      to_eliminate->Eliminate();
    }
    eliminated.clear();
  }
  // Add all possibly modified moves from right side.
  for (MoveOperands* move : *right) {
    if (move->IsRedundant()) continue;
    left->push_back(move);
  }
  // Nuke right.
  right->clear();
  DCHECK(eliminated.empty());
}
     324             : 
     325    39122959 : void MoveOptimizer::CompressGaps(Instruction* instruction) {
     326    39122959 :   int i = FindFirstNonEmptySlot(instruction);
     327             :   bool has_moves = i <= Instruction::LAST_GAP_POSITION;
     328             :   USE(has_moves);
     329             : 
     330    39123021 :   if (i == Instruction::LAST_GAP_POSITION) {
     331             :     std::swap(instruction->parallel_moves()[Instruction::FIRST_GAP_POSITION],
     332             :               instruction->parallel_moves()[Instruction::LAST_GAP_POSITION]);
     333    35880475 :   } else if (i == Instruction::FIRST_GAP_POSITION) {
     334             :     CompressMoves(
     335             :         instruction->parallel_moves()[Instruction::FIRST_GAP_POSITION],
     336     9759466 :         instruction->parallel_moves()[Instruction::LAST_GAP_POSITION]);
     337             :   }
     338             :   // We either have no moves, or, after swapping or compressing, we have
     339             :   // all the moves in the first gap position, and none in the second/end gap
     340             :   // position.
     341             :   ParallelMove* first =
     342             :       instruction->parallel_moves()[Instruction::FIRST_GAP_POSITION];
     343             :   ParallelMove* last =
     344             :       instruction->parallel_moves()[Instruction::LAST_GAP_POSITION];
     345             :   USE(first);
     346             :   USE(last);
     347             : 
     348             :   DCHECK(!has_moves ||
     349             :          (first != nullptr && (last == nullptr || last->empty())));
     350    39122967 : }
     351             : 
     352    40042600 : void MoveOptimizer::CompressBlock(InstructionBlock* block) {
     353             :   int first_instr_index = block->first_instruction_index();
     354             :   int last_instr_index = block->last_instruction_index();
     355             : 
     356             :   // Start by removing gap assignments where the output of the subsequent
     357             :   // instruction appears on LHS, as long as they are not needed by its input.
     358    11658009 :   Instruction* prev_instr = code()->instructions()[first_instr_index];
     359    11658009 :   RemoveClobberedDestinations(prev_instr);
     360             : 
     361    40042580 :   for (int index = first_instr_index + 1; index <= last_instr_index; ++index) {
     362    28384661 :     Instruction* instr = code()->instructions()[index];
     363             :     // Migrate to the gap of prev_instr eligible moves from instr.
     364    28384661 :     MigrateMoves(instr, prev_instr);
     365             :     // Remove gap assignments clobbered by instr's output.
     366    28384517 :     RemoveClobberedDestinations(instr);
     367             :     prev_instr = instr;
     368             :   }
     369    11657971 : }
     370             : 
     371             : 
     372     3639007 : const Instruction* MoveOptimizer::LastInstruction(
     373     7278014 :     const InstructionBlock* block) const {
     374     3639029 :   return code()->instructions()[block->last_instruction_index()];
     375             : }
     376             : 
     377             : 
// Hoists gap moves common to ALL predecessors of merge block |block| out of
// the predecessors and into |block|'s first gap. Proceeds in phases:
// (1) bail out unless every predecessor ends in an instruction that moves can
// safely cross; (2) count how often each move occurs across predecessors;
// (3) discard moves that are not shared by everyone, plus (to a fixed point)
// shared moves whose source a left-behind move would clobber; (4) splice the
// survivors into |block| and eliminate them from the predecessors.
void MoveOptimizer::OptimizeMerge(InstructionBlock* block) {
  DCHECK(block->PredecessorCount() > 1);
  // Ensure that the last instruction in all incoming blocks don't contain
  // things that would prevent moving gap moves across them.
  for (RpoNumber& pred_index : block->predecessors()) {
    const InstructionBlock* pred = code()->InstructionBlockAt(pred_index);

    // If the predecessor has more than one successor, we shouldn't attempt to
    // move down to this block (one of the successors) any of the gap moves,
    // because their effect may be necessary to the other successors.
    if (pred->SuccessorCount() > 1) return;

    const Instruction* last_instr =
        code()->instructions()[pred->last_instruction_index()];
    if (last_instr->IsCall()) return;
    if (last_instr->TempCount() != 0) return;
    if (last_instr->OutputCount() != 0) return;
    for (size_t i = 0; i < last_instr->InputCount(); ++i) {
      const InstructionOperand* op = last_instr->InputAt(i);
      // Only constants and immediates are guaranteed not to depend on moves.
      if (!op->IsConstant() && !op->IsImmediate()) return;
    }
  }
  // TODO(dcarney): pass a ZoneStats down for this?
  MoveMap move_map(local_zone());
  size_t correct_counts = 0;
  // Accumulate set of shared moves: count, per unique (src, dst) move, how
  // many predecessors carry it.
  for (RpoNumber& pred_index : block->predecessors()) {
    const InstructionBlock* pred = code()->InstructionBlockAt(pred_index);
    const Instruction* instr = LastInstruction(pred);
    if (instr->parallel_moves()[0] == nullptr ||
        instr->parallel_moves()[0]->empty()) {
      return;
    }
    for (const MoveOperands* move : *instr->parallel_moves()[0]) {
      if (move->IsRedundant()) continue;
      InstructionOperand src = move->source();
      InstructionOperand dst = move->destination();
      MoveKey key = {src, dst};
      auto res = move_map.insert(std::make_pair(key, 1));
      if (!res.second) {
        res.first->second++;
        if (res.first->second == block->PredecessorCount()) {
          correct_counts++;  // This move is now known to be in every pred.
        }
      }
    }
  }
  if (move_map.empty() || correct_counts == 0) return;

  // Find insertion point.
  Instruction* instr = code()->instructions()[block->first_instruction_index()];

  if (correct_counts != move_map.size()) {
    // Moves that are unique to each predecessor won't be pushed to the common
    // successor.
    OperandSet conflicting_srcs(&operand_buffer1);
    for (auto iter = move_map.begin(), end = move_map.end(); iter != end;) {
      auto current = iter;
      ++iter;  // Advance before a possible erase of |current|.
      if (current->second != block->PredecessorCount()) {
        InstructionOperand dest = current->first.destination;
        // Not all the moves in all the gaps are the same. Maybe some are. If
        // there are such moves, we could move them, but the destination of the
        // moves staying behind can't appear as a source of a common move,
        // because the move staying behind will clobber this destination.
        conflicting_srcs.InsertOp(dest);
        move_map.erase(current);
      }
    }

    bool changed = false;
    do {
      // If a common move can't be pushed to the common successor, then its
      // destination also can't appear as source to any move being pushed.
      changed = false;
      for (auto iter = move_map.begin(), end = move_map.end(); iter != end;) {
        auto current = iter;
        ++iter;
        DCHECK_EQ(block->PredecessorCount(), current->second);
        if (conflicting_srcs.ContainsOpOrAlias(current->first.source)) {
          conflicting_srcs.InsertOp(current->first.destination);
          move_map.erase(current);
          changed = true;
        }
      }
    } while (changed);
  }

  if (move_map.empty()) return;

  DCHECK_NOT_NULL(instr);
  bool gap_initialized = true;
  if (instr->parallel_moves()[0] != nullptr &&
      !instr->parallel_moves()[0]->empty()) {
    // Will compress after insertion.
    gap_initialized = false;
    std::swap(instr->parallel_moves()[0], instr->parallel_moves()[1]);
  }
  ParallelMove* moves = instr->GetOrCreateParallelMove(
      static_cast<Instruction::GapPosition>(0), code_zone());
  // Delete relevant entries in predecessors and move everything to block.
  // Only the first predecessor contributes copies; the others just have the
  // (identical) moves eliminated.
  bool first_iteration = true;
  for (RpoNumber& pred_index : block->predecessors()) {
    const InstructionBlock* pred = code()->InstructionBlockAt(pred_index);
    for (MoveOperands* move : *LastInstruction(pred)->parallel_moves()[0]) {
      if (move->IsRedundant()) continue;
      MoveKey key = {move->source(), move->destination()};
      auto it = move_map.find(key);
      if (it != move_map.end()) {
        if (first_iteration) {
          moves->AddMove(move->source(), move->destination());
        }
        move->Eliminate();
      }
    }
    first_iteration = false;
  }
  // Compress.
  if (!gap_initialized) {
    CompressMoves(instr->parallel_moves()[0], instr->parallel_moves()[1]);
  }
  CompressBlock(block);
}
     501             : 
     502             : 
     503             : namespace {
     504             : 
     505    13626811 : bool IsSlot(const InstructionOperand& op) {
     506    21268584 :   return op.IsStackSlot() || op.IsFPStackSlot();
     507             : }
     508             : 
     509             : 
     510    17442590 : bool LoadCompare(const MoveOperands* a, const MoveOperands* b) {
     511    34885216 :   if (!a->source().EqualsCanonicalized(b->source())) {
     512    16358930 :     return a->source().CompareCanonicalized(b->source());
     513             :   }
     514     1083696 :   if (IsSlot(a->destination()) && !IsSlot(b->destination())) return false;
     515      871259 :   if (!IsSlot(a->destination()) && IsSlot(b->destination())) return true;
     516     1740366 :   return a->destination().CompareCanonicalized(b->destination());
     517             : }
     518             : 
     519             : }  // namespace
     520             : 
     521             : 
     522             : // Split multiple loads of the same constant or stack slot off into the second
     523             : // slot and keep remaining moves in the first slot.
     524    39122247 : void MoveOptimizer::FinalizeMoves(Instruction* instr) {
     525             :   MoveOpVector& loads = local_vector();
     526             :   DCHECK(loads.empty());
     527             : 
     528    39122247 :   ParallelMove* parallel_moves = instr->parallel_moves()[0];
     529    39122247 :   if (parallel_moves == nullptr) return;
     530             :   // Find all the loads.
     531    69911307 :   for (MoveOperands* move : *parallel_moves) {
     532    32487556 :     if (move->IsRedundant()) continue;
     533    23679366 :     if (move->source().IsConstant() || IsSlot(move->source())) {
     534    19058735 :       loads.push_back(move);
     535             :     }
     536             :   }
     537    18711878 :   if (loads.empty()) return;
     538             :   // Group the loads by source, moving the preferred destination to the
     539             :   // beginning of the group.
     540    10415116 :   std::sort(loads.begin(), loads.end(), LoadCompare);
     541             :   MoveOperands* group_begin = nullptr;
     542    39888895 :   for (MoveOperands* load : loads) {
     543             :     // New group.
     544    27702475 :     if (group_begin == nullptr ||
     545     8643769 :         !load->source().EqualsCanonicalized(group_begin->source())) {
     546             :       group_begin = load;
     547             :       continue;
     548             :     }
     549             :     // Nothing to be gained from splitting here.
     550      858754 :     if (IsSlot(group_begin->destination())) continue;
     551             :     // Insert new move into slot 1.
     552             :     ParallelMove* slot_1 = instr->GetOrCreateParallelMove(
     553      855810 :         static_cast<Instruction::GapPosition>(1), code_zone());
     554      855810 :     slot_1->AddMove(group_begin->destination(), load->destination());
     555             :     load->Eliminate();
     556             :   }
     557             :   loads.clear();
     558             : }
     559             : 
     560             : }  // namespace compiler
     561             : }  // namespace internal
     562             : }  // namespace v8

Generated by: LCOV version 1.10