LCOV - code coverage report
Current view: top level - src/compiler/backend - register-allocator.cc (source / functions)
Test: app.info
Date: 2019-03-21

                   Hit      Total   Coverage
Lines:            1210       1685     71.8 %
Functions:         113        190     59.5 %

          Line data    Source code
       1             : // Copyright 2014 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #include "src/compiler/backend/register-allocator.h"
       6             : 
       7             : #include <iomanip>
       8             : 
       9             : #include "src/assembler-inl.h"
      10             : #include "src/base/adapters.h"
      11             : #include "src/base/small-vector.h"
      12             : #include "src/compiler/linkage.h"
      13             : #include "src/string-stream.h"
      14             : #include "src/vector.h"
      15             : 
      16             : namespace v8 {
      17             : namespace internal {
      18             : namespace compiler {
      19             : 
      20             : #define TRACE(...)                             \
      21             :   do {                                         \
      22             :     if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \
      23             :   } while (false)
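
A brief aside on the idiom above: the do { ... } while (false) wrapper makes
each TRACE(...) expansion a single statement, so the macro composes safely
with unbraced if/else. In the sketch below, `failed`, `vreg`, and `Retry` are
illustrative names, not part of this file:

  if (failed)
    TRACE("allocation failed for vreg %d\n", vreg);  // expands to one statement
  else
    Retry();  // still binds to the `if` above

Without the wrapper, the semicolon after a plain braced expansion would close
the `if` branch early and the `else` would fail to parse.
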
      24             : 
      25             : namespace {
      26             : 
      27             : static constexpr int kFloat32Bit =
      28             :     RepresentationBit(MachineRepresentation::kFloat32);
      29             : static constexpr int kSimd128Bit =
      30             :     RepresentationBit(MachineRepresentation::kSimd128);
      31             : 
      32             : int GetRegisterCount(const RegisterConfiguration* cfg, RegisterKind kind) {
      33             :   return kind == FP_REGISTERS ? cfg->num_double_registers()
      34     2723002 :                               : cfg->num_general_registers();
      35             : }
      36             : 
      37             : int GetAllocatableRegisterCount(const RegisterConfiguration* cfg,
      38             :                                 RegisterKind kind) {
      39             :   return kind == FP_REGISTERS ? cfg->num_allocatable_double_registers()
      40     2723002 :                               : cfg->num_allocatable_general_registers();
      41             : }
      42             : 
      43             : const int* GetAllocatableRegisterCodes(const RegisterConfiguration* cfg,
      44             :                                        RegisterKind kind) {
      45             :   return kind == FP_REGISTERS ? cfg->allocatable_double_codes()
      46     2723002 :                               : cfg->allocatable_general_codes();
      47             : }
      48             : 
      49             : const InstructionBlock* GetContainingLoop(const InstructionSequence* sequence,
      50             :                                           const InstructionBlock* block) {
      51             :   RpoNumber index = block->loop_header();
      52     4125673 :   if (!index.IsValid()) return nullptr;
      53      425709 :   return sequence->InstructionBlockAt(index);
      54             : }
      55             : 
      56             : const InstructionBlock* GetInstructionBlock(const InstructionSequence* code,
      57             :                                             LifetimePosition pos) {
      58    13765359 :   return code->GetInstructionBlock(pos.ToInstructionIndex());
      59             : }
      60             : 
      61             : Instruction* GetLastInstruction(InstructionSequence* code,
      62             :                                 const InstructionBlock* block) {
      63             :   return code->InstructionAt(block->last_instruction_index());
      64             : }
      65             : 
       66             : // TODO(dcarney): fix frame to allow frame accesses to half-size locations.
      67             : int GetByteWidth(MachineRepresentation rep) {
      68     5919292 :   switch (rep) {
      69             :     case MachineRepresentation::kBit:
      70             :     case MachineRepresentation::kWord8:
      71             :     case MachineRepresentation::kWord16:
      72             :     case MachineRepresentation::kWord32:
      73             :     case MachineRepresentation::kFloat32:
      74             :       return kSystemPointerSize;
      75             :     case MachineRepresentation::kTaggedSigned:
      76             :     case MachineRepresentation::kTaggedPointer:
      77             :     case MachineRepresentation::kTagged:
      78             :     case MachineRepresentation::kCompressedSigned:
      79             :     case MachineRepresentation::kCompressedPointer:
      80             :     case MachineRepresentation::kCompressed:
       81             :       // TODO(ishell): kTaggedSize once half-size locations are supported.
      82             :       return kSystemPointerSize;
      83             :     case MachineRepresentation::kWord64:
      84             :     case MachineRepresentation::kFloat64:
      85             :       return kDoubleSize;
      86             :     case MachineRepresentation::kSimd128:
      87             :       return kSimd128Size;
      88             :     case MachineRepresentation::kNone:
      89             :       break;
      90             :   }
      91           0 :   UNREACHABLE();
      92             : }
      93             : 
      94             : }  // namespace
      95             : 
      96             : class LiveRangeBound {
      97             :  public:
      98             :   explicit LiveRangeBound(LiveRange* range, bool skip)
      99    44082798 :       : range_(range), start_(range->Start()), end_(range->End()), skip_(skip) {
     100             :     DCHECK(!range->IsEmpty());
     101             :   }
     102             : 
     103             :   bool CanCover(LifetimePosition position) {
     104   261298149 :     return start_ <= position && position < end_;
     105             :   }
     106             : 
     107             :   LiveRange* const range_;
     108             :   const LifetimePosition start_;
     109             :   const LifetimePosition end_;
     110             :   const bool skip_;
     111             : 
     112             :  private:
     113             :   DISALLOW_COPY_AND_ASSIGN(LiveRangeBound);
     114             : };
     115             : 
     116             : struct FindResult {
     117             :   LiveRange* cur_cover_;
     118             :   LiveRange* pred_cover_;
     119             : };
     120             : 
     121             : class LiveRangeBoundArray {
     122             :  public:
     123    91463880 :   LiveRangeBoundArray() : length_(0), start_(nullptr) {}
     124             : 
     125             :   bool ShouldInitialize() { return start_ == nullptr; }
     126             : 
     127     6965317 :   void Initialize(Zone* zone, TopLevelLiveRange* range) {
     128     6965317 :     size_t max_child_count = range->GetMaxChildCount();
     129             : 
     130     6965347 :     start_ = zone->NewArray<LiveRangeBound>(max_child_count);
     131     6965347 :     length_ = 0;
     132             :     LiveRangeBound* curr = start_;
     133             :     // Normally, spilled ranges do not need connecting moves, because the spill
     134             :     // location has been assigned at definition. For ranges spilled in deferred
     135             :     // blocks, that is not the case, so we need to connect the spilled children.
     136    95130935 :     for (LiveRange *i = range; i != nullptr; i = i->next(), ++curr, ++length_) {
     137    44082794 :       new (curr) LiveRangeBound(i, i->spilled());
     138             :     }
     139     6965347 :   }
     140             : 
     141             :   LiveRangeBound* Find(const LifetimePosition position) const {
     142             :     size_t left_index = 0;
     143   179462621 :     size_t right_index = length_;
     144             :     while (true) {
     145   680081314 :       size_t current_index = left_index + (right_index - left_index) / 2;
     146             :       DCHECK(right_index > current_index);
     147   680081314 :       LiveRangeBound* bound = &start_[current_index];
     148   680081314 :       if (bound->start_ <= position) {
     149   454062023 :         if (position < bound->end_) return bound;
     150             :         DCHECK(left_index < current_index);
     151             :         left_index = current_index;
     152             :       } else {
     153             :         right_index = current_index;
     154             :       }
     155             :     }
     156             :   }
     157             : 
     158             :   LiveRangeBound* FindPred(const InstructionBlock* pred) {
     159             :     LifetimePosition pred_end =
     160             :         LifetimePosition::InstructionFromInstructionIndex(
     161             :             pred->last_instruction_index());
     162             :     return Find(pred_end);
     163             :   }
     164             : 
     165             :   LiveRangeBound* FindSucc(const InstructionBlock* succ) {
     166             :     LifetimePosition succ_start = LifetimePosition::GapFromInstructionIndex(
     167             :         succ->first_instruction_index());
     168             :     return Find(succ_start);
     169             :   }
     170             : 
     171   131184513 :   bool FindConnectableSubranges(const InstructionBlock* block,
     172             :                                 const InstructionBlock* pred,
     173             :                                 FindResult* result) const {
     174             :     LifetimePosition pred_end =
     175             :         LifetimePosition::InstructionFromInstructionIndex(
     176             :             pred->last_instruction_index());
     177             :     LiveRangeBound* bound = Find(pred_end);
     178   131184513 :     result->pred_cover_ = bound->range_;
     179             :     LifetimePosition cur_start = LifetimePosition::GapFromInstructionIndex(
     180             :         block->first_instruction_index());
     181             : 
     182   131184513 :     if (bound->CanCover(cur_start)) {
     183             :       // Both blocks are covered by the same range, so there is nothing to
     184             :       // connect.
     185             :       return false;
     186             :     }
     187             :     bound = Find(cur_start);
     188    46496785 :     if (bound->skip_) {
     189             :       return false;
     190             :     }
     191     9102446 :     result->cur_cover_ = bound->range_;
     192             :     DCHECK(result->pred_cover_ != nullptr && result->cur_cover_ != nullptr);
     193     9102446 :     return (result->cur_cover_ != result->pred_cover_);
     194             :   }
     195             : 
     196             :  private:
     197             :   size_t length_;
     198             :   LiveRangeBound* start_;
     199             : 
     200             :   DISALLOW_COPY_AND_ASSIGN(LiveRangeBoundArray);
     201             : };
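
A standalone sketch of the lookup LiveRangeBoundArray::Find performs above:
a binary search over sorted, disjoint, half-open [start, end) bounds. Like
Find, it has no failure exit and relies on the invariant (asserted via DCHECK
in the original) that some bound covers the queried position. `Bound` and
`FindCoveringBound` are illustrative stand-ins, not V8 types:

  #include <cassert>
  #include <cstddef>

  struct Bound {
    int start;  // inclusive
    int end;    // exclusive
  };

  const Bound* FindCoveringBound(const Bound* bounds, size_t length,
                                 int position) {
    size_t left = 0;
    size_t right = length;
    while (true) {
      size_t mid = left + (right - left) / 2;
      assert(right > mid);
      const Bound* bound = &bounds[mid];
      if (bound->start <= position) {
        if (position < bound->end) return bound;  // covering bound found
        assert(left < mid);  // guaranteed by the existence invariant
        left = mid;          // answer lies strictly to the right of mid
      } else {
        right = mid;         // answer lies to the left of mid
      }
    }
  }

  // E.g. with Bound bounds[] = {{0, 4}, {4, 10}, {12, 20}}, the call
  // FindCoveringBound(bounds, 3, 5) returns &bounds[1].
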
     202             : 
     203             : class LiveRangeFinder {
     204             :  public:
     205     2516641 :   explicit LiveRangeFinder(const RegisterAllocationData* data, Zone* zone)
     206             :       : data_(data),
     207             :         bounds_length_(static_cast<int>(data_->live_ranges().size())),
     208     2516641 :         bounds_(zone->NewArray<LiveRangeBoundArray>(bounds_length_)),
     209     7550839 :         zone_(zone) {
     210   185445061 :     for (int i = 0; i < bounds_length_; ++i) {
     211    91463752 :       new (&bounds_[i]) LiveRangeBoundArray();
     212             :     }
     213     2517557 :   }
     214             : 
     215    86611792 :   LiveRangeBoundArray* ArrayFor(int operand_index) {
     216             :     DCHECK(operand_index < bounds_length_);
     217   173223584 :     TopLevelLiveRange* range = data_->live_ranges()[operand_index];
     218             :     DCHECK(range != nullptr && !range->IsEmpty());
     219    86611792 :     LiveRangeBoundArray* array = &bounds_[operand_index];
     220    86611792 :     if (array->ShouldInitialize()) {
     221     6965348 :       array->Initialize(zone_, range);
     222             :     }
     223    86611793 :     return array;
     224             :   }
     225             : 
     226             :  private:
     227             :   const RegisterAllocationData* const data_;
     228             :   const int bounds_length_;
     229             :   LiveRangeBoundArray* const bounds_;
     230             :   Zone* const zone_;
     231             : 
     232             :   DISALLOW_COPY_AND_ASSIGN(LiveRangeFinder);
     233             : };
     234             : 
     235             : typedef std::pair<ParallelMove*, InstructionOperand> DelayedInsertionMapKey;
     236             : 
     237             : struct DelayedInsertionMapCompare {
     238             :   bool operator()(const DelayedInsertionMapKey& a,
     239             :                   const DelayedInsertionMapKey& b) const {
     240         515 :     if (a.first == b.first) {
     241             :       return a.second.Compare(b.second);
     242             :     }
     243         515 :     return a.first < b.first;
     244             :   }
     245             : };
     246             : 
     247             : typedef ZoneMap<DelayedInsertionMapKey, InstructionOperand,
     248             :                 DelayedInsertionMapCompare>
     249             :     DelayedInsertionMap;
     250             : 
     251   116633965 : UsePosition::UsePosition(LifetimePosition pos, InstructionOperand* operand,
     252             :                          void* hint, UsePositionHintType hint_type)
     253   116633965 :     : operand_(operand), hint_(hint), next_(nullptr), pos_(pos), flags_(0) {
     254             :   DCHECK_IMPLIES(hint == nullptr, hint_type == UsePositionHintType::kNone);
     255             :   bool register_beneficial = true;
     256             :   UsePositionType type = UsePositionType::kRegisterOrSlot;
     257   230222157 :   if (operand_ != nullptr && operand_->IsUnallocated()) {
     258             :     const UnallocatedOperand* unalloc = UnallocatedOperand::cast(operand_);
     259   113588658 :     if (unalloc->HasRegisterPolicy()) {
     260             :       type = UsePositionType::kRequiresRegister;
     261    68319811 :     } else if (unalloc->HasSlotPolicy()) {
     262             :       type = UsePositionType::kRequiresSlot;
     263             :       register_beneficial = false;
     264    53462952 :     } else if (unalloc->HasRegisterOrSlotOrConstantPolicy()) {
     265             :       type = UsePositionType::kRegisterOrSlotOrConstant;
     266             :       register_beneficial = false;
     267             :     } else {
     268    53463325 :       register_beneficial = !unalloc->HasRegisterOrSlotPolicy();
     269             :     }
     270             :   }
     271   233267930 :   flags_ = TypeField::encode(type) | HintTypeField::encode(hint_type) |
     272   116633965 :            RegisterBeneficialField::encode(register_beneficial) |
     273   116633965 :            AssignedRegisterField::encode(kUnassignedRegister);
     274             :   DCHECK(pos_.IsValid());
     275   116633965 : }
     276             : 
     277           0 : bool UsePosition::HasHint() const {
     278             :   int hint_register;
     279   116723972 :   return HintRegister(&hint_register);
     280             : }
     281             : 
     282   395774114 : bool UsePosition::HintRegister(int* register_code) const {
     283   395774114 :   if (hint_ == nullptr) return false;
     284   181152628 :   switch (HintTypeField::decode(flags_)) {
     285             :     case UsePositionHintType::kNone:
     286             :     case UsePositionHintType::kUnresolved:
     287             :       return false;
     288             :     case UsePositionHintType::kUsePos: {
     289             :       UsePosition* use_pos = reinterpret_cast<UsePosition*>(hint_);
     290    28543277 :       int assigned_register = AssignedRegisterField::decode(use_pos->flags_);
     291    28543277 :       if (assigned_register == kUnassignedRegister) return false;
     292    12220073 :       *register_code = assigned_register;
     293    12220073 :       return true;
     294             :     }
     295             :     case UsePositionHintType::kOperand: {
     296             :       InstructionOperand* operand =
     297             :           reinterpret_cast<InstructionOperand*>(hint_);
     298    50988569 :       *register_code = LocationOperand::cast(operand)->register_code();
     299    50988569 :       return true;
     300             :     }
     301             :     case UsePositionHintType::kPhi: {
     302             :       RegisterAllocationData::PhiMapValue* phi =
     303             :           reinterpret_cast<RegisterAllocationData::PhiMapValue*>(hint_);
     304             :       int assigned_register = phi->assigned_register();
     305     1444455 :       if (assigned_register == kUnassignedRegister) return false;
     306      241239 :       *register_code = assigned_register;
     307      241239 :       return true;
     308             :     }
     309             :   }
     310           0 :   UNREACHABLE();
     311             : }
     312             : 
     313    51508605 : UsePositionHintType UsePosition::HintTypeForOperand(
     314             :     const InstructionOperand& op) {
     315    51508605 :   switch (op.kind()) {
     316             :     case InstructionOperand::CONSTANT:
     317             :     case InstructionOperand::IMMEDIATE:
     318             :     case InstructionOperand::EXPLICIT:
     319             :       return UsePositionHintType::kNone;
     320             :     case InstructionOperand::UNALLOCATED:
     321    23402623 :       return UsePositionHintType::kUnresolved;
     322             :     case InstructionOperand::ALLOCATED:
     323    29474604 :       if (op.IsRegister() || op.IsFPRegister()) {
     324             :         return UsePositionHintType::kOperand;
     325             :       } else {
     326             :         DCHECK(op.IsStackSlot() || op.IsFPStackSlot());
     327     1224195 :         return UsePositionHintType::kNone;
     328             :       }
     329             :     case InstructionOperand::INVALID:
     330             :       break;
     331             :   }
     332           0 :   UNREACHABLE();
     333             : }
     334             : 
     335           0 : void UsePosition::SetHint(UsePosition* use_pos) {
     336             :   DCHECK_NOT_NULL(use_pos);
     337    15323069 :   hint_ = use_pos;
     338    30646138 :   flags_ = HintTypeField::update(flags_, UsePositionHintType::kUsePos);
     339           0 : }
     340             : 
     341           0 : void UsePosition::ResolveHint(UsePosition* use_pos) {
     342             :   DCHECK_NOT_NULL(use_pos);
     343    16461340 :   if (HintTypeField::decode(flags_) != UsePositionHintType::kUnresolved) return;
     344     8230675 :   hint_ = use_pos;
     345     8230675 :   flags_ = HintTypeField::update(flags_, UsePositionHintType::kUsePos);
     346             : }
     347             : 
     348           0 : void UsePosition::set_type(UsePositionType type, bool register_beneficial) {
     349             :   DCHECK_IMPLIES(type == UsePositionType::kRequiresSlot, !register_beneficial);
     350             :   DCHECK_EQ(kUnassignedRegister, AssignedRegisterField::decode(flags_));
     351    20633552 :   flags_ = TypeField::encode(type) |
     352    20633552 :            RegisterBeneficialField::encode(register_beneficial) |
     353    20633552 :            HintTypeField::encode(HintTypeField::decode(flags_)) |
     354    20633552 :            AssignedRegisterField::encode(kUnassignedRegister);
     355           0 : }
     356             : 
     357           0 : UseInterval* UseInterval::SplitAt(LifetimePosition pos, Zone* zone) {
     358             :   DCHECK(Contains(pos) && pos != start());
     359    51789358 :   UseInterval* after = new (zone) UseInterval(pos, end_);
     360    51789800 :   after->next_ = next_;
     361    51789800 :   next_ = nullptr;
     362    51789800 :   end_ = pos;
     363           0 :   return after;
     364             : }
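
A sketch of the split performed above, using plain values instead of
zone-allocated list nodes: splitting a half-open interval [start, end) at
`pos` yields [start, pos) and [pos, end), which together cover exactly the
original positions with no overlap. `Interval` and `SplitIntervalAt` are
illustrative names only:

  #include <cassert>
  #include <utility>

  struct Interval {
    int start;  // inclusive
    int end;    // exclusive
  };

  std::pair<Interval, Interval> SplitIntervalAt(Interval interval, int pos) {
    assert(interval.start < pos && pos < interval.end);  // pos strictly inside
    return {{interval.start, pos}, {pos, interval.end}};
  }

  // E.g. SplitIntervalAt({2, 10}, 6) yields {2, 6} and {6, 10}.
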
     365             : 
     366           0 : void LifetimePosition::Print() const { StdoutStream{} << *this << std::endl; }
     367             : 
     368           0 : std::ostream& operator<<(std::ostream& os, const LifetimePosition pos) {
     369           0 :   os << '@' << pos.ToInstructionIndex();
     370           0 :   if (pos.IsGapPosition()) {
     371             :     os << 'g';
     372             :   } else {
     373             :     os << 'i';
     374             :   }
     375           0 :   if (pos.IsStart()) {
     376             :     os << 's';
     377             :   } else {
     378             :     os << 'e';
     379             :   }
     380           0 :   return os;
     381             : }
     382             : 
     383           0 : LiveRange::LiveRange(int relative_id, MachineRepresentation rep,
     384             :                      TopLevelLiveRange* top_level)
     385             :     : relative_id_(relative_id),
     386             :       bits_(0),
     387             :       last_interval_(nullptr),
     388             :       first_interval_(nullptr),
     389             :       first_pos_(nullptr),
     390             :       top_level_(top_level),
     391             :       next_(nullptr),
     392             :       current_interval_(nullptr),
     393             :       last_processed_use_(nullptr),
     394             :       current_hint_position_(nullptr),
     395   179936432 :       splitting_pointer_(nullptr) {
     396             :   DCHECK(AllocatedOperand::IsSupportedRepresentation(rep));
     397             :   bits_ = AssignedRegisterField::encode(kUnassignedRegister) |
     398   179936432 :           RepresentationField::encode(rep) |
     399   179936432 :           ControlFlowRegisterHint::encode(kUnassignedRegister);
     400           0 : }
     401             : 
     402           0 : void LiveRange::VerifyPositions() const {
     403             :   // Walk the positions, verifying that each is in an interval.
     404           0 :   UseInterval* interval = first_interval_;
     405           0 :   for (UsePosition* pos = first_pos_; pos != nullptr; pos = pos->next()) {
     406           0 :     CHECK(Start() <= pos->pos());
     407           0 :     CHECK(pos->pos() <= End());
     408           0 :     CHECK_NOT_NULL(interval);
     409           0 :     while (!interval->Contains(pos->pos()) && interval->end() != pos->pos()) {
     410             :       interval = interval->next();
     411           0 :       CHECK_NOT_NULL(interval);
     412             :     }
     413             :   }
     414           0 : }
     415             : 
     416           0 : void LiveRange::VerifyIntervals() const {
     417             :   DCHECK(first_interval()->start() == Start());
     418             :   LifetimePosition last_end = first_interval()->end();
     419           0 :   for (UseInterval* interval = first_interval()->next(); interval != nullptr;
     420             :        interval = interval->next()) {
     421             :     DCHECK(last_end <= interval->start());
     422             :     last_end = interval->end();
     423             :   }
     424             :   DCHECK(last_end == End());
     425           0 : }
     426             : 
     427           0 : void LiveRange::set_assigned_register(int reg) {
     428             :   DCHECK(!HasRegisterAssigned() && !spilled());
     429   192044573 :   bits_ = AssignedRegisterField::update(bits_, reg);
     430           0 : }
     431             : 
     432           0 : void LiveRange::UnsetAssignedRegister() {
     433             :   DCHECK(HasRegisterAssigned() && !spilled());
     434           0 :   bits_ = AssignedRegisterField::update(bits_, kUnassignedRegister);
     435           0 : }
     436             : 
     437           0 : void LiveRange::AttachToNext() {
     438             :   DCHECK_NOT_NULL(next_);
     439             :   DCHECK_NE(TopLevel()->last_child_covers_, next_);
     440           0 :   last_interval_->set_next(next_->first_interval());
     441           0 :   next_->first_interval_ = nullptr;
     442           0 :   last_interval_ = next_->last_interval_;
     443           0 :   next_->last_interval_ = nullptr;
     444           0 :   if (first_pos() == nullptr) {
     445           0 :     first_pos_ = next_->first_pos();
     446             :   } else {
     447             :     UsePosition* ptr = first_pos_;
     448           0 :     while (ptr->next() != nullptr) {
     449             :       ptr = ptr->next();
     450             :     }
     451           0 :     ptr->set_next(next_->first_pos());
     452             :   }
     453           0 :   next_->first_pos_ = nullptr;
     454           0 :   LiveRange* old_next = next_;
     455           0 :   next_ = next_->next_;
     456           0 :   old_next->next_ = nullptr;
     457           0 : }
     458             : 
     459           0 : void LiveRange::Unspill() {
     460             :   DCHECK(spilled());
     461             :   set_spilled(false);
     462           0 :   bits_ = AssignedRegisterField::update(bits_, kUnassignedRegister);
     463           0 : }
     464             : 
     465           0 : void LiveRange::Spill() {
     466             :   DCHECK(!spilled());
     467             :   DCHECK(!TopLevel()->HasNoSpillType());
     468             :   set_spilled(true);
     469    26150084 :   bits_ = AssignedRegisterField::update(bits_, kUnassignedRegister);
     470           0 : }
     471             : 
     472           0 : RegisterKind LiveRange::kind() const {
     473   135436520 :   return IsFloatingPoint(representation()) ? FP_REGISTERS : GENERAL_REGISTERS;
     474             : }
     475             : 
     476           0 : UsePosition* LiveRange::FirstHintPosition(int* register_index) const {
     477    85896200 :   for (UsePosition* pos = first_pos_; pos != nullptr; pos = pos->next()) {
     478   275179878 :     if (pos->HintRegister(register_index)) return pos;
     479             :   }
     480             :   return nullptr;
     481             : }
     482             : 
     483           0 : UsePosition* LiveRange::NextUsePosition(LifetimePosition start) const {
     484    56887674 :   UsePosition* use_pos = last_processed_use_;
     485    56887674 :   if (use_pos == nullptr || use_pos->pos() > start) {
     486             :     use_pos = first_pos();
     487             :   }
     488  2377152876 :   while (use_pos != nullptr && use_pos->pos() < start) {
     489             :     use_pos = use_pos->next();
     490             :   }
     491    56887674 :   last_processed_use_ = use_pos;
     492           0 :   return use_pos;
     493             : }
     494             : 
     495    28296988 : UsePosition* LiveRange::NextUsePositionRegisterIsBeneficial(
     496             :     LifetimePosition start) const {
     497             :   UsePosition* pos = NextUsePosition(start);
     498    79507228 :   while (pos != nullptr && !pos->RegisterIsBeneficial()) {
     499             :     pos = pos->next();
     500             :   }
     501    28296988 :   return pos;
     502             : }
     503             : 
     504           0 : LifetimePosition LiveRange::NextLifetimePositionRegisterIsBeneficial(
     505             :     const LifetimePosition& start) const {
     506    12145182 :   UsePosition* next_use = NextUsePositionRegisterIsBeneficial(start);
     507    12145187 :   if (next_use == nullptr) return End();
     508             :   return next_use->pos();
     509             : }
     510             : 
     511           0 : UsePosition* LiveRange::PreviousUsePositionRegisterIsBeneficial(
     512             :     LifetimePosition start) const {
     513             :   UsePosition* pos = first_pos();
     514             :   UsePosition* prev = nullptr;
     515      316307 :   while (pos != nullptr && pos->pos() < start) {
     516      254866 :     if (pos->RegisterIsBeneficial()) prev = pos;
     517             :     pos = pos->next();
     518             :   }
     519           0 :   return prev;
     520             : }
     521             : 
     522    25545727 : UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) const {
     523             :   UsePosition* pos = NextUsePosition(start);
     524    74362167 :   while (pos != nullptr && pos->type() != UsePositionType::kRequiresRegister) {
     525             :     pos = pos->next();
     526             :   }
     527    25545727 :   return pos;
     528             : }
     529             : 
     530     2401906 : UsePosition* LiveRange::NextSlotPosition(LifetimePosition start) const {
     531    15043106 :   for (UsePosition* pos = NextUsePosition(start); pos != nullptr;
     532             :        pos = pos->next()) {
     533     6321191 :     if (pos->type() != UsePositionType::kRequiresSlot) continue;
     534             :     return pos;
     535             :   }
     536             :   return nullptr;
     537             : }
     538             : 
     539    13197403 : bool LiveRange::CanBeSpilled(LifetimePosition pos) const {
     540             :   // We cannot spill a live range that has a use requiring a register
     541             :   // at the current or the immediate next position.
     542    13197403 :   UsePosition* use_pos = NextRegisterPosition(pos);
     543    13197408 :   if (use_pos == nullptr) return true;
     544             :   return use_pos->pos() > pos.NextStart().End();
     545             : }
     546             : 
     547    93714379 : bool LiveRange::IsTopLevel() const { return top_level_ == this; }
     548             : 
     549   156721923 : InstructionOperand LiveRange::GetAssignedOperand() const {
     550             :   DCHECK(!IsEmpty());
     551   156721923 :   if (HasRegisterAssigned()) {
     552             :     DCHECK(!spilled());
     553             :     return AllocatedOperand(LocationOperand::REGISTER, representation(),
     554    87984547 :                             assigned_register());
     555             :   }
     556             :   DCHECK(spilled());
     557             :   DCHECK(!HasRegisterAssigned());
     558    68737376 :   if (TopLevel()->HasSpillOperand()) {
     559             :     InstructionOperand* op = TopLevel()->GetSpillOperand();
     560             :     DCHECK(!op->IsUnallocated());
     561    42931150 :     return *op;
     562             :   }
     563    25806226 :   return TopLevel()->GetSpillRangeOperand();
     564             : }
     565             : 
     566           0 : UseInterval* LiveRange::FirstSearchIntervalForPosition(
     567             :     LifetimePosition position) const {
     568   994872898 :   if (current_interval_ == nullptr) return first_interval_;
     569   700882444 :   if (current_interval_->start() > position) {
     570     3133625 :     current_interval_ = nullptr;
     571     2573022 :     return first_interval_;
     572             :   }
     573             :   return current_interval_;
     574             : }
     575             : 
     576           0 : void LiveRange::AdvanceLastProcessedMarker(
     577             :     UseInterval* to_start_of, LifetimePosition but_not_past) const {
     578   570660209 :   if (to_start_of == nullptr) return;
     579   570668799 :   if (to_start_of->start() > but_not_past) return;
     580   286672347 :   LifetimePosition start = current_interval_ == nullptr
     581             :                                ? LifetimePosition::Invalid()
     582   286672347 :                                : current_interval_->start();
     583   286672347 :   if (to_start_of->start() > start) {
     584   113419561 :     current_interval_ = to_start_of;
     585             :   }
     586             : }
     587             : 
     588    47576593 : LiveRange* LiveRange::SplitAt(LifetimePosition position, Zone* zone) {
     589             :   int new_id = TopLevel()->GetNextChildId();
     590             :   LiveRange* child = new (zone) LiveRange(new_id, representation(), TopLevel());
     591    47576186 :   child->set_bundle(bundle_);
     592             :   // If we split, we do so because we're about to switch registers or move
     593             :   // to/from a slot, so there's no value in connecting hints.
     594    47576186 :   DetachAt(position, child, zone, DoNotConnectHints);
     595             : 
     596    47577789 :   child->top_level_ = TopLevel();
     597    47577789 :   child->next_ = next_;
     598    47577789 :   next_ = child;
     599    47577789 :   return child;
     600             : }
     601             : 
     602    80578286 : UsePosition* LiveRange::DetachAt(LifetimePosition position, LiveRange* result,
     603             :                                  Zone* zone,
     604             :                                  HintConnectionOption connect_hints) {
     605             :   DCHECK(Start() < position);
     606             :   DCHECK(End() > position);
     607             :   DCHECK(result->IsEmpty());
     608             :   // Find the last interval that ends before the position. If the
     609             :   // position is contained in one of the intervals in the chain, we
     610             :   // split that interval and use the first part.
     611             :   UseInterval* current = FirstSearchIntervalForPosition(position);
     612             : 
      613             :   // If the split position coincides with the beginning of a use interval,
      614             :   // we need to split use positions in a special way.
     615             :   bool split_at_start = false;
     616             : 
     617    80578286 :   if (current->start() == position) {
     618             :     // When splitting at start we need to locate the previous use interval.
     619        1307 :     current = first_interval_;
     620             :   }
     621             : 
     622             :   UseInterval* after = nullptr;
     623   101087890 :   while (current != nullptr) {
     624   101085910 :     if (current->Contains(position)) {
     625             :       after = current->SplitAt(position, zone);
     626    51789800 :       break;
     627             :     }
     628             :     UseInterval* next = current->next();
     629    49296552 :     if (next->start() >= position) {
     630             :       split_at_start = (next->start() == position);
     631             :       after = next;
     632             :       current->set_next(nullptr);
     633             :       break;
     634             :     }
     635             :     current = next;
     636             :   }
     637             :   DCHECK_NOT_NULL(after);
     638             : 
     639             :   // Partition original use intervals to the two live ranges.
     640             :   UseInterval* before = current;
     641             :   result->last_interval_ =
     642    80578728 :       (last_interval_ == before)
     643             :           ? after            // Only interval in the range after split.
     644    80578728 :           : last_interval_;  // Last interval of the original range.
     645    80578728 :   result->first_interval_ = after;
     646    80578728 :   last_interval_ = before;
     647             : 
     648             :   // Find the last use position before the split and the first use
     649             :   // position after it.
     650             :   UsePosition* use_after =
     651    95939335 :       splitting_pointer_ == nullptr || splitting_pointer_->pos() > position
     652             :           ? first_pos()
     653   147270727 :           : splitting_pointer_;
     654             :   UsePosition* use_before = nullptr;
     655    80578728 :   if (split_at_start) {
     656             :     // The split position coincides with the beginning of a use interval (the
      657             :     // end of a lifetime hole). A use at this position belongs to the split
      658             :     // child, because the split child owns the use interval covering it.
     659     6429724 :     while (use_after != nullptr && use_after->pos() < position) {
     660             :       use_before = use_after;
     661             :       use_after = use_after->next();
     662             :     }
     663             :   } else {
     664   132468543 :     while (use_after != nullptr && use_after->pos() <= position) {
     665             :       use_before = use_after;
     666             :       use_after = use_after->next();
     667             :     }
     668             :   }
     669             : 
     670             :   // Partition original use positions to the two live ranges.
     671    80578728 :   if (use_before != nullptr) {
     672             :     use_before->set_next(nullptr);
     673             :   } else {
     674    49914286 :     first_pos_ = nullptr;
     675             :   }
     676    80578728 :   result->first_pos_ = use_after;
     677             : 
     678             :   // Discard cached iteration state. It might be pointing
      679             :   // to a use that no longer belongs to this live range.
     680    80578728 :   last_processed_use_ = nullptr;
     681    80578728 :   current_interval_ = nullptr;
     682             : 
     683    97959896 :   if (connect_hints == ConnectHints && use_before != nullptr &&
     684    17381168 :       use_after != nullptr) {
     685             :     use_after->SetHint(use_before);
     686             :   }
     687             : #ifdef DEBUG
     688             :   VerifyChildStructure();
     689             :   result->VerifyChildStructure();
     690             : #endif
     691    80578728 :   return use_before;
     692             : }
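
A simplified sketch (plain heap nodes, intervals only, no use positions or
cached search state) of the chain surgery DetachAt performs above: walk the
sorted interval chain, split the interval containing `pos` or cut the chain
at the first interval starting at or after `pos`, and return the detached
tail. `Node` and `DetachChainAt` are illustrative names, and the assumption
mirrors DetachAt's DCHECKs that `pos` lies strictly inside the range:

  struct Node {
    int start, end;  // half-open [start, end)
    Node* next;
  };

  // Returns the head of the detached tail; `head` keeps the part before pos.
  // Assumes head->start < pos < the end of the last node.
  Node* DetachChainAt(Node* head, int pos) {
    for (Node* current = head; current != nullptr;) {
      if (current->start < pos && pos < current->end) {
        // pos falls inside this interval: split it in two, as
        // UseInterval::SplitAt does above.
        Node* after = new Node{pos, current->end, current->next};
        current->end = pos;
        current->next = nullptr;
        return after;
      }
      Node* next = current->next;
      if (next != nullptr && next->start >= pos) {
        // pos falls in the lifetime hole before `next` (or exactly at its
        // start): cut the chain between the two intervals.
        current->next = nullptr;
        return next;
      }
      current = next;
    }
    return nullptr;  // unreachable under the stated assumption
  }
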
     693             : 
     694           0 : void LiveRange::UpdateParentForAllChildren(TopLevelLiveRange* new_top_level) {
     695             :   LiveRange* child = this;
     696    48541445 :   for (; child != nullptr; child = child->next()) {
     697    42780138 :     child->top_level_ = new_top_level;
     698             :   }
     699           0 : }
     700             : 
     701    94180829 : void LiveRange::ConvertUsesToOperand(const InstructionOperand& op,
     702             :                                      const InstructionOperand& spill_op) {
     703   323000607 :   for (UsePosition* pos = first_pos(); pos != nullptr; pos = pos->next()) {
     704             :     DCHECK(Start() <= pos->pos() && pos->pos() <= End());
     705   114409889 :     if (!pos->HasOperand()) continue;
     706   113591559 :     switch (pos->type()) {
     707             :       case UsePositionType::kRequiresSlot:
     708             :         DCHECK(spill_op.IsStackSlot() || spill_op.IsFPStackSlot());
     709             :         InstructionOperand::ReplaceWith(pos->operand(), &spill_op);
     710             :         break;
     711             :       case UsePositionType::kRequiresRegister:
     712             :         DCHECK(op.IsRegister() || op.IsFPRegister());
     713             :         V8_FALLTHROUGH;
     714             :       case UsePositionType::kRegisterOrSlot:
     715             :       case UsePositionType::kRegisterOrSlotOrConstant:
     716             :         InstructionOperand::ReplaceWith(pos->operand(), &op);
     717             :         break;
     718             :     }
     719             :   }
     720    94180829 : }
     721             : 
      722             : // This implements an ordering on live ranges by their start positions. This
      723             : // is needed for the correctness of the register allocation algorithm. If two
      724             : // live ranges start at the same offset, there is a tie breaker based on
      725             : // where the value is first used. This part of the ordering is merely a
      726             : // heuristic.
     727   390412007 : bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
     728             :   LifetimePosition start = Start();
     729             :   LifetimePosition other_start = other->Start();
     730   390412007 :   if (start == other_start) {
      731             :     // Prefer the register that has a control flow hint, to make sure it gets
      732             :     // allocated first. This allows the control-flow-aware allocation to just
      733             :     // put ranges back into the queue without other ranges interfering.
     734    24154725 :     if (controlflow_hint() < other->controlflow_hint()) {
     735             :       return true;
     736             :     }
     737             :     // The other has a smaller hint.
     738    24154729 :     if (other->controlflow_hint() != kUnassignedRegister) {
     739             :       return false;
     740             :     }
     741             :     // No hint, use first use position.
     742             :     UsePosition* pos = first_pos();
     743             :     UsePosition* other_pos = other->first_pos();
     744             :     // To make the order total, handle the case where both positions are null.
     745    24154764 :     if (pos == other_pos) return TopLevel()->vreg() < other->TopLevel()->vreg();
     746    14570061 :     if (pos == nullptr) return false;
     747    14237586 :     if (other_pos == nullptr) return true;
     748             :     // To make the order total, handle the case where both positions are equal.
     749    13845054 :     if (pos->pos() == other_pos->pos())
     750     8099377 :       return TopLevel()->vreg() < other->TopLevel()->vreg();
     751             :     return pos->pos() < other_pos->pos();
     752             :   }
     753   366257282 :   return start < other_start;
     754             : }
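
A sketch of the documented ordering as a plain strict-weak comparator over
simplified records. All field names are illustrative; `INT_MAX` stands in
for kUnassignedRegister as the "no hint" sentinel. The key points are the
primary sort on the start position and the vreg tie-breaker that keeps the
order total when hints and first-use positions cannot decide:

  #include <climits>

  struct RangeKey {
    int start;      // primary key: first lifetime position
    int hint;       // control flow hint; INT_MAX when unassigned
    bool has_use;   // whether the range has any use position
    int first_use;  // position of the first use (meaningful if has_use)
    int vreg;       // virtual register number, unique per top-level range
  };

  bool AllocatedBefore(const RangeKey& a, const RangeKey& b) {
    if (a.start != b.start) return a.start < b.start;
    if (a.hint != b.hint) return a.hint < b.hint;  // smaller hint goes first
    if (a.hint != INT_MAX) return false;           // equal assigned hints: tie
    if (a.has_use != b.has_use) return a.has_use;  // ranges with uses first
    if (!a.has_use || a.first_use == b.first_use) return a.vreg < b.vreg;
    return a.first_use < b.first_use;
  }
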
     755             : 
     756           0 : void LiveRange::SetUseHints(int register_index) {
     757   139646699 :   for (UsePosition* pos = first_pos(); pos != nullptr; pos = pos->next()) {
     758    96081382 :     if (!pos->HasOperand()) continue;
     759    95258565 :     switch (pos->type()) {
     760             :       case UsePositionType::kRequiresSlot:
     761             :         break;
     762             :       case UsePositionType::kRequiresRegister:
     763             :       case UsePositionType::kRegisterOrSlot:
     764             :       case UsePositionType::kRegisterOrSlotOrConstant:
     765             :         pos->set_assigned_register(register_index);
     766             :         break;
     767             :     }
     768             :   }
     769           0 : }
     770             : 
     771           0 : bool LiveRange::CanCover(LifetimePosition position) const {
     772   273206106 :   if (IsEmpty()) return false;
     773   273206603 :   return Start() <= position && position < End();
     774             : }
     775             : 
     776   272550165 : bool LiveRange::Covers(LifetimePosition position) const {
     777   272550165 :   if (!CanCover(position)) return false;
     778             :   UseInterval* start_search = FirstSearchIntervalForPosition(position);
     779   552353800 :   for (UseInterval* interval = start_search; interval != nullptr;
     780             :        interval = interval->next()) {
     781             :     DCHECK(interval->next() == nullptr ||
     782             :            interval->next()->start() >= interval->start());
     783             :     AdvanceLastProcessedMarker(interval, position);
     784   383852927 :     if (interval->Contains(position)) return true;
     785   265674253 :     if (interval->start() > position) return false;
     786             :   }
     787             :   return false;
     788             : }
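
A sketch of why Covers() above needs the interval walk on top of the
[Start(), End()) hull test in CanCover(): a live range is a list of disjoint
half-open intervals, and a position inside the hull can still fall in a
"lifetime hole" between two intervals. `Span` and `CoversPosition` are
illustrative stand-ins:

  #include <vector>

  struct Span {
    int start, end;  // half-open [start, end)
  };

  // `intervals` must be sorted and disjoint, as in a LiveRange.
  bool CoversPosition(const std::vector<Span>& intervals, int position) {
    if (intervals.empty()) return false;
    // Hull test, analogous to CanCover().
    if (position < intervals.front().start ||
        position >= intervals.back().end) {
      return false;
    }
    for (const Span& span : intervals) {
      if (span.start <= position && position < span.end) return true;
      if (span.start > position) return false;  // position sits in a hole
    }
    return false;
  }

  // E.g. with intervals {{0, 4}, {10, 14}}, position 6 lies inside the hull
  // [0, 14) but in the hole between the spans, so CoversPosition is false.
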
     789             : 
     790           0 : LifetimePosition LiveRange::NextEndAfter(LifetimePosition position) const {
     791             :   UseInterval* start_search = FirstSearchIntervalForPosition(position);
     792   124652431 :   while (start_search->end() < position) {
     793             :     start_search = start_search->next();
     794             :   }
     795           0 :   return start_search->end();
     796             : }
     797             : 
     798           0 : LifetimePosition LiveRange::NextStartAfter(LifetimePosition position) const {
     799             :   UseInterval* start_search = FirstSearchIntervalForPosition(position);
     800   240146442 :   while (start_search->start() < position) {
     801             :     start_search = start_search->next();
     802             :   }
     803           0 :   return start_search->start();
     804             : }
     805             : 
     806   428497420 : LifetimePosition LiveRange::FirstIntersection(LiveRange* other) const {
     807             :   UseInterval* b = other->first_interval();
     808   428497420 :   if (b == nullptr) return LifetimePosition::Invalid();
     809             :   LifetimePosition advance_last_processed_up_to = b->start();
     810             :   UseInterval* a = FirstSearchIntervalForPosition(b->start());
     811  1135001758 :   while (a != nullptr && b != nullptr) {
     812   732386408 :     if (a->start() > other->End()) break;
     813   677071039 :     if (b->start() > End()) break;
     814   671281406 :     LifetimePosition cur_intersection = a->Intersect(b);
     815   671246969 :     if (cur_intersection.IsValid()) {
     816    75142891 :       return cur_intersection;
     817             :     }
     818   596104078 :     if (a->start() < b->start()) {
     819             :       a = a->next();
     820   429659191 :       if (a == nullptr || a->start() > other->End()) break;
     821             :       AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
     822             :     } else {
     823             :       b = b->next();
     824             :     }
     825             :   }
     826             :   return LifetimePosition::Invalid();
     827             : }
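
A sketch of the two-pointer sweep in FirstIntersection above: repeatedly
advance whichever sorted interval list starts earlier until a pair of
intervals overlaps, then report the start of the overlap, which matches what
the Intersect call above returns on overlap (the later of the two starts).
`Range`, `kInvalid`, and `FirstIntersectionPoint` are illustrative names:

  #include <algorithm>
  #include <cstddef>
  #include <vector>

  struct Range {
    int start, end;  // half-open [start, end)
  };

  constexpr int kInvalid = -1;

  int FirstIntersectionPoint(const std::vector<Range>& a,
                             const std::vector<Range>& b) {
    size_t i = 0, j = 0;
    while (i < a.size() && j < b.size()) {
      // Two half-open intervals overlap iff max(starts) < min(ends).
      int start = std::max(a[i].start, b[j].start);
      int end = std::min(a[i].end, b[j].end);
      if (start < end) return start;  // first common position
      // No overlap: the interval that starts (and hence ends) earlier cannot
      // intersect anything later in the other list, so advance past it.
      if (a[i].start < b[j].start) {
        ++i;
      } else {
        ++j;
      }
    }
    return kInvalid;
  }
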
     828             : 
     829           0 : void LiveRange::Print(const RegisterConfiguration* config,
     830             :                       bool with_children) const {
     831           0 :   StdoutStream os;
     832             :   PrintableLiveRange wrapper;
     833           0 :   wrapper.register_configuration_ = config;
     834           0 :   for (const LiveRange* i = this; i != nullptr; i = i->next()) {
     835           0 :     wrapper.range_ = i;
     836           0 :     os << wrapper << std::endl;
     837           0 :     if (!with_children) break;
     838             :   }
     839           0 : }
     840             : 
     841           0 : void LiveRange::Print(bool with_children) const {
     842           0 :   Print(RegisterConfiguration::Default(), with_children);
     843           0 : }
     844             : 
     845           0 : bool LiveRange::RegisterFromBundle(int* hint) const {
     846    50053250 :   if (bundle_ == nullptr || bundle_->reg() == kUnassignedRegister) return false;
     847     2866315 :   *hint = bundle_->reg();
     848           0 :   return true;
     849             : }
     850             : 
     851           0 : void LiveRange::UpdateBundleRegister(int reg) const {
     852    43565317 :   if (bundle_ == nullptr || bundle_->reg() != kUnassignedRegister) return;
     853             :   bundle_->set_reg(reg);
     854             : }
     855             : 
     856             : struct TopLevelLiveRange::SpillMoveInsertionList : ZoneObject {
     857             :   SpillMoveInsertionList(int gap_index, InstructionOperand* operand,
     858             :                          SpillMoveInsertionList* next)
     859    27473982 :       : gap_index(gap_index), operand(operand), next(next) {}
     860             :   const int gap_index;
     861             :   InstructionOperand* const operand;
     862             :   SpillMoveInsertionList* const next;
     863             : };
     864             : 
     865          71 : TopLevelLiveRange::TopLevelLiveRange(int vreg, MachineRepresentation rep)
     866             :     : LiveRange(0, rep, this),
     867             :       vreg_(vreg),
     868             :       last_child_id_(0),
     869             :       splintered_from_(nullptr),
     870             :       spill_operand_(nullptr),
     871             :       spill_move_insertion_locations_(nullptr),
     872             :       spilled_in_deferred_blocks_(false),
     873             :       spill_start_index_(kMaxInt),
     874             :       last_pos_(nullptr),
     875             :       last_child_covers_(this),
     876             :       splinter_(nullptr),
     877   116740969 :       has_preassigned_slot_(false) {
     878             :   bits_ |= SpillTypeField::encode(SpillType::kNoSpillType);
     879          71 : }
     880             : 
     881             : #if DEBUG
     882             : int TopLevelLiveRange::debug_virt_reg() const {
     883             :   return IsSplinter() ? splintered_from()->vreg() : vreg();
     884             : }
     885             : #endif
     886             : 
     887           0 : void TopLevelLiveRange::RecordSpillLocation(Zone* zone, int gap_index,
     888             :                                             InstructionOperand* operand) {
     889             :   DCHECK(HasNoSpillType());
     890             :   spill_move_insertion_locations_ = new (zone) SpillMoveInsertionList(
     891    54947964 :       gap_index, operand, spill_move_insertion_locations_);
     892           0 : }
     893             : 
     894    19679909 : void TopLevelLiveRange::CommitSpillMoves(InstructionSequence* sequence,
     895             :                                          const InstructionOperand& op,
     896             :                                          bool might_be_duplicated) {
     897             :   DCHECK_IMPLIES(op.IsConstant(), GetSpillMoveInsertionLocations() == nullptr);
     898             :   Zone* zone = sequence->zone();
     899             : 
     900     4512891 :   for (SpillMoveInsertionList* to_spill = GetSpillMoveInsertionLocations();
     901    24192800 :        to_spill != nullptr; to_spill = to_spill->next) {
     902     4512787 :     Instruction* instr = sequence->InstructionAt(to_spill->gap_index);
     903             :     ParallelMove* move =
     904             :         instr->GetOrCreateParallelMove(Instruction::START, zone);
     905             :     // Skip insertion if it's possible that the move exists already as a
     906             :     // constraint move from a fixed output register to a slot.
     907     4512798 :     if (might_be_duplicated || has_preassigned_slot()) {
     908             :       bool found = false;
     909     1923502 :       for (MoveOperands* move_op : *move) {
     910     1216615 :         if (move_op->IsEliminated()) continue;
     911     3641128 :         if (move_op->source().Equals(*to_spill->operand) &&
     912             :             move_op->destination().Equals(op)) {
     913             :           found = true;
     914      750889 :           if (has_preassigned_slot()) move_op->Eliminate();
     915             :           break;
     916             :         }
     917             :       }
     918     1457776 :       if (found) continue;
     919             :     }
     920     3761976 :     if (!has_preassigned_slot()) {
     921     3326298 :       move->AddMove(*to_spill->operand, op);
     922             :     }
     923             :   }
     924    19680013 : }
     925             : 
     926           0 : void TopLevelLiveRange::SetSpillOperand(InstructionOperand* operand) {
     927             :   DCHECK(HasNoSpillType());
     928             :   DCHECK(!operand->IsUnallocated() && !operand->IsImmediate());
     929             :   set_spill_type(SpillType::kSpillOperand);
     930    15169028 :   spill_operand_ = operand;
     931           0 : }
     932             : 
     933           0 : void TopLevelLiveRange::SetSpillRange(SpillRange* spill_range) {
     934             :   DCHECK(!HasSpillOperand());
     935             :   DCHECK(spill_range);
     936    11150476 :   spill_range_ = spill_range;
     937           0 : }
     938             : 
     939           0 : AllocatedOperand TopLevelLiveRange::GetSpillRangeOperand() const {
     940             :   SpillRange* spill_range = GetSpillRange();
     941             :   int index = spill_range->assigned_slot();
     942           0 :   return AllocatedOperand(LocationOperand::STACK_SLOT, representation(), index);
     943             : }
     944             : 
     945    17381305 : void TopLevelLiveRange::Splinter(LifetimePosition start, LifetimePosition end,
     946             :                                  Zone* zone) {
     947             :   DCHECK(start != Start() || end != End());
     948             :   DCHECK(start < end);
     949             : 
     950             :   TopLevelLiveRange splinter_temp(-1, representation());
     951             :   UsePosition* last_in_splinter = nullptr;
     952             :   // Live ranges defined in deferred blocks stay in deferred blocks, so we
     953             :   // don't need to splinter them. That means that start should always be
     954             :   // after the beginning of the range.
     955             :   DCHECK(start > Start());
     956             : 
     957    17381305 :   if (end >= End()) {
     958             :     DCHECK(start > Start());
     959     1761872 :     DetachAt(start, &splinter_temp, zone, ConnectHints);
     960     1761978 :     next_ = nullptr;
     961             :   } else {
     962             :     DCHECK(start < End() && Start() < end);
     963             : 
     964             :     const int kInvalidId = std::numeric_limits<int>::max();
     965             : 
     966    15619433 :     UsePosition* last = DetachAt(start, &splinter_temp, zone, ConnectHints);
     967             : 
     968             :     LiveRange end_part(kInvalidId, this->representation(), nullptr);
     969             :     // The last chunk exits the deferred region, and we don't want to connect
     970             :     // hints here, because the non-deferred region shouldn't be affected
     971             :     // by allocation decisions on the deferred path.
     972             :     last_in_splinter =
     973    15619277 :         splinter_temp.DetachAt(end, &end_part, zone, DoNotConnectHints);
     974             : 
     975    15619241 :     next_ = end_part.next_;
     976    15619241 :     last_interval_->set_next(end_part.first_interval_);
     977             :     // The next splinter will happen either at or after the current interval.
     978             :     // We can optimize DetachAt by setting current_interval_ accordingly,
     979             :     // which will then be picked up by FirstSearchIntervalForPosition.
     980    15619241 :     current_interval_ = last_interval_;
     981    15619241 :     last_interval_ = end_part.last_interval_;
     982             : 
     983    15619241 :     if (first_pos_ == nullptr) {
     984     1132499 :       first_pos_ = end_part.first_pos_;
     985             :     } else {
     986    14486742 :       splitting_pointer_ = last;
     987    14486742 :       if (last != nullptr) last->set_next(end_part.first_pos_);
     988             :     }
     989             :   }
     990             : 
     991    17381219 :   if (splinter()->IsEmpty()) {
     992     5761282 :     splinter()->first_interval_ = splinter_temp.first_interval_;
     993     5761282 :     splinter()->last_interval_ = splinter_temp.last_interval_;
     994             :   } else {
     995    11619937 :     splinter()->last_interval_->set_next(splinter_temp.first_interval_);
     996    11619937 :     splinter()->last_interval_ = splinter_temp.last_interval_;
     997             :   }
     998    17381219 :   if (splinter()->first_pos() == nullptr) {
     999    13657988 :     splinter()->first_pos_ = splinter_temp.first_pos_;
    1000             :   } else {
    1001     3723231 :     splinter()->last_pos_->set_next(splinter_temp.first_pos_);
    1002             :   }
    1003    17381219 :   if (last_in_splinter != nullptr) {
    1004     2923882 :     splinter()->last_pos_ = last_in_splinter;
    1005             :   } else {
    1006    18517461 :     if (splinter()->first_pos() != nullptr &&
    1007     4060124 :         splinter()->last_pos_ == nullptr) {
    1008     1456533 :       splinter()->last_pos_ = splinter()->first_pos();
    1009     4671957 :       for (UsePosition* pos = splinter()->first_pos(); pos != nullptr;
    1010             :            pos = pos->next()) {
    1011     1607712 :         splinter()->last_pos_ = pos;
    1012             :       }
    1013             :     }
    1014             :   }
    1015             : #ifdef DEBUG
    1016             :   Verify();
    1017             :   splinter()->Verify();
    1018             : #endif
    1019    17381219 : }
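
For orientation, here is a minimal standalone sketch of the three-way split Splinter performs on the interval list. The Interval type and DetachAt helper below are hypothetical stand-ins for UseInterval and LiveRange::DetachAt, not the real API: everything from start onward is detached into a temporary, and when the splinter ends before the range does, the part from end onward is cut back out of the temporary and re-attached as the original range's tail.

// Hedged sketch, not V8 code: half-open [start, end) intervals in a singly
// linked list; memory management is elided for brevity.
#include <cstdio>

struct Interval {
  Interval(int s, int e, Interval* n = nullptr) : start(s), end(e), next(n) {}
  int start, end;
  Interval* next;
};

// Detaches everything at or after pos from *head, splitting the interval
// that straddles pos. Returns the detached suffix.
Interval* DetachAt(Interval** head, int pos) {
  Interval** cursor = head;
  while (*cursor != nullptr && (*cursor)->end <= pos) cursor = &(*cursor)->next;
  Interval* suffix = *cursor;
  if (suffix != nullptr && suffix->start < pos) {
    // Straddling interval: keep [start, pos) in place, move [pos, end) out.
    suffix = new Interval(pos, (*cursor)->end, (*cursor)->next);
    (*cursor)->end = pos;
    cursor = &(*cursor)->next;
  }
  *cursor = nullptr;
  return suffix;
}

int main() {
  // A range covering [0,10) and [20,30); splinter out [5,25).
  Interval* range = new Interval(0, 10, new Interval(20, 30));
  Interval* splinter = DetachAt(&range, 5);  // range [0,5); splinter [5,10)[20,30)
  Interval* tail = DetachAt(&splinter, 25);  // splinter [5,10)[20,25); tail [25,30)
  range->next = tail;  // re-attach the non-splintered tail, as Splinter does
  for (Interval* i = range; i != nullptr; i = i->next)
    std::printf("[%d,%d) ", i->start, i->end);  // prints: [0,5) [25,30)
  std::printf("\n");
  return 0;
}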
    1020             : 
    1021     5761117 : void TopLevelLiveRange::SetSplinteredFrom(TopLevelLiveRange* splinter_parent) {
    1022     5761117 :   splintered_from_ = splinter_parent;
    1023     5761117 :   if (!HasSpillOperand() && splinter_parent->spill_range_ != nullptr) {
    1024     3267274 :     SetSpillRange(splinter_parent->spill_range_);
    1025             :   }
    1026     5761117 : }
    1027             : 
    1028     5761287 : void TopLevelLiveRange::UpdateSpillRangePostMerge(TopLevelLiveRange* merged) {
    1029             :   DCHECK(merged->TopLevel() == this);
    1030             : 
    1031     6900556 :   if (HasNoSpillType() && merged->HasSpillRange()) {
    1032             :     set_spill_type(merged->spill_type());
    1033             :     DCHECK_LT(0, GetSpillRange()->live_ranges().size());
    1034      750817 :     merged->spill_range_ = nullptr;
    1035             :     merged->bits_ =
    1036     1501634 :         SpillTypeField::update(merged->bits_, SpillType::kNoSpillType);
    1037             :   }
    1038     5761287 : }
    1039             : 
    1040     5761331 : void TopLevelLiveRange::Merge(TopLevelLiveRange* other, Zone* zone) {
    1041             :   DCHECK(Start() < other->Start());
    1042             :   DCHECK(other->splintered_from() == this);
    1043             : 
    1044     5761331 :   LiveRange* first = this;
    1045             :   LiveRange* second = other;
    1046             :   DCHECK(first->Start() < second->Start());
    1047    64996316 :   while (first != nullptr && second != nullptr) {
    1048             :     DCHECK(first != second);
    1049             :     // Make sure the ranges are in order each time we iterate.
    1050    59235009 :     if (second->Start() < first->Start()) {
    1051             :       LiveRange* tmp = second;
    1052             :       second = first;
    1053             :       first = tmp;
    1054             :       continue;
    1055             :     }
    1056             : 
    1057    34732329 :     if (first->End() <= second->Start()) {
    1058    10256984 :       if (first->next() == nullptr ||
    1059             :           first->next()->Start() > second->Start()) {
    1060             :         // First is in order before second.
    1061             :         LiveRange* temp = first->next();
    1062     5789129 :         first->next_ = second;
    1063             :         first = temp;
    1064             :       } else {
    1065             :         // First is in order before its successor (or second), so advance first.
    1066             :         first = first->next();
    1067             :       }
    1068             :       continue;
    1069             :     }
    1070             : 
    1071             :     DCHECK(first->Start() < second->Start());
    1072             :     // If first and second intersect, split first.
    1073    24475345 :     if (first->Start() < second->End() && second->Start() < first->End()) {
    1074    24475348 :       LiveRange* temp = first->SplitAt(second->Start(), zone);
    1075    24475324 :       CHECK(temp != first);
    1076             :       temp->set_spilled(first->spilled());
    1077    24475324 :       if (!temp->spilled())
    1078             :         temp->set_assigned_register(first->assigned_register());
    1079             : 
    1080    24475324 :       first->next_ = second;
    1081             :       first = temp;
    1082    24475324 :       continue;
    1083             :     }
    1084             :     DCHECK(first->End() <= second->Start());
    1085             :   }
    1086             : 
    1087     5761307 :   TopLevel()->UpdateParentForAllChildren(TopLevel());
    1088     5761307 :   TopLevel()->UpdateSpillRangePostMerge(other);
    1089             :   TopLevel()->register_slot_use(other->slot_use_kind());
    1090             : 
    1091             : #ifdef DEBUG
    1092             :   Verify();
    1093             : #endif
    1094     5761287 : }
    1095             : 
    1096           0 : void TopLevelLiveRange::VerifyChildrenInOrder() const {
    1097             :   LifetimePosition last_end = End();
    1098           0 :   for (const LiveRange* child = this->next(); child != nullptr;
    1099             :        child = child->next()) {
    1100             :     DCHECK(last_end <= child->Start());
    1101             :     last_end = child->End();
    1102             :   }
    1103           0 : }
    1104             : 
    1105           0 : LiveRange* TopLevelLiveRange::GetChildCovers(LifetimePosition pos) {
    1106           0 :   LiveRange* child = last_child_covers_;
    1107           0 :   while (child != nullptr && child->End() <= pos) {
    1108             :     child = child->next();
    1109             :   }
    1110           0 :   last_child_covers_ = child;
    1111           0 :   return !child || !child->Covers(pos) ? nullptr : child;
    1112             : }
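
GetChildCovers depends on queries arriving in nondecreasing position order: the cached last_child_covers_ only ever advances, which makes a whole monotone query sequence linear in the number of children rather than quadratic. A hypothetical standalone model of the same caching idea (toy Segment type, not the real LiveRange child list):

// Hedged sketch, not V8 code.
#include <cstdio>
#include <vector>

struct Segment { int start, end; };  // half-open [start, end), sorted, disjoint

class SegmentIndex {
 public:
  explicit SegmentIndex(std::vector<Segment> segments)
      : segments_(std::move(segments)) {}

  // Returns the segment covering pos, or nullptr. Queries must come in
  // nondecreasing order of pos so the scan can resume at the cached index.
  const Segment* Covering(int pos) {
    while (last_ < segments_.size() && segments_[last_].end <= pos) ++last_;
    if (last_ == segments_.size()) return nullptr;
    const Segment& s = segments_[last_];
    return (s.start <= pos && pos < s.end) ? &s : nullptr;
  }

 private:
  std::vector<Segment> segments_;
  size_t last_ = 0;  // plays the role of last_child_covers_
};

int main() {
  SegmentIndex index({{0, 4}, {6, 10}, {12, 20}});
  std::printf("%d\n", index.Covering(2) != nullptr);   // 1: inside [0,4)
  std::printf("%d\n", index.Covering(5) != nullptr);   // 0: falls in a gap
  std::printf("%d\n", index.Covering(13) != nullptr);  // 1: inside [12,20)
  return 0;
}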
    1113             : 
    1114           0 : void TopLevelLiveRange::Verify() const {
    1115             :   VerifyChildrenInOrder();
    1116           0 :   for (const LiveRange* child = this; child != nullptr; child = child->next()) {
    1117             :     child->VerifyChildStructure();
    1118             :   }
    1119           0 : }
    1120             : 
    1121           0 : void TopLevelLiveRange::ShortenTo(LifetimePosition start) {
    1122    69979090 :   TRACE("Shorten live range %d to [%d\n", vreg(), start.value());
    1123             :   DCHECK_NOT_NULL(first_interval_);
    1124             :   DCHECK(first_interval_->start() <= start);
    1125             :   DCHECK(start < first_interval_->end());
    1126    69979535 :   first_interval_->set_start(start);
    1127           0 : }
    1128             : 
    1129     2121441 : void TopLevelLiveRange::EnsureInterval(LifetimePosition start,
    1130             :                                        LifetimePosition end, Zone* zone) {
    1131     2121441 :   TRACE("Ensure live range %d in interval [%d %d[\n", vreg(), start.value(),
    1132             :         end.value());
    1133             :   LifetimePosition new_end = end;
    1134    10179613 :   while (first_interval_ != nullptr && first_interval_->start() <= end) {
    1135     4029086 :     if (first_interval_->end() > end) {
    1136             :       new_end = first_interval_->end();
    1137             :     }
    1138     4029086 :     first_interval_ = first_interval_->next();
    1139             :   }
    1140             : 
    1141     2121441 :   UseInterval* new_interval = new (zone) UseInterval(start, new_end);
    1142     2121442 :   new_interval->set_next(first_interval_);
    1143     2121442 :   first_interval_ = new_interval;
    1144     2121442 :   if (new_interval->next() == nullptr) {
    1145      894214 :     last_interval_ = new_interval;
    1146             :   }
    1147     2121442 : }
    1148             : 
    1149   411023041 : void TopLevelLiveRange::AddUseInterval(LifetimePosition start,
    1150             :                                        LifetimePosition end, Zone* zone) {
    1151   411023041 :   TRACE("Add to live range %d interval [%d %d[\n", vreg(), start.value(),
    1152             :         end.value());
    1153   411049792 :   if (first_interval_ == nullptr) {
    1154    91419269 :     UseInterval* interval = new (zone) UseInterval(start, end);
    1155    91418992 :     first_interval_ = interval;
    1156    91418992 :     last_interval_ = interval;
    1157             :   } else {
    1158   319630523 :     if (end == first_interval_->start()) {
    1159             :       first_interval_->set_start(start);
    1160   196842177 :     } else if (end < first_interval_->start()) {
    1161   132049798 :       UseInterval* interval = new (zone) UseInterval(start, end);
    1162   132049555 :       interval->set_next(first_interval_);
    1163   132049555 :       first_interval_ = interval;
    1164             :     } else {
    1165             :       // The order of instruction processing (see ProcessInstructions)
    1166             :       // guarantees that each new use interval either precedes, intersects
    1167             :       // with, or touches the previously added interval.
    1168             :       DCHECK(start <= first_interval_->end());
    1169             :       first_interval_->set_start(Min(start, first_interval_->start()));
    1170    64792379 :       first_interval_->set_end(Max(end, first_interval_->end()));
    1171             :     }
    1172             :   }
    1173   411049272 : }
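
Assuming the ordering ProcessInstructions guarantees, intervals reach AddUseInterval back-to-front, so the function only ever works at the head of the list: a strictly earlier interval is prepended, a touching one extends the head leftward, and an overlapping one is folded into it. A toy deque-based model of the three cases (hypothetical types, not the real UseInterval list):

// Hedged sketch, not V8 code.
#include <algorithm>
#include <cassert>
#include <cstdio>
#include <deque>

struct Interval { int start, end; };  // half-open [start, end)

// Precondition mirroring the DCHECK above: each new interval precedes,
// touches, or intersects the current front (start <= front.end).
void AddUseInterval(std::deque<Interval>* list, int start, int end) {
  if (list->empty() || end < list->front().start) {
    list->push_front({start, end});  // strictly precedes: new front node
  } else if (end == list->front().start) {
    list->front().start = start;     // touches: extend the front leftward
  } else {
    assert(start <= list->front().end);  // overlaps: fold into the front
    list->front().start = std::min(start, list->front().start);
    list->front().end = std::max(end, list->front().end);
  }
}

int main() {
  std::deque<Interval> list;
  AddUseInterval(&list, 20, 30);  // [20,30)
  AddUseInterval(&list, 12, 20);  // touches -> [12,30)
  AddUseInterval(&list, 2, 6);    // precedes -> [2,6) [12,30)
  AddUseInterval(&list, 0, 4);    // overlaps -> [0,6) [12,30)
  for (const Interval& i : list) std::printf("[%d,%d) ", i.start, i.end);
  std::printf("\n");  // prints: [0,6) [12,30)
  return 0;
}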
    1174             : 
    1175   116634040 : void TopLevelLiveRange::AddUsePosition(UsePosition* use_pos) {
    1176             :   LifetimePosition pos = use_pos->pos();
    1177   116634040 :   TRACE("Add to live range %d use position %d\n", vreg(), pos.value());
    1178             :   UsePosition* prev_hint = nullptr;
    1179             :   UsePosition* prev = nullptr;
    1180   116635390 :   UsePosition* current = first_pos_;
    1181   116812262 :   while (current != nullptr && current->pos() < pos) {
    1182       88436 :     prev_hint = current->HasHint() ? current : prev_hint;
    1183             :     prev = current;
    1184             :     current = current->next();
    1185             :   }
    1186             : 
    1187   116635389 :   if (prev == nullptr) {
    1188   116546955 :     use_pos->set_next(first_pos_);
    1189   116546955 :     first_pos_ = use_pos;
    1190             :   } else {
    1191             :     use_pos->set_next(prev->next());
    1192             :     prev->set_next(use_pos);
    1193             :   }
    1194             : 
    1195   233270395 :   if (prev_hint == nullptr && use_pos->HasHint()) {
    1196    26883690 :     current_hint_position_ = use_pos;
    1197             :   }
    1198   116634860 : }
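
AddUsePosition is a plain sorted splice into the singly linked use list plus hint bookkeeping; the hit counts above show the common case inserting directly at the front. A hypothetical pointer-to-pointer version without the hint tracking:

// Hedged sketch, not V8 code.
#include <cstdio>

struct Use {
  Use(int p, Use* n = nullptr) : pos(p), next(n) {}
  int pos;
  Use* next;
};

void AddUse(Use** head, Use* use) {
  Use** cursor = head;
  while (*cursor != nullptr && (*cursor)->pos < use->pos)
    cursor = &(*cursor)->next;
  use->next = *cursor;  // splice before the first position >= use->pos
  *cursor = use;
}

int main() {
  Use* head = nullptr;
  AddUse(&head, new Use(14));
  AddUse(&head, new Use(2));
  AddUse(&head, new Use(8));
  for (Use* u = head; u != nullptr; u = u->next) std::printf("%d ", u->pos);
  std::printf("\n");  // prints: 2 8 14
  return 0;
}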
    1199             : 
    1200             : static bool AreUseIntervalsIntersecting(UseInterval* interval1,
    1201             :                                         UseInterval* interval2) {
    1202   325344285 :   while (interval1 != nullptr && interval2 != nullptr) {
    1203   325105065 :     if (interval1->start() < interval2->start()) {
    1204   103022566 :       if (interval1->end() > interval2->start()) {
    1205             :         return true;
    1206             :       }
    1207             :       interval1 = interval1->next();
    1208             :     } else {
    1209   222082499 :       if (interval2->end() > interval1->start()) {
    1210             :         return true;
    1211             :       }
    1212             :       interval2 = interval2->next();
    1213             :     }
    1214             :   }
    1215             :   return false;
    1216             : }
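
AreUseIntervalsIntersecting is the standard two-pointer sweep over two sorted interval lists: whichever list's head starts earlier either reaches into the other's head (an intersection) or ends before it and can be discarded. A hypothetical vector-based equivalent with the same comparisons:

// Hedged sketch, not V8 code; half-open [start, end) intervals, each list
// sorted and internally disjoint.
#include <cstdio>
#include <vector>

struct Interval { int start, end; };

bool Intersecting(const std::vector<Interval>& a,
                  const std::vector<Interval>& b) {
  size_t i = 0, j = 0;
  while (i < a.size() && j < b.size()) {
    if (a[i].start < b[j].start) {
      if (a[i].end > b[j].start) return true;  // a's head reaches into b's
      ++i;                                     // a's head ends first: drop it
    } else {
      if (b[j].end > a[i].start) return true;
      ++j;
    }
  }
  return false;
}

int main() {
  std::vector<Interval> a = {{0, 4}, {10, 14}};
  std::vector<Interval> b = {{4, 10}};
  std::vector<Interval> c = {{3, 5}};
  std::printf("%d\n", Intersecting(a, b));  // 0: touching ends don't intersect
  std::printf("%d\n", Intersecting(a, c));  // 1: [3,5) overlaps [0,4)
  return 0;
}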
    1217             : 
    1218           0 : std::ostream& operator<<(std::ostream& os,
    1219             :                          const PrintableLiveRange& printable_range) {
    1220           0 :   const LiveRange* range = printable_range.range_;
    1221           0 :   os << "Range: " << range->TopLevel()->vreg() << ":" << range->relative_id()
    1222           0 :      << " ";
    1223           0 :   if (range->TopLevel()->is_phi()) os << "phi ";
    1224           0 :   if (range->TopLevel()->is_non_loop_phi()) os << "nlphi ";
    1225             : 
    1226             :   os << "{" << std::endl;
    1227             :   UseInterval* interval = range->first_interval();
    1228             :   UsePosition* use_pos = range->first_pos();
    1229           0 :   while (use_pos != nullptr) {
    1230           0 :     if (use_pos->HasOperand()) {
    1231           0 :       os << *use_pos->operand() << use_pos->pos() << " ";
    1232             :     }
    1233             :     use_pos = use_pos->next();
    1234             :   }
    1235             :   os << std::endl;
    1236             : 
    1237           0 :   while (interval != nullptr) {
    1238           0 :     os << '[' << interval->start() << ", " << interval->end() << ')'
    1239             :        << std::endl;
    1240             :     interval = interval->next();
    1241             :   }
    1242           0 :   os << "}";
    1243           0 :   return os;
    1244             : }
    1245             : 
    1246             : namespace {
    1247           0 : void PrintBlockRow(std::ostream& os, const InstructionBlocks& blocks) {
    1248           0 :   os << "     ";
    1249           0 :   for (auto block : blocks) {
    1250             :     LifetimePosition start_pos = LifetimePosition::GapFromInstructionIndex(
    1251             :         block->first_instruction_index());
    1252             :     LifetimePosition end_pos = LifetimePosition::GapFromInstructionIndex(
    1253             :                                    block->last_instruction_index())
    1254             :                                    .NextFullStart();
    1255           0 :     int length = end_pos.value() - start_pos.value();
    1256           0 :     constexpr int kMaxPrefixLength = 32;
    1257             :     char buffer[kMaxPrefixLength];
    1258             :     int rpo_number = block->rpo_number().ToInt();
    1259           0 :     const char* deferred_marker = block->IsDeferred() ? "(deferred)" : "";
    1260           0 :     int max_prefix_length = std::min(length, kMaxPrefixLength);
    1261           0 :     int prefix = snprintf(buffer, max_prefix_length, "[-B%d-%s", rpo_number,
    1262           0 :                           deferred_marker);
    1263           0 :     os << buffer;
    1264           0 :     int remaining = length - std::min(prefix, max_prefix_length) - 1;
    1265           0 :     for (int i = 0; i < remaining; ++i) os << '-';
    1266             :     os << ']';
    1267             :   }
    1268             :   os << '\n';
    1269           0 : }
    1270             : }  // namespace
    1271             : 
    1272           0 : void LinearScanAllocator::PrintRangeRow(std::ostream& os,
    1273             :                                         const TopLevelLiveRange* toplevel) {
    1274             :   int position = 0;
    1275           0 :   os << std::setw(3) << toplevel->vreg()
    1276           0 :      << (toplevel->IsSplinter() ? "s:" : ": ");
    1277             : 
    1278             :   const char* kind_string;
    1279           0 :   switch (toplevel->spill_type()) {
    1280             :     case TopLevelLiveRange::SpillType::kSpillRange:
    1281             :       kind_string = "ss";
    1282             :       break;
    1283             :     case TopLevelLiveRange::SpillType::kDeferredSpillRange:
    1284             :       kind_string = "sd";
    1285           0 :       break;
    1286             :     case TopLevelLiveRange::SpillType::kSpillOperand:
    1287             :       kind_string = "so";
    1288           0 :       break;
    1289             :     default:
    1290             :       kind_string = "s?";
    1291             :   }
    1292             : 
    1293           0 :   for (const LiveRange* range = toplevel; range != nullptr;
    1294             :        range = range->next()) {
    1295           0 :     for (UseInterval* interval = range->first_interval(); interval != nullptr;
    1296             :          interval = interval->next()) {
    1297             :       LifetimePosition start = interval->start();
    1298             :       LifetimePosition end = interval->end();
    1299           0 :       CHECK_GE(start.value(), position);
    1300           0 :       for (; start.value() > position; position++) {
    1301             :         os << ' ';
    1302             :       }
    1303           0 :       int length = end.value() - start.value();
    1304           0 :       constexpr int kMaxPrefixLength = 32;
    1305             :       char buffer[kMaxPrefixLength];
    1306           0 :       int max_prefix_length = std::min(length + 1, kMaxPrefixLength);
    1307             :       int prefix;
    1308           0 :       if (range->spilled()) {
    1309           0 :         prefix = snprintf(buffer, max_prefix_length, "|%s", kind_string);
    1310             :       } else {
    1311             :         const char* reg_name;
    1312           0 :         if (range->assigned_register() == kUnassignedRegister) {
    1313             :           reg_name = "???";
    1314             :         } else {
    1315             :           reg_name = RegisterName(range->assigned_register());
    1316             :         }
    1317           0 :         prefix = snprintf(buffer, max_prefix_length, "|%s", reg_name);
    1318             :       }
    1319           0 :       os << buffer;
    1320           0 :       position += std::min(prefix, max_prefix_length - 1);
    1321           0 :       CHECK_GE(end.value(), position);
    1322           0 :       const char line_style = range->spilled() ? '-' : '=';
    1323           0 :       for (; end.value() > position; position++) {
    1324             :         os << line_style;
    1325             :       }
    1326             :     }
    1327             :   }
    1328             :   os << '\n';
    1329           0 : }
    1330             : 
    1331           0 : void LinearScanAllocator::PrintRangeOverview(std::ostream& os) {
    1332           0 :   PrintBlockRow(os, code()->instruction_blocks());
    1333           0 :   for (auto const toplevel : data()->fixed_live_ranges()) {
    1334           0 :     if (toplevel == nullptr) continue;
    1335           0 :     PrintRangeRow(os, toplevel);
    1336             :   }
    1337             :   int rowcount = 0;
    1338           0 :   for (auto toplevel : data()->live_ranges()) {
    1339           0 :     if (!CanProcessRange(toplevel)) continue;
    1340           0 :     if (rowcount++ % 10 == 0) PrintBlockRow(os, code()->instruction_blocks());
    1341           0 :     PrintRangeRow(os, toplevel);
    1342             :   }
    1343           0 : }
    1344             : 
    1345     5919292 : SpillRange::SpillRange(TopLevelLiveRange* parent, Zone* zone)
    1346             :     : live_ranges_(zone),
    1347             :       assigned_slot_(kUnassignedSlot),
    1348    11838584 :       byte_width_(GetByteWidth(parent->representation())) {
    1349             :   // Spill ranges are created for top-level, non-splintered ranges. This is
    1350             :   // so that, when merging decisions are made, we consider the full extent
    1351             :   // of the virtual register and avoid clobbering it.
    1352             :   DCHECK(!parent->IsSplinter());
    1353             :   UseInterval* result = nullptr;
    1354             :   UseInterval* node = nullptr;
    1355             :   // Copy the intervals for all ranges.
    1356    24309494 :   for (LiveRange* range = parent; range != nullptr; range = range->next()) {
    1357             :     UseInterval* src = range->first_interval();
    1358    35141522 :     while (src != nullptr) {
    1359             :       UseInterval* new_node = new (zone) UseInterval(src->start(), src->end());
    1360    12973245 :       if (result == nullptr) {
    1361             :         result = new_node;
    1362             :       } else {
    1363             :         node->set_next(new_node);
    1364             :       }
    1365             :       node = new_node;
    1366             :       src = src->next();
    1367             :     }
    1368             :   }
    1369     5919361 :   use_interval_ = result;
    1370     5919361 :   live_ranges().push_back(parent);
    1371     5919344 :   end_position_ = node->end();
    1372     5919344 :   parent->SetSpillRange(this);
    1373     5919344 : }
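
The copy loop above realizes that comment: the spill range's interval list is the concatenation, across the top-level range and all of its children, of every use interval, so merging decisions see the virtual register's full extent. A toy flattening of the same shape (hypothetical types):

// Hedged sketch, not V8 code.
#include <cstdio>
#include <vector>

struct Interval { int start, end; };
struct Child { std::vector<Interval> intervals; };

std::vector<Interval> CollectSpillIntervals(const std::vector<Child>& children) {
  std::vector<Interval> result;
  // Children are visited in position order, so the flat copy stays sorted.
  for (const Child& child : children) {
    for (const Interval& i : child.intervals) {
      result.push_back(i);
    }
  }
  return result;
}

int main() {
  std::vector<Child> children = {{{{0, 4}, {6, 8}}}, {{{8, 12}}}};
  for (const Interval& i : CollectSpillIntervals(children))
    std::printf("[%d,%d) ", i.start, i.end);  // prints: [0,4) [6,8) [8,12)
  std::printf("\n");
  return 0;
}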
    1374             : 
    1375   260004312 : bool SpillRange::IsIntersectingWith(SpillRange* other) const {
    1376   780012947 :   if (this->use_interval_ == nullptr || other->use_interval_ == nullptr ||
    1377   519940325 :       this->End() <= other->use_interval_->start() ||
    1378             :       other->End() <= this->use_interval_->start()) {
    1379             :     return false;
    1380             :   }
    1381   516606854 :   return AreUseIntervalsIntersecting(use_interval_, other->use_interval_);
    1382             : }
    1383             : 
    1384   260674143 : bool SpillRange::TryMerge(SpillRange* other) {
    1385   260674143 :   if (HasSlot() || other->HasSlot()) return false;
    1386   260187973 :   if (byte_width() != other->byte_width() || IsIntersectingWith(other))
    1387             :     return false;
    1388             : 
    1389             :   LifetimePosition max = LifetimePosition::MaxPosition();
    1390     1939032 :   if (End() < other->End() && other->End() != max) {
    1391       72986 :     end_position_ = other->End();
    1392             :   }
    1393     1939032 :   other->end_position_ = max;
    1394             : 
    1395     1939032 :   MergeDisjointIntervals(other->use_interval_);
    1396     1939032 :   other->use_interval_ = nullptr;
    1397             : 
    1398     3902890 :   for (TopLevelLiveRange* range : other->live_ranges()) {
    1399             :     DCHECK(range->GetSpillRange() == other);
    1400             :     range->SetSpillRange(this);
    1401             :   }
    1402             : 
    1403             :   live_ranges().insert(live_ranges().end(), other->live_ranges().begin(),
    1404     1939032 :                        other->live_ranges().end());
    1405             :   other->live_ranges().clear();
    1406             : 
    1407     1939027 :   return true;
    1408             : }
    1409             : 
    1410           0 : void SpillRange::MergeDisjointIntervals(UseInterval* other) {
    1411             :   UseInterval* tail = nullptr;
    1412     1939032 :   UseInterval* current = use_interval_;
    1413     9092266 :   while (other != nullptr) {
    1414             :     // Make sure the 'current' list starts first
    1415     7153234 :     if (current == nullptr || current->start() > other->start()) {
    1416             :       std::swap(current, other);
    1417             :     }
    1418             :     // Check disjointness
    1419             :     DCHECK(other == nullptr || current->end() <= other->start());
    1420             :     // Append the 'current' node to the result accumulator and move forward
    1421     7153234 :     if (tail == nullptr) {
    1422     1939023 :       use_interval_ = current;
    1423             :     } else {
    1424             :       tail->set_next(current);
    1425             :     }
    1426             :     tail = current;
    1427             :     current = current->next();
    1428             :   }
    1429             :   // Other list is empty => we are done
    1430           0 : }
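
MergeDisjointIntervals is the classic merge of two sorted lists, written with a swap so that 'current' always heads the earlier-starting list; because the lists are disjoint, whole nodes can be appended without splitting. A hypothetical self-contained rendering of the same loop:

// Hedged sketch, not V8 code: toy Interval type standing in for UseInterval.
#include <cassert>
#include <cstdio>
#include <utility>

struct Interval {
  Interval(int s, int e, Interval* n = nullptr) : start(s), end(e), next(n) {}
  int start, end;
  Interval* next;
};

Interval* MergeDisjoint(Interval* current, Interval* other) {
  Interval* head = nullptr;
  Interval* tail = nullptr;
  while (other != nullptr) {
    // Keep 'current' pointing at the list that starts first.
    if (current == nullptr || current->start > other->start) {
      std::swap(current, other);
    }
    // Disjointness invariant, mirroring the DCHECK in the function above.
    assert(other == nullptr || current->end <= other->start);
    // Append the head of 'current' to the result and advance.
    if (tail == nullptr) {
      head = current;
    } else {
      tail->next = current;
    }
    tail = current;
    current = current->next;
  }
  // 'other' ran out; the rest of 'current' is already linked in via 'tail'.
  return head;
}

int main() {
  Interval* a = new Interval(0, 2, new Interval(8, 10));
  Interval* b = new Interval(4, 6);
  for (Interval* i = MergeDisjoint(a, b); i != nullptr; i = i->next)
    std::printf("[%d,%d) ", i->start, i->end);  // prints: [0,2) [4,6) [8,10)
  std::printf("\n");
  return 0;
}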
    1431             : 
    1432           0 : void SpillRange::Print() const {
    1433           0 :   StdoutStream os;
    1434             :   os << "{" << std::endl;
    1435           0 :   for (TopLevelLiveRange* range : live_ranges()) {
    1436           0 :     os << range->vreg() << " ";
    1437             :   }
    1438             :   os << std::endl;
    1439             : 
    1440           0 :   for (UseInterval* i = interval(); i != nullptr; i = i->next()) {
    1441           0 :     os << '[' << i->start() << ", " << i->end() << ')' << std::endl;
    1442             :   }
    1443             :   os << "}" << std::endl;
    1444           0 : }
    1445             : 
    1446           0 : RegisterAllocationData::PhiMapValue::PhiMapValue(PhiInstruction* phi,
    1447             :                                                  const InstructionBlock* block,
    1448             :                                                  Zone* zone)
    1449             :     : phi_(phi),
    1450             :       block_(block),
    1451             :       incoming_operands_(zone),
    1452     4323690 :       assigned_register_(kUnassignedRegister) {
    1453     2161845 :   incoming_operands_.reserve(phi->operands().size());
    1454           0 : }
    1455             : 
    1456           0 : void RegisterAllocationData::PhiMapValue::AddOperand(
    1457             :     InstructionOperand* operand) {
    1458     5216376 :   incoming_operands_.push_back(operand);
    1459           0 : }
    1460             : 
    1461           0 : void RegisterAllocationData::PhiMapValue::CommitAssignment(
    1462             :     const InstructionOperand& assigned) {
    1463     7378330 :   for (InstructionOperand* operand : incoming_operands_) {
    1464             :     InstructionOperand::ReplaceWith(operand, &assigned);
    1465             :   }
    1466           0 : }
    1467             : 
    1468     2515253 : RegisterAllocationData::RegisterAllocationData(
    1469             :     const RegisterConfiguration* config, Zone* zone, Frame* frame,
    1470             :     InstructionSequence* code, const char* debug_name)
    1471             :     : allocation_zone_(zone),
    1472             :       frame_(frame),
    1473             :       code_(code),
    1474             :       debug_name_(debug_name),
    1475             :       config_(config),
    1476             :       phi_map_(allocation_zone()),
    1477             :       live_in_sets_(code->InstructionBlockCount(), nullptr, allocation_zone()),
    1478             :       live_out_sets_(code->InstructionBlockCount(), nullptr, allocation_zone()),
    1479     2516097 :       live_ranges_(code->VirtualRegisterCount() * 2, nullptr,
    1480             :                    allocation_zone()),
    1481             :       fixed_live_ranges_(this->config()->num_general_registers(), nullptr,
    1482             :                          allocation_zone()),
    1483             :       fixed_float_live_ranges_(allocation_zone()),
    1484             :       fixed_double_live_ranges_(this->config()->num_double_registers(), nullptr,
    1485             :                                 allocation_zone()),
    1486             :       fixed_simd128_live_ranges_(allocation_zone()),
    1487             :       spill_ranges_(code->VirtualRegisterCount(), nullptr, allocation_zone()),
    1488             :       delayed_references_(allocation_zone()),
    1489             :       assigned_registers_(nullptr),
    1490             :       assigned_double_registers_(nullptr),
    1491             :       virtual_register_count_(code->VirtualRegisterCount()),
    1492             :       preassigned_slot_ranges_(zone),
    1493             :       spill_state_(code->InstructionBlockCount(), ZoneVector<LiveRange*>(zone),
    1494    25163050 :                    zone) {
    1495             :   if (!kSimpleFPAliasing) {
    1496             :     fixed_float_live_ranges_.resize(this->config()->num_float_registers(),
    1497             :                                     nullptr);
    1498             :     fixed_simd128_live_ranges_.resize(this->config()->num_simd128_registers(),
    1499             :                                       nullptr);
    1500             :   }
    1501             : 
    1502             :   assigned_registers_ = new (code_zone())
    1503     2515722 :       BitVector(this->config()->num_general_registers(), code_zone());
    1504             :   assigned_double_registers_ = new (code_zone())
    1505     2515807 :       BitVector(this->config()->num_double_registers(), code_zone());
    1506             :   fixed_register_use_ = new (code_zone())
    1507     2516147 :       BitVector(this->config()->num_general_registers(), code_zone());
    1508             :   fixed_fp_register_use_ = new (code_zone())
    1509     2516477 :       BitVector(this->config()->num_double_registers(), code_zone());
    1510             : 
    1511     2516781 :   this->frame()->SetAllocatedRegisters(assigned_registers_);
    1512     2516781 :   this->frame()->SetAllocatedDoubleRegisters(assigned_double_registers_);
    1513     2516781 : }
    1514             : 
    1515    43025620 : MoveOperands* RegisterAllocationData::AddGapMove(
    1516             :     int index, Instruction::GapPosition position,
    1517             :     const InstructionOperand& from, const InstructionOperand& to) {
    1518             :   Instruction* instr = code()->InstructionAt(index);
    1519             :   ParallelMove* moves = instr->GetOrCreateParallelMove(position, code_zone());
    1520    43026922 :   return moves->AddMove(from, to);
    1521             : }
    1522             : 
    1523           0 : MachineRepresentation RegisterAllocationData::RepresentationFor(
    1524             :     int virtual_register) {
    1525             :   DCHECK_LT(virtual_register, code()->VirtualRegisterCount());
    1526    73323701 :   return code()->GetRepresentation(virtual_register);
    1527             : }
    1528             : 
    1529   348346773 : TopLevelLiveRange* RegisterAllocationData::GetOrCreateLiveRangeFor(int index) {
    1530   348346773 :   if (index >= static_cast<int>(live_ranges().size())) {
    1531           0 :     live_ranges().resize(index + 1, nullptr);
    1532             :   }
    1533   696693546 :   TopLevelLiveRange* result = live_ranges()[index];
    1534   348346773 :   if (result == nullptr) {
    1535    43032516 :     result = NewLiveRange(index, RepresentationFor(index));
    1536    43030977 :     live_ranges()[index] = result;
    1537             :   }
    1538   348345150 :   return result;
    1539             : }
    1540             : 
    1541    99363164 : TopLevelLiveRange* RegisterAllocationData::NewLiveRange(
    1542             :     int index, MachineRepresentation rep) {
    1543    99359593 :   return new (allocation_zone()) TopLevelLiveRange(index, rep);
    1544             : }
    1545             : 
    1546     5761221 : int RegisterAllocationData::GetNextLiveRangeId() {
    1547     5761221 :   int vreg = virtual_register_count_++;
    1548     5761221 :   if (vreg >= static_cast<int>(live_ranges().size())) {
    1549           0 :     live_ranges().resize(vreg + 1, nullptr);
    1550             :   }
    1551     5761221 :   return vreg;
    1552             : }
    1553             : 
    1554     5761202 : TopLevelLiveRange* RegisterAllocationData::NextLiveRange(
    1555             :     MachineRepresentation rep) {
    1556     5761202 :   int vreg = GetNextLiveRangeId();
    1557     5761223 :   TopLevelLiveRange* ret = NewLiveRange(vreg, rep);
    1558     5761147 :   return ret;
    1559             : }
    1560             : 
    1561     2161836 : RegisterAllocationData::PhiMapValue* RegisterAllocationData::InitializePhiMap(
    1562             :     const InstructionBlock* block, PhiInstruction* phi) {
    1563             :   RegisterAllocationData::PhiMapValue* map_value = new (allocation_zone())
    1564             :       RegisterAllocationData::PhiMapValue(phi, block, allocation_zone());
    1565             :   auto res =
    1566     4323651 :       phi_map_.insert(std::make_pair(phi->virtual_register(), map_value));
    1567             :   DCHECK(res.second);
    1568             :   USE(res);
    1569     2161831 :   return map_value;
    1570             : }
    1571             : 
    1572           0 : RegisterAllocationData::PhiMapValue* RegisterAllocationData::GetPhiMapValueFor(
    1573             :     int virtual_register) {
    1574             :   auto it = phi_map_.find(virtual_register);
    1575             :   DCHECK(it != phi_map_.end());
    1576     7007097 :   return it->second;
    1577             : }
    1578             : 
    1579           0 : RegisterAllocationData::PhiMapValue* RegisterAllocationData::GetPhiMapValueFor(
    1580             :     TopLevelLiveRange* top_range) {
    1581           0 :   return GetPhiMapValueFor(top_range->vreg());
    1582             : }
    1583             : 
    1584          42 : bool RegisterAllocationData::ExistsUseWithoutDefinition() {
    1585             :   bool found = false;
    1586          42 :   BitVector::Iterator iterator(live_in_sets()[0]);
    1587          42 :   while (!iterator.Done()) {
    1588             :     found = true;
    1589             :     int operand_index = iterator.Current();
    1590             :     PrintF("Register allocator error: live v%d reached first block.\n",
    1591           0 :            operand_index);
    1592           0 :     LiveRange* range = GetOrCreateLiveRangeFor(operand_index);
    1593           0 :     PrintF("  (first use is at %d)\n", range->first_pos()->pos().value());
    1594           0 :     if (debug_name() == nullptr) {
    1595           0 :       PrintF("\n");
    1596             :     } else {
    1597           0 :       PrintF("  (function: %s)\n", debug_name());
    1598             :     }
    1599           0 :     iterator.Advance();
    1600             :   }
    1601          42 :   return found;
    1602             : }
    1603             : 
    1604             : // If a range is defined in a deferred block, we can expect the entire range
    1605             : // to cover only positions in deferred blocks. Otherwise, a block on the
    1606             : // hot path would be dominated by a deferred block, meaning it would be
    1607             : // unreachable without passing through the deferred block, a contradiction.
    1608             : // In particular, when such a range contributes a result back to the hot
    1609             : // path, it does so as one of the inputs of a phi. In that case, the value
    1610             : // is transferred via a move in the Gap::END of the last instruction of a
    1611             : // deferred block.
    1612          42 : bool RegisterAllocationData::RangesDefinedInDeferredStayInDeferred() {
    1613             :   const size_t live_ranges_size = live_ranges().size();
    1614         728 :   for (const TopLevelLiveRange* range : live_ranges()) {
    1615         686 :     CHECK_EQ(live_ranges_size,
    1616             :              live_ranges().size());  // TODO(neis): crbug.com/831822
    1617        1006 :     if (range == nullptr || range->IsEmpty() ||
    1618             :         !code()
    1619             :              ->GetInstructionBlock(range->Start().ToInstructionIndex())
    1620         320 :              ->IsDeferred()) {
    1621             :       continue;
    1622             :     }
    1623           0 :     for (const UseInterval* i = range->first_interval(); i != nullptr;
    1624             :          i = i->next()) {
    1625             :       int first = i->FirstGapIndex();
    1626             :       int last = i->LastGapIndex();
    1627           0 :       for (int instr = first; instr <= last;) {
    1628           0 :         const InstructionBlock* block = code()->GetInstructionBlock(instr);
    1629           0 :         if (!block->IsDeferred()) return false;
    1630             :         instr = block->last_instruction_index() + 1;
    1631             :       }
    1632             :     }
    1633             :   }
    1634             :   return true;
    1635             : }
    1636             : 
    1637     6770114 : SpillRange* RegisterAllocationData::AssignSpillRangeToLiveRange(
    1638             :     TopLevelLiveRange* range, SpillMode spill_mode) {
    1639             :   using SpillType = TopLevelLiveRange::SpillType;
    1640             :   DCHECK(!range->HasSpillOperand());
    1641             : 
    1642             :   SpillRange* spill_range = range->GetAllocatedSpillRange();
    1643     6770114 :   if (spill_range == nullptr) {
    1644             :     DCHECK(!range->IsSplinter());
    1645     3448797 :     spill_range = new (allocation_zone()) SpillRange(range, allocation_zone());
    1646             :   }
    1647     6770294 :   if (spill_mode == SpillMode::kSpillDeferred &&
    1648             :       (range->spill_type() != SpillType::kSpillRange)) {
    1649             :     DCHECK(FLAG_turbo_control_flow_aware_allocation);
    1650             :     range->set_spill_type(SpillType::kDeferredSpillRange);
    1651             :   } else {
    1652             :     range->set_spill_type(SpillType::kSpillRange);
    1653             :   }
    1654             : 
    1655             :   int spill_range_index =
    1656     6770294 :       range->IsSplinter() ? range->splintered_from()->vreg() : range->vreg();
    1657             : 
    1658    13540588 :   spill_ranges()[spill_range_index] = spill_range;
    1659             : 
    1660     6770294 :   return spill_range;
    1661             : }
    1662             : 
    1663     2470568 : SpillRange* RegisterAllocationData::CreateSpillRangeForLiveRange(
    1664             :     TopLevelLiveRange* range) {
    1665             :   DCHECK(FLAG_turbo_preprocess_ranges);
    1666             :   DCHECK(!range->HasSpillOperand());
    1667             :   DCHECK(!range->IsSplinter());
    1668             :   SpillRange* spill_range =
    1669     2470560 :       new (allocation_zone()) SpillRange(range, allocation_zone());
    1670     2470532 :   return spill_range;
    1671             : }
    1672             : 
    1673    20241594 : void RegisterAllocationData::MarkFixedUse(MachineRepresentation rep,
    1674             :                                           int index) {
    1675    20241594 :   switch (rep) {
    1676             :     case MachineRepresentation::kFloat32:
    1677             :     case MachineRepresentation::kSimd128:
    1678             :       if (kSimpleFPAliasing) {
    1679       19138 :         fixed_fp_register_use_->Add(index);
    1680             :       } else {
    1681             :         int alias_base_index = -1;
    1682             :         int aliases = config()->GetAliases(
    1683             :             rep, index, MachineRepresentation::kFloat64, &alias_base_index);
    1684             :         DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
    1685             :         while (aliases--) {
    1686             :           int aliased_reg = alias_base_index + aliases;
    1687             :           fixed_fp_register_use_->Add(aliased_reg);
    1688             :         }
    1689             :       }
    1690             :       break;
    1691             :     case MachineRepresentation::kFloat64:
    1692       59467 :       fixed_fp_register_use_->Add(index);
    1693             :       break;
    1694             :     default:
    1695             :       DCHECK(!IsFloatingPoint(rep));
    1696    20162989 :       fixed_register_use_->Add(index);
    1697             :       break;
    1698             :   }
    1699    20241594 : }
    1700             : 
    1701   355198608 : bool RegisterAllocationData::HasFixedUse(MachineRepresentation rep, int index) {
    1702   355198608 :   switch (rep) {
    1703             :     case MachineRepresentation::kFloat32:
    1704             :     case MachineRepresentation::kSimd128:
    1705             :       if (kSimpleFPAliasing) {
    1706     1859572 :         return fixed_fp_register_use_->Contains(index);
    1707             :       } else {
    1708             :         int alias_base_index = -1;
    1709             :         int aliases = config()->GetAliases(
    1710             :             rep, index, MachineRepresentation::kFloat64, &alias_base_index);
    1711             :         DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
    1712             :         bool result = false;
    1713             :         while (aliases-- && !result) {
    1714             :           int aliased_reg = alias_base_index + aliases;
    1715             :           result |= fixed_fp_register_use_->Contains(aliased_reg);
    1716             :         }
    1717             :         return result;
    1718             :       }
    1719             :       break;
    1720             :     case MachineRepresentation::kFloat64:
    1721    32427868 :       return fixed_fp_register_use_->Contains(index);
    1722             :       break;
    1723             :     default:
    1724             :       DCHECK(!IsFloatingPoint(rep));
    1725   676109776 :       return fixed_register_use_->Contains(index);
    1726             :       break;
    1727             :   }
    1728             : }
    1729             : 
    1730    94133537 : void RegisterAllocationData::MarkAllocated(MachineRepresentation rep,
    1731             :                                            int index) {
    1732    94133537 :   switch (rep) {
    1733             :     case MachineRepresentation::kFloat32:
    1734             :     case MachineRepresentation::kSimd128:
    1735             :       if (kSimpleFPAliasing) {
    1736      104588 :         assigned_double_registers_->Add(index);
    1737             :       } else {
    1738             :         int alias_base_index = -1;
    1739             :         int aliases = config()->GetAliases(
    1740             :             rep, index, MachineRepresentation::kFloat64, &alias_base_index);
    1741             :         DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
    1742             :         while (aliases--) {
    1743             :           int aliased_reg = alias_base_index + aliases;
    1744             :           assigned_double_registers_->Add(aliased_reg);
    1745             :         }
    1746             :       }
    1747             :       break;
    1748             :     case MachineRepresentation::kFloat64:
    1749    29422035 :       assigned_double_registers_->Add(index);
    1750             :       break;
    1751             :     default:
    1752             :       DCHECK(!IsFloatingPoint(rep));
    1753    64606914 :       assigned_registers_->Add(index);
    1754             :       break;
    1755             :   }
    1756    94133537 : }
    1757             : 
    1758    24954236 : bool RegisterAllocationData::IsBlockBoundary(LifetimePosition pos) const {
    1759    42122443 :   return pos.IsFullStart() &&
    1760    17168223 :          code()->GetInstructionBlock(pos.ToInstructionIndex())->code_start() ==
    1761    24954220 :              pos.ToInstructionIndex();
    1762             : }
    1763             : 
    1764     5032372 : ConstraintBuilder::ConstraintBuilder(RegisterAllocationData* data)
    1765     5032372 :     : data_(data) {}
    1766             : 
    1767    30363910 : InstructionOperand* ConstraintBuilder::AllocateFixed(
    1768             :     UnallocatedOperand* operand, int pos, bool is_tagged, bool is_input) {
    1769    30363910 :   TRACE("Allocating fixed reg for op %d\n", operand->virtual_register());
    1770             :   DCHECK(operand->HasFixedPolicy());
    1771             :   InstructionOperand allocated;
    1772             :   MachineRepresentation rep = InstructionSequence::DefaultRepresentation();
    1773             :   int virtual_register = operand->virtual_register();
    1774    30362723 :   if (virtual_register != InstructionOperand::kInvalidVirtualRegister) {
    1775             :     rep = data()->RepresentationFor(virtual_register);
    1776             :   }
    1777    30363029 :   if (operand->HasFixedSlotPolicy()) {
    1778             :     allocated = AllocatedOperand(AllocatedOperand::STACK_SLOT, rep,
    1779             :                                  operand->fixed_slot_index());
    1780    29138838 :   } else if (operand->HasFixedRegisterPolicy()) {
    1781             :     DCHECK(!IsFloatingPoint(rep));
    1782             :     DCHECK(data()->config()->IsAllocatableGeneralCode(
    1783             :         operand->fixed_register_index()));
    1784             :     allocated = AllocatedOperand(AllocatedOperand::REGISTER, rep,
    1785             :                                  operand->fixed_register_index());
    1786      143907 :   } else if (operand->HasFixedFPRegisterPolicy()) {
    1787             :     DCHECK(IsFloatingPoint(rep));
    1788             :     DCHECK_NE(InstructionOperand::kInvalidVirtualRegister, virtual_register);
    1789             :     allocated = AllocatedOperand(AllocatedOperand::REGISTER, rep,
    1790             :                                  operand->fixed_register_index());
    1791             :   } else {
    1792           0 :     UNREACHABLE();
    1793             :   }
    1794    50697680 :   if (is_input && allocated.IsAnyRegister()) {
    1795    20242324 :     data()->MarkFixedUse(rep, operand->fixed_register_index());
    1796             :   }
    1797             :   InstructionOperand::ReplaceWith(operand, &allocated);
    1798    30362381 :   if (is_tagged) {
    1799    19155270 :     TRACE("Fixed reg is tagged at %d\n", pos);
    1800             :     Instruction* instr = code()->InstructionAt(pos);
    1801    19155317 :     if (instr->HasReferenceMap()) {
    1802    15428178 :       instr->reference_map()->RecordReference(*AllocatedOperand::cast(operand));
    1803             :     }
    1804             :   }
    1805    30362489 :   return operand;
    1806             : }
    1807             : 
    1808     2515730 : void ConstraintBuilder::MeetRegisterConstraints() {
    1809    22825658 :   for (InstructionBlock* block : code()->instruction_blocks()) {
    1810    20308132 :     MeetRegisterConstraints(block);
    1811             :   }
    1812     2517526 : }
    1813             : 
    1814    20308403 : void ConstraintBuilder::MeetRegisterConstraints(const InstructionBlock* block) {
    1815             :   int start = block->first_instruction_index();
    1816             :   int end = block->last_instruction_index();
    1817             :   DCHECK_NE(-1, start);
    1818   159588225 :   for (int i = start; i <= end; ++i) {
    1819    69638341 :     MeetConstraintsBefore(i);
    1820    69639739 :     if (i != end) MeetConstraintsAfter(i);
    1821             :   }
    1822             :   // Meet register constraints for the instruction in the end.
    1823    20309973 :   MeetRegisterConstraintsForLastInstructionInBlock(block);
    1824    20309777 : }
    1825             : 
    1826    20309730 : void ConstraintBuilder::MeetRegisterConstraintsForLastInstructionInBlock(
    1827             :     const InstructionBlock* block) {
    1828             :   int end = block->last_instruction_index();
    1829             :   Instruction* last_instruction = code()->InstructionAt(end);
    1830    20905100 :   for (size_t i = 0; i < last_instruction->OutputCount(); i++) {
    1831             :     InstructionOperand* output_operand = last_instruction->OutputAt(i);
    1832             :     DCHECK(!output_operand->IsConstant());
    1833             :     UnallocatedOperand* output = UnallocatedOperand::cast(output_operand);
    1834             :     int output_vreg = output->virtual_register();
    1835      297593 :     TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(output_vreg);
    1836             :     bool assigned = false;
    1837      297587 :     if (output->HasFixedPolicy()) {
    1838           2 :       AllocateFixed(output, -1, false, false);
    1839             :       // This value is produced on the stack, we never need to spill it.
    1840           2 :       if (output->IsStackSlot()) {
    1841             :         DCHECK(LocationOperand::cast(output)->index() <
    1842             :                data()->frame()->GetSpillSlotCount());
    1843             :         range->SetSpillOperand(LocationOperand::cast(output));
    1844             :         range->SetSpillStartIndex(end);
    1845             :         assigned = true;
    1846             :       }
    1847             : 
    1848           4 :       for (const RpoNumber& succ : block->successors()) {
    1849           2 :         const InstructionBlock* successor = code()->InstructionBlockAt(succ);
    1850             :         DCHECK_EQ(1, successor->PredecessorCount());
    1851             :         int gap_index = successor->first_instruction_index();
    1852             :         // Create an unconstrained operand for the same virtual register
    1853             :         // and insert a gap move from the fixed output to the operand.
    1854             :         UnallocatedOperand output_copy(UnallocatedOperand::REGISTER_OR_SLOT,
    1855             :                                        output_vreg);
    1856           2 :         data()->AddGapMove(gap_index, Instruction::START, *output, output_copy);
    1857             :       }
    1858             :     }
    1859             : 
    1860      297587 :     if (!assigned) {
    1861      892757 :       for (const RpoNumber& succ : block->successors()) {
    1862      595168 :         const InstructionBlock* successor = code()->InstructionBlockAt(succ);
    1863             :         DCHECK_EQ(1, successor->PredecessorCount());
    1864             :         int gap_index = successor->first_instruction_index();
    1865             :         range->RecordSpillLocation(allocation_zone(), gap_index, output);
    1866             :         range->SetSpillStartIndex(gap_index);
    1867             :       }
    1868             :     }
    1869             :   }
    1870    20309915 : }
    1871             : 
    1872    49330906 : void ConstraintBuilder::MeetConstraintsAfter(int instr_index) {
    1873             :   Instruction* first = code()->InstructionAt(instr_index);
    1874             :   // Handle fixed temporaries.
    1875    50845247 :   for (size_t i = 0; i < first->TempCount(); i++) {
    1876             :     UnallocatedOperand* temp = UnallocatedOperand::cast(first->TempAt(i));
    1877      757070 :     if (temp->HasFixedPolicy()) AllocateFixed(temp, instr_index, false, false);
    1878             :   }
    1879             :   // Handle constant/fixed output operands.
    1880   129102156 :   for (size_t i = 0; i < first->OutputCount(); i++) {
    1881             :     InstructionOperand* output = first->OutputAt(i);
    1882    39885758 :     if (output->IsConstant()) {
    1883             :       int output_vreg = ConstantOperand::cast(output)->virtual_register();
    1884    14062822 :       TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(output_vreg);
    1885    14063222 :       range->SetSpillStartIndex(instr_index + 1);
    1886             :       range->SetSpillOperand(output);
    1887             :       continue;
    1888             :     }
    1889             :     UnallocatedOperand* first_output = UnallocatedOperand::cast(output);
    1890             :     TopLevelLiveRange* range =
    1891    25822936 :         data()->GetOrCreateLiveRangeFor(first_output->virtual_register());
    1892             :     bool assigned = false;
    1893    25821779 :     if (first_output->HasFixedPolicy()) {
    1894             :       int output_vreg = first_output->virtual_register();
    1895             :       UnallocatedOperand output_copy(UnallocatedOperand::REGISTER_OR_SLOT,
    1896             :                                      output_vreg);
    1897             :       bool is_tagged = code()->IsReference(output_vreg);
    1898     9954556 :       if (first_output->HasSecondaryStorage()) {
    1899             :         range->MarkHasPreassignedSlot();
    1900      441384 :         data()->preassigned_slot_ranges().push_back(
    1901      882831 :             std::make_pair(range, first_output->GetSecondaryStorage()));
    1902             :       }
    1903     9954619 :       AllocateFixed(first_output, instr_index, is_tagged, false);
    1904             : 
    1905             :       // This value is produced on the stack, we never need to spill it.
    1906     9955053 :       if (first_output->IsStackSlot()) {
    1907             :         DCHECK(LocationOperand::cast(first_output)->index() <
    1908             :                data()->frame()->GetTotalFrameSlotCount());
    1909             :         range->SetSpillOperand(LocationOperand::cast(first_output));
    1910     1105804 :         range->SetSpillStartIndex(instr_index + 1);
    1911             :         assigned = true;
    1912             :       }
    1913     9955053 :       data()->AddGapMove(instr_index + 1, Instruction::START, *first_output,
    1914     9955053 :                          output_copy);
    1915             :     }
    1916             :     // Make sure we add a gap move for spilling (if we have not done
    1917             :     // so already).
    1918    25822555 :     if (!assigned) {
    1919    24717222 :       range->RecordSpillLocation(allocation_zone(), instr_index + 1,
    1920             :                                  first_output);
    1921             :       range->SetSpillStartIndex(instr_index + 1);
    1922             :     }
    1923             :   }
    1924    49330873 : }
    1925             : 
    1926    69638411 : void ConstraintBuilder::MeetConstraintsBefore(int instr_index) {
    1927             :   Instruction* second = code()->InstructionAt(instr_index);
    1928             :   // Handle fixed input operands of second instruction.
    1929   349824386 :   for (size_t i = 0; i < second->InputCount(); i++) {
    1930             :     InstructionOperand* input = second->InputAt(i);
    1931   140092571 :     if (input->IsImmediate() || input->IsExplicit()) {
    1932             :       continue;  // Ignore immediates and explicitly reserved registers.
    1933             :     }
    1934             :     UnallocatedOperand* cur_input = UnallocatedOperand::cast(input);
    1935    74835174 :     if (cur_input->HasFixedPolicy()) {
    1936             :       int input_vreg = cur_input->virtual_register();
    1937             :       UnallocatedOperand input_copy(UnallocatedOperand::REGISTER_OR_SLOT,
    1938             :                                     input_vreg);
    1939             :       bool is_tagged = code()->IsReference(input_vreg);
    1940    20336042 :       AllocateFixed(cur_input, instr_index, is_tagged, true);
    1941    40669164 :       data()->AddGapMove(instr_index, Instruction::END, input_copy, *cur_input);
    1942             :     }
    1943             :   }
    1944             :   // Handle "output same as input" for second instruction.
    1945   150006113 :   for (size_t i = 0; i < second->OutputCount(); i++) {
    1946             :     InstructionOperand* output = second->OutputAt(i);
    1947    77332315 :     if (!output->IsUnallocated()) continue;
    1948             :     UnallocatedOperand* second_output = UnallocatedOperand::cast(output);
    1949    26120368 :     if (!second_output->HasSameAsInputPolicy()) continue;
    1950             :     DCHECK_EQ(0, i);  // Only valid for first output.
    1951             :     UnallocatedOperand* cur_input =
    1952             :         UnallocatedOperand::cast(second->InputAt(0));
    1953             :     int output_vreg = second_output->virtual_register();
    1954             :     int input_vreg = cur_input->virtual_register();
    1955             :     UnallocatedOperand input_copy(UnallocatedOperand::REGISTER_OR_SLOT,
    1956             :                                   input_vreg);
    1957             :     *cur_input =
    1958     3034425 :         UnallocatedOperand(*cur_input, second_output->virtual_register());
    1959     3034425 :     MoveOperands* gap_move = data()->AddGapMove(instr_index, Instruction::END,
    1960     3034425 :                                                 input_copy, *cur_input);
    1961             :     DCHECK_NOT_NULL(gap_move);
    1962     3869309 :     if (code()->IsReference(input_vreg) && !code()->IsReference(output_vreg)) {
    1963      834890 :       if (second->HasReferenceMap()) {
    1964             :         RegisterAllocationData::DelayedReference delayed_reference = {
    1965           0 :             second->reference_map(), &gap_move->source()};
    1966           0 :         data()->delayed_references().push_back(delayed_reference);
    1967             :       }
    1968     2199329 :     } else if (!code()->IsReference(input_vreg) &&
    1969             :                code()->IsReference(output_vreg)) {
     1970             :       // The input is assumed to become tagged immediately, before the
     1971             :       // pointer map can be used; i.e. the pointer map at this instruction
     1972             :       // will include the output operand (whose value at the beginning of
     1973             :       // the instruction equals that of the input operand). If this is not
     1974             :       // desired, the pointer map at this instruction needs to be adjusted
     1975             :       // manually.
    1976             :     }
    1977             :   }
    1978    69639572 : }
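                      :
                      : // Illustration (hypothetical vreg v7 and register r2, not from any
                      : // particular compilation): for an input carrying a fixed register policy,
                      : // the first loop above rewrites the operand in place and records a
                      : // parallel move in the instruction's END gap, roughly
                      : //
                      : //   gap: r2 = v7   (END position)
                      : //   instr uses r2
                      : //
                      : // Likewise, an output with the "same as input" policy renames input 0 to
                      : // the output's virtual register and adds a gap move (v_out = v_in), so
                      : // the instruction can safely clobber its input operand.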
    1979             : 
    1980     2517180 : void ConstraintBuilder::ResolvePhis() {
    1981             :   // Process the blocks in reverse order.
    1982    22826043 :   for (InstructionBlock* block : base::Reversed(code()->instruction_blocks())) {
    1983    20309149 :     ResolvePhis(block);
    1984             :   }
    1985     2516894 : }
    1986             : 
    1987    20308528 : void ConstraintBuilder::ResolvePhis(const InstructionBlock* block) {
    1988    22470367 :   for (PhiInstruction* phi : block->phis()) {
    1989             :     int phi_vreg = phi->virtual_register();
    1990             :     RegisterAllocationData::PhiMapValue* map_value =
    1991     2161863 :         data()->InitializePhiMap(block, phi);
    1992             :     InstructionOperand& output = phi->output();
    1993             :     // Map the destination operands, so the commitment phase can find them.
    1994    12594579 :     for (size_t i = 0; i < phi->operands().size(); ++i) {
    1995             :       InstructionBlock* cur_block =
    1996     5216370 :           code()->InstructionBlockAt(block->predecessors()[i]);
    1997             :       UnallocatedOperand input(UnallocatedOperand::REGISTER_OR_SLOT,
    1998     5216366 :                                phi->operands()[i]);
    1999             :       MoveOperands* move = data()->AddGapMove(
    2000     5216366 :           cur_block->last_instruction_index(), Instruction::END, input, output);
    2001             :       map_value->AddOperand(&move->destination());
    2002             :       DCHECK(!code()
    2003             :                   ->InstructionAt(cur_block->last_instruction_index())
    2004             :                   ->HasReferenceMap());
    2005             :     }
    2006     2161838 :     TopLevelLiveRange* live_range = data()->GetOrCreateLiveRangeFor(phi_vreg);
    2007             :     int gap_index = block->first_instruction_index();
    2008             :     live_range->RecordSpillLocation(allocation_zone(), gap_index, &output);
    2009             :     live_range->SetSpillStartIndex(gap_index);
     2010             :     // We use the phi-ness of some nodes in later heuristics.
    2011             :     live_range->set_is_phi(true);
    2012     2161839 :     live_range->set_is_non_loop_phi(!block->IsLoopHeader());
    2013             :   }
    2014    20308504 : }
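                      :
                      : // A sketch of the rewrite above (hypothetical vregs): a phi
                      : //
                      : //   B3: v10 = phi(v4 from B1, v6 from B2)
                      : //
                      : // turns into a gap move at the END of each predecessor's last
                      : // instruction, i.e. (v10 = v4) in B1 and (v10 = v6) in B2, while v10's
                      : // live range records a spill location at the first instruction of B3.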
    2015             : 
    2016     2515468 : LiveRangeBuilder::LiveRangeBuilder(RegisterAllocationData* data,
    2017             :                                    Zone* local_zone)
    2018     5030936 :     : data_(data), phi_hints_(local_zone) {}
    2019             : 
    2020    20306228 : BitVector* LiveRangeBuilder::ComputeLiveOut(const InstructionBlock* block,
    2021             :                                             RegisterAllocationData* data) {
    2022             :   size_t block_index = block->rpo_number().ToSize();
    2023    20306228 :   BitVector* live_out = data->live_out_sets()[block_index];
    2024    20306228 :   if (live_out == nullptr) {
     2025             :     // Compute live out for the given block, excluding backward successor
     2026             :     // edges.
    2027             :     Zone* zone = data->allocation_zone();
    2028             :     const InstructionSequence* code = data->code();
    2029             : 
    2030    20306326 :     live_out = new (zone) BitVector(code->VirtualRegisterCount(), zone);
    2031             : 
    2032             :     // Process all successor blocks.
    2033    44114578 :     for (const RpoNumber& succ : block->successors()) {
    2034             :       // Add values live on entry to the successor.
    2035    23808827 :       if (succ <= block->rpo_number()) continue;
    2036    23578604 :       BitVector* live_in = data->live_in_sets()[succ.ToSize()];
    2037    23578604 :       if (live_in != nullptr) live_out->Union(*live_in);
    2038             : 
    2039             :       // All phi input operands corresponding to this successor edge are live
    2040             :       // out from this block.
    2041    23578604 :       const InstructionBlock* successor = code->InstructionBlockAt(succ);
    2042    23578472 :       size_t index = successor->PredecessorIndexOf(block->rpo_number());
    2043             :       DCHECK(index < successor->PredecessorCount());
    2044    28436656 :       for (PhiInstruction* phi : successor->phis()) {
    2045     4857718 :         live_out->Add(phi->operands()[index]);
    2046             :       }
    2047             :     }
    2048    20305751 :     data->live_out_sets()[block_index] = live_out;
    2049             :   }
    2050    20305532 :   return live_out;
    2051             : }
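                      :
                      : // In dataflow terms, the loop above computes (with backedges excluded;
                      : // loop liveness is patched up later by ProcessLoopHeader):
                      : //
                      : //   live_out(B) = union over forward successors S of
                      : //                 live_in(S) + the phi inputs flowing along the B->S edge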
    2052             : 
    2053    20305588 : void LiveRangeBuilder::AddInitialIntervals(const InstructionBlock* block,
    2054             :                                            BitVector* live_out) {
    2055             :   // Add an interval that includes the entire block to the live range for
    2056             :   // each live_out value.
    2057             :   LifetimePosition start = LifetimePosition::GapFromInstructionIndex(
    2058    20305588 :       block->first_instruction_index());
    2059             :   LifetimePosition end = LifetimePosition::InstructionFromInstructionIndex(
    2060             :                              block->last_instruction_index())
    2061    20305588 :                              .NextStart();
    2062             :   BitVector::Iterator iterator(live_out);
    2063   281995783 :   while (!iterator.Done()) {
    2064             :     int operand_index = iterator.Current();
    2065   130845779 :     TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(operand_index);
    2066   130845895 :     range->AddUseInterval(start, end, allocation_zone());
    2067   130845489 :     iterator.Advance();
    2068             :   }
    2069    20304556 : }
    2070             : 
    2071    27533829 : int LiveRangeBuilder::FixedFPLiveRangeID(int index, MachineRepresentation rep) {
    2072    27533829 :   int result = -index - 1;
    2073    27533829 :   switch (rep) {
    2074             :     case MachineRepresentation::kSimd128:
    2075          48 :       result -= config()->num_float_registers();
    2076             :       V8_FALLTHROUGH;
    2077             :     case MachineRepresentation::kFloat32:
    2078       10057 :       result -= config()->num_double_registers();
    2079             :       V8_FALLTHROUGH;
    2080             :     case MachineRepresentation::kFloat64:
    2081    27533829 :       result -= config()->num_general_registers();
    2082             :       break;
    2083             :     default:
    2084           0 :       UNREACHABLE();
    2085             :       break;
    2086             :   }
    2087    27533829 :   return result;
    2088             : }
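                      :
                      : // Worked example with hypothetical register counts (16 general, 16
                      : // double, 32 float): a kFloat64 register at index 0 maps to
                      : // -0 - 1 - 16 = -17, and a kFloat32 register at index 3 maps to
                      : // -3 - 1 - 16 - 16 = -36. The fallthroughs stack the offsets so that each
                      : // representation occupies its own disjoint band of negative IDs, below
                      : // the IDs used for fixed general registers.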
    2089             : 
    2090   129772991 : TopLevelLiveRange* LiveRangeBuilder::FixedLiveRangeFor(int index) {
    2091             :   DCHECK(index < config()->num_general_registers());
    2092   259545982 :   TopLevelLiveRange* result = data()->fixed_live_ranges()[index];
    2093   129772991 :   if (result == nullptr) {
    2094             :     MachineRepresentation rep = InstructionSequence::DefaultRepresentation();
    2095    23037164 :     result = data()->NewLiveRange(FixedLiveRangeID(index), rep);
    2096             :     DCHECK(result->IsFixed());
    2097             :     result->set_assigned_register(index);
    2098    23036259 :     data()->MarkAllocated(rep, index);
    2099    23036201 :     data()->fixed_live_ranges()[index] = result;
    2100             :   }
    2101   129772028 :   return result;
    2102             : }
    2103             : 
    2104    92752009 : TopLevelLiveRange* LiveRangeBuilder::FixedFPLiveRangeFor(
    2105             :     int index, MachineRepresentation rep) {
    2106             :   int num_regs = config()->num_double_registers();
    2107             :   ZoneVector<TopLevelLiveRange*>* live_ranges =
    2108             :       &data()->fixed_double_live_ranges();
    2109             :   if (!kSimpleFPAliasing) {
    2110             :     switch (rep) {
    2111             :       case MachineRepresentation::kFloat32:
    2112             :         num_regs = config()->num_float_registers();
    2113             :         live_ranges = &data()->fixed_float_live_ranges();
    2114             :         break;
    2115             :       case MachineRepresentation::kSimd128:
    2116             :         num_regs = config()->num_simd128_registers();
    2117             :         live_ranges = &data()->fixed_simd128_live_ranges();
    2118             :         break;
    2119             :       default:
    2120             :         break;
    2121             :     }
    2122             :   }
    2123             : 
    2124             :   DCHECK(index < num_regs);
    2125             :   USE(num_regs);
    2126   185504018 :   TopLevelLiveRange* result = (*live_ranges)[index];
    2127    92752009 :   if (result == nullptr) {
    2128    27533851 :     result = data()->NewLiveRange(FixedFPLiveRangeID(index, rep), rep);
    2129             :     DCHECK(result->IsFixed());
    2130             :     result->set_assigned_register(index);
    2131    27533368 :     data()->MarkAllocated(rep, index);
    2132    27533460 :     (*live_ranges)[index] = result;
    2133             :   }
    2134    92751618 :   return result;
    2135             : }
    2136             : 
    2137   186196462 : TopLevelLiveRange* LiveRangeBuilder::LiveRangeFor(InstructionOperand* operand) {
    2138   186196462 :   if (operand->IsUnallocated()) {
    2139             :     return data()->GetOrCreateLiveRangeFor(
    2140   113589740 :         UnallocatedOperand::cast(operand)->virtual_register());
    2141    72606722 :   } else if (operand->IsConstant()) {
    2142             :     return data()->GetOrCreateLiveRangeFor(
    2143    14063880 :         ConstantOperand::cast(operand)->virtual_register());
    2144    58542842 :   } else if (operand->IsRegister()) {
    2145             :     return FixedLiveRangeFor(
    2146    55807184 :         LocationOperand::cast(operand)->GetRegister().code());
    2147     2735658 :   } else if (operand->IsFPRegister()) {
    2148             :     LocationOperand* op = LocationOperand::cast(operand);
    2149      287274 :     return FixedFPLiveRangeFor(op->register_code(), op->representation());
    2150             :   } else {
    2151             :     return nullptr;
    2152             :   }
    2153             : }
    2154             : 
    2155   116635010 : UsePosition* LiveRangeBuilder::NewUsePosition(LifetimePosition pos,
    2156             :                                               InstructionOperand* operand,
    2157             :                                               void* hint,
    2158             :                                               UsePositionHintType hint_type) {
    2159   233270055 :   return new (allocation_zone()) UsePosition(pos, operand, hint, hint_type);
    2160             : }
    2161             : 
    2162    74247660 : UsePosition* LiveRangeBuilder::Define(LifetimePosition position,
    2163             :                                       InstructionOperand* operand, void* hint,
    2164             :                                       UsePositionHintType hint_type) {
    2165    74247660 :   TopLevelLiveRange* range = LiveRangeFor(operand);
    2166    74248352 :   if (range == nullptr) return nullptr;
    2167             : 
    2168    73024623 :   if (range->IsEmpty() || range->Start() > position) {
    2169             :     // Can happen if there is a definition without use.
    2170     3045533 :     range->AddUseInterval(position, position.NextStart(), allocation_zone());
    2171     3045488 :     range->AddUsePosition(NewUsePosition(position.NextStart()));
    2172             :   } else {
    2173             :     range->ShortenTo(position);
    2174             :   }
    2175    73025905 :   if (!operand->IsUnallocated()) return nullptr;
    2176             :   UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand);
    2177             :   UsePosition* use_pos =
    2178    29821980 :       NewUsePosition(position, unalloc_operand, hint, hint_type);
    2179    29821753 :   range->AddUsePosition(use_pos);
    2180    29821918 :   return use_pos;
    2181             : }
    2182             : 
    2183   111949580 : UsePosition* LiveRangeBuilder::Use(LifetimePosition block_start,
    2184             :                                    LifetimePosition position,
    2185             :                                    InstructionOperand* operand, void* hint,
    2186             :                                    UsePositionHintType hint_type) {
    2187   111949580 :   TopLevelLiveRange* range = LiveRangeFor(operand);
    2188   111949434 :   if (range == nullptr) return nullptr;
    2189             :   UsePosition* use_pos = nullptr;
    2190   110725573 :   if (operand->IsUnallocated()) {
    2191             :     UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand);
    2192    83770981 :     use_pos = NewUsePosition(position, unalloc_operand, hint, hint_type);
    2193    83771005 :     range->AddUsePosition(use_pos);
    2194             :   }
    2195   110725410 :   range->AddUseInterval(block_start, position, allocation_zone());
    2196   110724529 :   return use_pos;
    2197             : }
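                      :
                      : // Define() and Use() together implement the backward scan (hypothetical
                      : // example): for "10: v1 = Add v2, v3" in a block whose first instruction
                      : // is 5, Use() gives v2 and v3 intervals from the block start up to their
                      : // use at 10, while Define() shortens v1's already-open interval to begin
                      : // at its definition at 10 (or, for a definition without any use, creates
                      : // a minimal one-position interval).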
    2198             : 
    2199    20304855 : void LiveRangeBuilder::ProcessInstructions(const InstructionBlock* block,
    2200             :                                            BitVector* live) {
    2201             :   int block_start = block->first_instruction_index();
    2202             :   LifetimePosition block_start_position =
    2203             :       LifetimePosition::GapFromInstructionIndex(block_start);
    2204             :   bool fixed_float_live_ranges = false;
    2205             :   bool fixed_simd128_live_ranges = false;
    2206             :   if (!kSimpleFPAliasing) {
    2207             :     int mask = data()->code()->representation_mask();
    2208             :     fixed_float_live_ranges = (mask & kFloat32Bit) != 0;
    2209             :     fixed_simd128_live_ranges = (mask & kSimd128Bit) != 0;
    2210             :   }
    2211             : 
    2212   159582667 :   for (int index = block->last_instruction_index(); index >= block_start;
    2213             :        index--) {
    2214             :     LifetimePosition curr_position =
    2215             :         LifetimePosition::InstructionFromInstructionIndex(index);
    2216             :     Instruction* instr = code()->InstructionAt(index);
    2217             :     DCHECK_NOT_NULL(instr);
    2218             :     DCHECK(curr_position.IsInstructionPosition());
    2219             :     // Process output, inputs, and temps of this instruction.
    2220   150008636 :     for (size_t i = 0; i < instr->OutputCount(); i++) {
    2221             :       InstructionOperand* output = instr->OutputAt(i);
    2222    40184900 :       if (output->IsUnallocated()) {
    2223             :         // Unsupported.
    2224             :         DCHECK(!UnallocatedOperand::cast(output)->HasSlotPolicy());
    2225             :         int out_vreg = UnallocatedOperand::cast(output)->virtual_register();
    2226             :         live->Remove(out_vreg);
    2227    24018969 :       } else if (output->IsConstant()) {
    2228             :         int out_vreg = ConstantOperand::cast(output)->virtual_register();
    2229             :         live->Remove(out_vreg);
    2230             :       }
    2231    40823988 :       if (block->IsHandler() && index == block_start && output->IsAllocated() &&
    2232    40383275 :           output->IsRegister() &&
    2233             :           AllocatedOperand::cast(output)->GetRegister() ==
    2234             :               v8::internal::kReturnRegister0) {
    2235             :         // The register defined here is blocked from gap start - it is the
    2236             :         // exception value.
    2237             :         // TODO(mtrofin): should we explore an explicit opcode for
    2238             :         // the first instruction in the handler?
    2239             :         Define(LifetimePosition::GapFromInstructionIndex(index), output);
    2240             :       } else {
    2241             :         Define(curr_position, output);
    2242             :       }
    2243             :     }
    2244             : 
    2245    69638159 :     if (instr->ClobbersRegisters()) {
    2246   154098338 :       for (int i = 0; i < config()->num_allocatable_general_registers(); ++i) {
     2247             :         // Create a UseInterval at this instruction for all fixed registers
     2248             :         // (including the instruction outputs). Adding another UseInterval here
    2249             :         // is OK because AddUseInterval will just merge it with the existing
    2250             :         // one at the end of the range.
    2251             :         int code = config()->GetAllocatableGeneralCode(i);
    2252    73967224 :         TopLevelLiveRange* range = FixedLiveRangeFor(code);
    2253             :         range->AddUseInterval(curr_position, curr_position.End(),
    2254    73967014 :                               allocation_zone());
    2255             :       }
    2256             :     }
    2257             : 
    2258    69637888 :     if (instr->ClobbersDoubleRegisters()) {
    2259   191093610 :       for (int i = 0; i < config()->num_allocatable_double_registers(); ++i) {
    2260             :         // Add a UseInterval for all DoubleRegisters. See comment above for
    2261             :         // general registers.
    2262             :         int code = config()->GetAllocatableDoubleCode(i);
    2263             :         TopLevelLiveRange* range =
    2264    92464779 :             FixedFPLiveRangeFor(code, MachineRepresentation::kFloat64);
    2265             :         range->AddUseInterval(curr_position, curr_position.End(),
    2266    92464525 :                               allocation_zone());
    2267             :       }
    2268             :       // Clobber fixed float registers on archs with non-simple aliasing.
    2269             :       if (!kSimpleFPAliasing) {
    2270             :         if (fixed_float_live_ranges) {
    2271             :           for (int i = 0; i < config()->num_allocatable_float_registers();
    2272             :                ++i) {
    2273             :             // Add a UseInterval for all FloatRegisters. See comment above for
    2274             :             // general registers.
    2275             :             int code = config()->GetAllocatableFloatCode(i);
    2276             :             TopLevelLiveRange* range =
    2277             :                 FixedFPLiveRangeFor(code, MachineRepresentation::kFloat32);
    2278             :             range->AddUseInterval(curr_position, curr_position.End(),
    2279             :                                   allocation_zone());
    2280             :           }
    2281             :         }
    2282             :         if (fixed_simd128_live_ranges) {
    2283             :           for (int i = 0; i < config()->num_allocatable_simd128_registers();
    2284             :                ++i) {
    2285             :             int code = config()->GetAllocatableSimd128Code(i);
    2286             :             TopLevelLiveRange* range =
    2287             :                 FixedFPLiveRangeFor(code, MachineRepresentation::kSimd128);
    2288             :             range->AddUseInterval(curr_position, curr_position.End(),
    2289             :                                   allocation_zone());
    2290             :           }
    2291             :         }
    2292             :       }
    2293             :     }
    2294             : 
    2295   349806667 :     for (size_t i = 0; i < instr->InputCount(); i++) {
    2296             :       InstructionOperand* input = instr->InputAt(i);
    2297   140083973 :       if (input->IsImmediate() || input->IsExplicit()) {
    2298             :         continue;  // Ignore immediates and explicitly reserved registers.
    2299             :       }
    2300             :       LifetimePosition use_pos;
    2301   129330437 :       if (input->IsUnallocated() &&
    2302             :           UnallocatedOperand::cast(input)->IsUsedAtStart()) {
    2303             :         use_pos = curr_position;
    2304             :       } else {
    2305             :         use_pos = curr_position.End();
    2306             :       }
    2307             : 
    2308    74832190 :       if (input->IsUnallocated()) {
    2309             :         UnallocatedOperand* unalloc = UnallocatedOperand::cast(input);
    2310             :         int vreg = unalloc->virtual_register();
    2311             :         live->Add(vreg);
    2312    54498450 :         if (unalloc->HasSlotPolicy()) {
    2313    14857242 :           if (FLAG_turbo_control_flow_aware_allocation) {
    2314           0 :             data()->GetOrCreateLiveRangeFor(vreg)->register_slot_use(
    2315             :                 block->IsDeferred()
    2316             :                     ? TopLevelLiveRange::SlotUseKind::kDeferredSlotUse
    2317             :                     : TopLevelLiveRange::SlotUseKind::kGeneralSlotUse);
    2318             :           } else {
    2319    14857242 :             data()->GetOrCreateLiveRangeFor(vreg)->register_slot_use(
    2320             :                 TopLevelLiveRange::SlotUseKind::kGeneralSlotUse);
    2321             :           }
    2322             :         }
    2323             :       }
    2324             :       Use(block_start_position, use_pos, input);
    2325             :     }
    2326             : 
    2327    71159037 :     for (size_t i = 0; i < instr->TempCount(); i++) {
    2328             :       InstructionOperand* temp = instr->TempAt(i);
    2329             :       // Unsupported.
    2330             :       DCHECK_IMPLIES(temp->IsUnallocated(),
    2331             :                      !UnallocatedOperand::cast(temp)->HasSlotPolicy());
    2332      760407 :       if (instr->ClobbersTemps()) {
    2333           0 :         if (temp->IsRegister()) continue;
    2334           0 :         if (temp->IsUnallocated()) {
    2335             :           UnallocatedOperand* temp_unalloc = UnallocatedOperand::cast(temp);
    2336           0 :           if (temp_unalloc->HasFixedPolicy()) {
    2337             :             continue;
    2338             :           }
    2339             :         }
    2340             :       }
    2341             :       Use(block_start_position, curr_position.End(), temp);
    2342             :       Define(curr_position, temp);
    2343             :     }
    2344             : 
    2345             :     // Process the moves of the instruction's gaps, making their sources live.
    2346             :     const Instruction::GapPosition kPositions[] = {Instruction::END,
    2347    69638211 :                                                    Instruction::START};
    2348             :     curr_position = curr_position.PrevStart();
    2349             :     DCHECK(curr_position.IsGapPosition());
    2350   348186707 :     for (const Instruction::GapPosition& position : kPositions) {
    2351   139273553 :       ParallelMove* move = instr->GetParallelMove(position);
    2352   139273553 :       if (move == nullptr) continue;
    2353    26108327 :       if (position == Instruction::END) {
    2354             :         curr_position = curr_position.End();
    2355             :       } else {
    2356             :         curr_position = curr_position.Start();
    2357             :       }
    2358    64649974 :       for (MoveOperands* cur : *move) {
    2359             :         InstructionOperand& from = cur->source();
    2360             :         InstructionOperand& to = cur->destination();
    2361             :         void* hint = &to;
    2362    38540952 :         UsePositionHintType hint_type = UsePosition::HintTypeForOperand(to);
    2363             :         UsePosition* to_use = nullptr;
    2364             :         int phi_vreg = -1;
    2365    38541207 :         if (to.IsUnallocated()) {
    2366             :           int to_vreg = UnallocatedOperand::cast(to).virtual_register();
    2367             :           TopLevelLiveRange* to_range =
    2368    18206517 :               data()->GetOrCreateLiveRangeFor(to_vreg);
    2369    18206497 :           if (to_range->is_phi()) {
    2370             :             phi_vreg = to_vreg;
    2371     5216519 :             if (to_range->is_non_loop_phi()) {
    2372             :               hint = to_range->current_hint_position();
    2373             :               hint_type = hint == nullptr ? UsePositionHintType::kNone
    2374     4501709 :                                           : UsePositionHintType::kUsePos;
    2375             :             } else {
    2376             :               hint_type = UsePositionHintType::kPhi;
    2377             :               hint = data()->GetPhiMapValueFor(to_vreg);
    2378             :             }
    2379             :           } else {
    2380    12989978 :             if (live->Contains(to_vreg)) {
    2381    10806816 :               to_use = Define(curr_position, &to, &from,
    2382    10806905 :                               UsePosition::HintTypeForOperand(from));
    2383             :               live->Remove(to_vreg);
    2384             :             } else {
    2385             :               cur->Eliminate();
    2386             :               continue;
    2387             :             }
    2388             :           }
    2389             :         } else {
    2390             :           Define(curr_position, &to);
    2391             :         }
    2392             :         UsePosition* from_use =
    2393    36358977 :             Use(block_start_position, curr_position, &from, hint, hint_type);
    2394             :         // Mark range live.
    2395    36358396 :         if (from.IsUnallocated()) {
    2396             :           live->Add(UnallocatedOperand::cast(from).virtual_register());
    2397             :         }
    2398             :         // Resolve use position hints just created.
    2399    36358396 :         if (to_use != nullptr && from_use != nullptr) {
    2400             :           to_use->ResolveHint(from_use);
    2401             :           from_use->ResolveHint(to_use);
    2402             :         }
    2403             :         DCHECK_IMPLIES(to_use != nullptr, to_use->IsResolved());
    2404             :         DCHECK_IMPLIES(from_use != nullptr, from_use->IsResolved());
    2405             :         // Potentially resolve phi hint.
    2406    36358396 :         if (phi_vreg != -1) ResolvePhiHint(&from, from_use);
    2407             :       }
    2408             :     }
    2409             :   }
    2410    20307792 : }
    2411             : 
    2412    20307748 : void LiveRangeBuilder::ProcessPhis(const InstructionBlock* block,
    2413             :                                    BitVector* live) {
    2414    22469617 :   for (PhiInstruction* phi : block->phis()) {
    2415             :     // The live range interval already ends at the first instruction of the
    2416             :     // block.
    2417             :     int phi_vreg = phi->virtual_register();
    2418             :     live->Remove(phi_vreg);
    2419             :     // Select a hint from a predecessor block that precedes this block in the
    2420             :     // rpo order. In order of priority:
    2421             :     // - Avoid hints from deferred blocks.
    2422             :     // - Prefer hints from allocated (or explicit) operands.
    2423             :     // - Prefer hints from empty blocks (containing just parallel moves and a
    2424             :     //   jump). In these cases, if we can elide the moves, the jump threader
    2425             :     //   is likely to be able to elide the jump.
    2426             :     // The enforcement of hinting in rpo order is required because hint
    2427             :     // resolution that happens later in the compiler pipeline visits
    2428             :     // instructions in reverse rpo order, relying on the fact that phis are
    2429             :     // encountered before their hints.
    2430             :     InstructionOperand* hint = nullptr;
    2431             :     int hint_preference = 0;
    2432             : 
    2433             :     // The cost of hinting increases with the number of predecessors. At the
    2434             :     // same time, the typical benefit decreases, since this hinting only
    2435             :     // optimises the execution path through one predecessor. A limit of 2 is
    2436             :     // sufficient to hit the common if/else pattern.
    2437             :     int predecessor_limit = 2;
    2438             : 
    2439     4682509 :     for (RpoNumber predecessor : block->predecessors()) {
    2440             :       const InstructionBlock* predecessor_block =
    2441     4326060 :           code()->InstructionBlockAt(predecessor);
    2442             :       DCHECK_EQ(predecessor_block->rpo_number(), predecessor);
    2443             : 
    2444             :       // Only take hints from earlier rpo numbers.
    2445     4326056 :       if (predecessor >= block->rpo_number()) continue;
    2446             : 
    2447             :       // Look up the predecessor instruction.
    2448             :       const Instruction* predecessor_instr =
    2449             :           GetLastInstruction(code(), predecessor_block);
    2450             :       InstructionOperand* predecessor_hint = nullptr;
    2451             :       // Phis are assigned in the END position of the last instruction in each
    2452             :       // predecessor block.
    2453     4973092 :       for (MoveOperands* move :
    2454     4973096 :            *predecessor_instr->GetParallelMove(Instruction::END)) {
    2455             :         InstructionOperand& to = move->destination();
    2456     9946146 :         if (to.IsUnallocated() &&
    2457             :             UnallocatedOperand::cast(to).virtual_register() == phi_vreg) {
    2458             :           predecessor_hint = &move->source();
    2459     3967667 :           break;
    2460             :         }
    2461             :       }
    2462             :       DCHECK_NOT_NULL(predecessor_hint);
    2463             : 
    2464             :       // For each predecessor, generate a score according to the priorities
    2465             :       // described above, and pick the best one. Flags in higher-order bits have
    2466             :       // a higher priority than those in lower-order bits.
    2467             :       int predecessor_hint_preference = 0;
    2468             :       const int kNotDeferredBlockPreference = (1 << 2);
    2469             :       const int kMoveIsAllocatedPreference = (1 << 1);
    2470             :       const int kBlockIsEmptyPreference = (1 << 0);
    2471             : 
    2472             :       // - Avoid hints from deferred blocks.
    2473     3967663 :       if (!predecessor_block->IsDeferred()) {
    2474             :         predecessor_hint_preference |= kNotDeferredBlockPreference;
    2475             :       }
    2476             : 
    2477             :       // - Prefer hints from allocated (or explicit) operands.
    2478             :       //
    2479             :       // Already-allocated or explicit operands are typically assigned using
    2480             :       // the parallel moves on the last instruction. For example:
    2481             :       //
    2482             :       //      gap (v101 = [x0|R|w32]) (v100 = v101)
    2483             :       //      ArchJmp
    2484             :       //    ...
    2485             :       //    phi: v100 = v101 v102
    2486             :       //
    2487             :       // We have already found the END move, so look for a matching START move
    2488             :       // from an allocated (or explicit) operand.
    2489             :       //
    2490             :       // Note that we cannot simply look up data()->live_ranges()[vreg] here
    2491             :       // because the live ranges are still being built when this function is
    2492             :       // called.
    2493             :       // TODO(v8): Find a way to separate hinting from live range analysis in
    2494             :       // BuildLiveRanges so that we can use the O(1) live-range look-up.
    2495             :       auto moves = predecessor_instr->GetParallelMove(Instruction::START);
    2496     3967663 :       if (moves != nullptr) {
    2497      429511 :         for (MoveOperands* move : *moves) {
    2498             :           InstructionOperand& to = move->destination();
    2499      362462 :           if (predecessor_hint->Equals(to)) {
    2500      295413 :             if (move->source().IsAllocated() || move->source().IsExplicit()) {
    2501      295413 :               predecessor_hint_preference |= kMoveIsAllocatedPreference;
    2502             :             }
    2503             :             break;
    2504             :           }
    2505             :         }
    2506             :       }
    2507             : 
    2508             :       // - Prefer hints from empty blocks.
    2509     3967663 :       if (predecessor_block->last_instruction_index() ==
    2510             :           predecessor_block->first_instruction_index()) {
    2511     1450668 :         predecessor_hint_preference |= kBlockIsEmptyPreference;
    2512             :       }
    2513             : 
    2514     7935326 :       if ((hint == nullptr) ||
    2515     3967663 :           (predecessor_hint_preference > hint_preference)) {
    2516             :         // Take the hint from this predecessor.
    2517             :         hint = predecessor_hint;
    2518             :         hint_preference = predecessor_hint_preference;
    2519             :       }
    2520             : 
    2521     3967663 :       if (--predecessor_limit <= 0) break;
    2522             :     }
    2523             :     DCHECK_NOT_NULL(hint);
    2524             : 
    2525             :     LifetimePosition block_start = LifetimePosition::GapFromInstructionIndex(
    2526     2161879 :         block->first_instruction_index());
    2527     2161879 :     UsePosition* use_pos = Define(block_start, &phi->output(), hint,
    2528     2161883 :                                   UsePosition::HintTypeForOperand(*hint));
    2529             :     MapPhiHint(hint, use_pos);
    2530             :   }
    2531    20307741 : }
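                      :
                      : // Example of the scoring above (hypothetical predecessors): a
                      : // non-deferred predecessor whose hint is an allocated operand scores
                      : // 4 | 2 = 6 and beats a non-deferred empty block scoring 4 | 1 = 5, while
                      : // a deferred predecessor can score at most 2 | 1 = 3 and only wins if
                      : // nothing better is seen within the predecessor limit.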
    2532             : 
    2533      229517 : void LiveRangeBuilder::ProcessLoopHeader(const InstructionBlock* block,
    2534             :                                          BitVector* live) {
    2535             :   DCHECK(block->IsLoopHeader());
    2536             :   // Add a live range stretching from the first loop instruction to the last
    2537             :   // for each value live on entry to the header.
    2538             :   BitVector::Iterator iterator(live);
    2539             :   LifetimePosition start = LifetimePosition::GapFromInstructionIndex(
    2540      229517 :       block->first_instruction_index());
    2541             :   LifetimePosition end = LifetimePosition::GapFromInstructionIndex(
    2542      229517 :                              code()->LastLoopInstructionIndex(block))
    2543      229517 :                              .NextFullStart();
    2544     4472399 :   while (!iterator.Done()) {
    2545             :     int operand_index = iterator.Current();
    2546     2121441 :     TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(operand_index);
    2547     2121441 :     range->EnsureInterval(start, end, allocation_zone());
    2548     2121441 :     iterator.Advance();
    2549             :   }
    2550             :   // Insert all values into the live in sets of all blocks in the loop.
    2551     3758830 :   for (int i = block->rpo_number().ToInt() + 1; i < block->loop_end().ToInt();
    2552             :        ++i) {
    2553     7058626 :     live_in_sets()[i]->Union(*live);
    2554             :   }
    2555      229517 : }
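                      :
                      : // For instance, a value live on entry to a loop spanning blocks B3..B6
                      : // receives one interval covering the entire loop body and is added to the
                      : // live-in set of every block in the loop, so its range cannot expire
                      : // partway through an iteration.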
    2556             : 
    2557     2515523 : void LiveRangeBuilder::BuildLiveRanges() {
    2558             :   // Process the blocks in reverse order.
    2559    22823628 :   for (int block_id = code()->InstructionBlockCount() - 1; block_id >= 0;
    2560             :        --block_id) {
    2561             :     InstructionBlock* block =
    2562    20306013 :         code()->InstructionBlockAt(RpoNumber::FromInt(block_id));
    2563    20306233 :     BitVector* live = ComputeLiveOut(block, data());
    2564             :     // Initially consider all live_out values live for the entire block. We
    2565             :     // will shorten these intervals if necessary.
    2566    20305739 :     AddInitialIntervals(block, live);
    2567             :     // Process the instructions in reverse order, generating and killing
    2568             :     // live values.
    2569    20304616 :     ProcessInstructions(block, live);
    2570             :     // All phi output operands are killed by this block.
    2571    20308296 :     ProcessPhis(block, live);
    2572             :     // Now live is live_in for this block except not including values live
    2573             :     // out on backward successor edges.
    2574    20307956 :     if (block->IsLoopHeader()) ProcessLoopHeader(block, live);
    2575    40616210 :     live_in_sets()[block_id] = live;
    2576             :   }
    2577             :   // Postprocess the ranges.
    2578             :   const size_t live_ranges_size = data()->live_ranges().size();
    2579    93987899 :   for (TopLevelLiveRange* range : data()->live_ranges()) {
    2580    91470498 :     CHECK_EQ(live_ranges_size,
    2581             :              data()->live_ranges().size());  // TODO(neis): crbug.com/831822
    2582    91470498 :     if (range == nullptr) continue;
    2583             :     // Give slots to all ranges with a non fixed slot use.
    2584    45207660 :     if (range->has_slot_use() && range->HasNoSpillType()) {
    2585             :       SpillMode spill_mode =
    2586             :           range->slot_use_kind() ==
    2587             :                   TopLevelLiveRange::SlotUseKind::kDeferredSlotUse
    2588             :               ? SpillMode::kSpillDeferred
    2589     1251502 :               : SpillMode::kSpillAtDefinition;
    2590     1251502 :       data()->AssignSpillRangeToLiveRange(range, spill_mode);
    2591             :     }
    2592             :     // TODO(bmeurer): This is a horrible hack to make sure that for constant
    2593             :     // live ranges, every use requires the constant to be in a register.
    2594             :     // Without this hack, all uses with "any" policy would get the constant
    2595             :     // operand assigned.
    2596    58204395 :     if (range->HasSpillOperand() && range->GetSpillOperand()->IsConstant()) {
    2597    55331062 :       for (UsePosition* pos = range->first_pos(); pos != nullptr;
    2598             :            pos = pos->next()) {
    2599    20633454 :         if (pos->type() == UsePositionType::kRequiresSlot ||
    2600             :             pos->type() == UsePositionType::kRegisterOrSlotOrConstant) {
    2601             :           continue;
    2602             :         }
    2603             :         UsePositionType new_type = UsePositionType::kRegisterOrSlot;
    2604             :         // Can't mark phis as needing a register.
    2605    20633552 :         if (!pos->pos().IsGapPosition()) {
    2606             :           new_type = UsePositionType::kRequiresRegister;
    2607             :         }
    2608             :         pos->set_type(new_type, true);
    2609             :       }
    2610             :     }
    2611             :   }
    2612     2959024 :   for (auto preassigned : data()->preassigned_slot_ranges()) {
    2613             :     TopLevelLiveRange* range = preassigned.first;
    2614             :     int slot_id = preassigned.second;
    2615             :     SpillRange* spill = range->HasSpillRange()
    2616             :                             ? range->GetSpillRange()
    2617             :                             : data()->AssignSpillRangeToLiveRange(
    2618      441702 :                                   range, SpillMode::kSpillAtDefinition);
    2619             :     spill->set_assigned_slot(slot_id);
    2620             :   }
    2621             : #ifdef DEBUG
    2622             :   Verify();
    2623             : #endif
    2624     2517322 : }
    2625             : 
    2626           0 : void LiveRangeBuilder::MapPhiHint(InstructionOperand* operand,
    2627             :                                   UsePosition* use_pos) {
    2628             :   DCHECK(!use_pos->IsResolved());
    2629     4323752 :   auto res = phi_hints_.insert(std::make_pair(operand, use_pos));
    2630             :   DCHECK(res.second);
    2631             :   USE(res);
    2632           0 : }
    2633             : 
    2634     5216491 : void LiveRangeBuilder::ResolvePhiHint(InstructionOperand* operand,
    2635             :                                       UsePosition* use_pos) {
    2636             :   auto it = phi_hints_.find(operand);
    2637     5216491 :   if (it == phi_hints_.end()) return;
    2638             :   DCHECK(!it->second->IsResolved());
    2639     2161874 :   it->second->ResolveHint(use_pos);
    2640             : }
    2641             : 
    2642           0 : void LiveRangeBuilder::Verify() const {
    2643           0 :   for (auto& hint : phi_hints_) {
    2644           0 :     CHECK(hint.second->IsResolved());
    2645             :   }
    2646           0 :   for (const TopLevelLiveRange* current : data()->live_ranges()) {
    2647           0 :     if (current != nullptr && !current->IsEmpty()) {
    2648             :       // New LiveRanges should not be split.
    2649           0 :       CHECK_NULL(current->next());
    2650             :       // General integrity check.
    2651           0 :       current->Verify();
    2652             :       const UseInterval* first = current->first_interval();
    2653           0 :       if (first->next() == nullptr) continue;
    2654             : 
    2655             :       // Consecutive intervals should not end and start in the same block,
    2656             :       // otherwise the intervals should have been joined, because the
    2657             :       // variable is live throughout that block.
    2658           0 :       CHECK(NextIntervalStartsInDifferentBlocks(first));
    2659             : 
    2660           0 :       for (const UseInterval* i = first->next(); i != nullptr; i = i->next()) {
     2661             :         // Except for the first interval, the other intervals must start at
    2662             :         // a block boundary, otherwise data wouldn't flow to them.
    2663           0 :         CHECK(IntervalStartsAtBlockBoundary(i));
    2664             :         // The last instruction of the predecessors of the block the interval
    2665             :         // starts must be covered by the range.
    2666           0 :         CHECK(IntervalPredecessorsCoveredByRange(i, current));
    2667           0 :         if (i->next() != nullptr) {
    2668             :           // Check the consecutive intervals property, except for the last
    2669             :           // interval, where it doesn't apply.
    2670           0 :           CHECK(NextIntervalStartsInDifferentBlocks(i));
    2671             :         }
    2672             :       }
    2673             :     }
    2674             :   }
    2675           0 : }
    2676             : 
    2677           0 : bool LiveRangeBuilder::IntervalStartsAtBlockBoundary(
    2678             :     const UseInterval* interval) const {
    2679             :   LifetimePosition start = interval->start();
    2680           0 :   if (!start.IsFullStart()) return false;
    2681             :   int instruction_index = start.ToInstructionIndex();
    2682             :   const InstructionBlock* block =
    2683           0 :       data()->code()->GetInstructionBlock(instruction_index);
    2684           0 :   return block->first_instruction_index() == instruction_index;
    2685             : }
    2686             : 
    2687           0 : bool LiveRangeBuilder::IntervalPredecessorsCoveredByRange(
    2688             :     const UseInterval* interval, const TopLevelLiveRange* range) const {
    2689             :   LifetimePosition start = interval->start();
    2690             :   int instruction_index = start.ToInstructionIndex();
    2691             :   const InstructionBlock* block =
    2692           0 :       data()->code()->GetInstructionBlock(instruction_index);
    2693           0 :   for (RpoNumber pred_index : block->predecessors()) {
    2694             :     const InstructionBlock* predecessor =
    2695           0 :         data()->code()->InstructionBlockAt(pred_index);
    2696             :     LifetimePosition last_pos = LifetimePosition::GapFromInstructionIndex(
    2697             :         predecessor->last_instruction_index());
    2698             :     last_pos = last_pos.NextStart().End();
    2699           0 :     if (!range->Covers(last_pos)) return false;
    2700             :   }
    2701           0 :   return true;
    2702             : }
    2703             : 
    2704           0 : bool LiveRangeBuilder::NextIntervalStartsInDifferentBlocks(
    2705             :     const UseInterval* interval) const {
    2706             :   DCHECK_NOT_NULL(interval->next());
    2707             :   LifetimePosition end = interval->end();
    2708             :   LifetimePosition next_start = interval->next()->start();
     2709             :   // Since end is not covered, but the previous position is, move back one
     2710             :   // position.
    2711           0 :   end = end.IsStart() ? end.PrevStart().End() : end.Start();
    2712             :   int last_covered_index = end.ToInstructionIndex();
    2713             :   const InstructionBlock* block =
    2714           0 :       data()->code()->GetInstructionBlock(last_covered_index);
    2715             :   const InstructionBlock* next_block =
    2716           0 :       data()->code()->GetInstructionBlock(next_start.ToInstructionIndex());
    2717           0 :   return block->rpo_number() < next_block->rpo_number();
    2718             : }
    2719             : 
    2720     2516702 : void BundleBuilder::BuildBundles() {
    2721     2516702 :   TRACE("Build bundles\n");
    2722             :   // Process the blocks in reverse order.
    2723    22824637 :   for (int block_id = code()->InstructionBlockCount() - 1; block_id >= 0;
    2724             :        --block_id) {
    2725             :     InstructionBlock* block =
    2726    20308199 :         code()->InstructionBlockAt(RpoNumber::FromInt(block_id));
    2727    20308318 :     TRACE("Block B%d\n", block_id);
    2728    22470198 :     for (auto phi : block->phis()) {
    2729             :       LiveRange* out_range =
    2730     2161872 :           data()->GetOrCreateLiveRangeFor(phi->virtual_register());
    2731             :       LiveRangeBundle* out = out_range->get_bundle();
    2732     2161874 :       if (out == nullptr) {
    2733             :         out = new (data()->allocation_zone())
    2734     1657215 :             LiveRangeBundle(data()->allocation_zone(), next_bundle_id_++);
    2735     1657207 :         out->TryAddRange(out_range);
    2736             :       }
    2737     2161868 :       TRACE("Processing phi for v%d with %d:%d\n", phi->virtual_register(),
    2738             :             out_range->TopLevel()->vreg(), out_range->relative_id());
    2739     7378368 :       for (auto input : phi->operands()) {
    2740     5216481 :         LiveRange* input_range = data()->GetOrCreateLiveRangeFor(input);
    2741     5216488 :         TRACE("Input value v%d with range %d:%d\n", input,
    2742             :               input_range->TopLevel()->vreg(), input_range->relative_id());
    2743             :         LiveRangeBundle* input_bundle = input_range->get_bundle();
    2744     5216503 :         if (input_bundle != nullptr) {
    2745      647360 :           TRACE("Merge\n");
    2746      647360 :           if (out->TryMerge(input_bundle))
    2747      387990 :             TRACE("Merged %d and %d to %d\n", phi->virtual_register(), input,
    2748             :                   out->id());
    2749             :         } else {
    2750     4569143 :           TRACE("Add\n");
    2751     4569143 :           if (out->TryAddRange(input_range))
    2752     4192338 :             TRACE("Added %d and %d to %d\n", phi->virtual_register(), input,
    2753             :                   out->id());
    2754             :         }
    2755             :       }
    2756             :     }
    2757    20308326 :     TRACE("Done block B%d\n", block_id);
    2758             :   }
    2759     2516438 : }
    2760             : 
    2761     6226312 : bool LiveRangeBundle::TryAddRange(LiveRange* range) {
    2762             :   DCHECK_NULL(range->get_bundle());
    2763             :   // We may only add a new live range if its use intervals do not
    2764             :   // overlap with existing intervals in the bundle.
    2765     6226312 :   if (UsesOverlap(range->first_interval())) return false;
    2766             :   ranges_.insert(range);
    2767     5849506 :   range->set_bundle(this);
    2768     5849506 :   InsertUses(range->first_interval());
    2769     5849524 :   return true;
    2770             : }
    2771      647360 : bool LiveRangeBundle::TryMerge(LiveRangeBundle* other) {
    2772      647360 :   if (other == this) return true;
    2773             : 
    2774             :   auto iter1 = uses_.begin();
    2775             :   auto iter2 = other->uses_.begin();
    2776             : 
    2777     2138167 :   while (iter1 != uses_.end() && iter2 != other->uses_.end()) {
    2778     1044857 :     if (iter1->start > iter2->end) {
    2779             :       ++iter2;
    2780      438923 :     } else if (iter2->start > iter1->end) {
    2781             :       ++iter1;
    2782             :     } else {
    2783      259371 :       TRACE("No merge %d:%d %d:%d\n", iter1->start, iter1->end, iter2->start,
    2784             :             iter2->end);
    2785             :       return false;
    2786             :     }
    2787             :   }
    2788             :   // Uses are disjoint, merging is possible.
    2789      174049 :   for (auto it = other->ranges_.begin(); it != other->ranges_.end(); ++it) {
    2790      145458 :     (*it)->set_bundle(this);
    2791      145458 :     InsertUses((*it)->first_interval());
    2792             :   }
    2793             :   ranges_.insert(other->ranges_.begin(), other->ranges_.end());
    2794             :   other->ranges_.clear();
    2795             : 
    2796       28591 :   return true;
    2797             : }
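                      :
                      : // The sweep above relies on uses_ being sorted (hypothetical positions):
                      : // if this bundle uses [2,6) and [14,18) and the other uses [8,12), the
                      : // two cursors leapfrog without ever seeing an overlap, so the bundles
                      : // merge. Had the other bundle used [4,9) instead, neither start would lie
                      : // beyond the other interval's end, and the merge would be rejected.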
    2798             : 
    2799     5849510 : void LiveRangeBundle::MergeSpillRanges() {
    2800             :   SpillRange* target = nullptr;
    2801    53795570 :   for (auto range : ranges_) {
    2802    47946015 :     if (range->TopLevel()->HasSpillRange()) {
    2803             :       SpillRange* current = range->TopLevel()->GetSpillRange();
    2804     3764793 :       if (target == nullptr) {
    2805             :         target = current;
    2806     1479574 :       } else if (target != current) {
    2807       99856 :         target->TryMerge(current);
    2808             :       }
    2809             :     }
    2810             :   }
    2811     5849555 : }
    2812             : 
    2813           0 : RegisterAllocator::RegisterAllocator(RegisterAllocationData* data,
    2814             :                                      RegisterKind kind)
    2815             :     : data_(data),
    2816             :       mode_(kind),
    2817             :       num_registers_(GetRegisterCount(data->config(), kind)),
    2818             :       num_allocatable_registers_(
    2819             :           GetAllocatableRegisterCount(data->config(), kind)),
    2820             :       allocatable_register_codes_(
    2821             :           GetAllocatableRegisterCodes(data->config(), kind)),
    2822    10892008 :       check_fp_aliasing_(false) {
    2823             :   if (!kSimpleFPAliasing && kind == FP_REGISTERS) {
    2824             :     check_fp_aliasing_ = (data->code()->representation_mask() &
    2825             :                           (kFloat32Bit | kSimd128Bit)) != 0;
    2826             :   }
    2827           0 : }
    2828             : 
    2829           0 : LifetimePosition RegisterAllocator::GetSplitPositionForInstruction(
    2830             :     const LiveRange* range, int instruction_index) {
    2831             :   LifetimePosition ret = LifetimePosition::Invalid();
    2832             : 
    2833             :   ret = LifetimePosition::GapFromInstructionIndex(instruction_index);
    2834    13138830 :   if (range->Start() >= ret || ret >= range->End()) {
    2835             :     return LifetimePosition::Invalid();
    2836             :   }
    2837           0 :   return ret;
    2838             : }
    2839             : 
    2840     2722728 : void RegisterAllocator::SplitAndSpillRangesDefinedByMemoryOperand() {
    2841             :   size_t initial_range_count = data()->live_ranges().size();
    2842   268081040 :   for (size_t i = 0; i < initial_range_count; ++i) {
    2843   132677950 :     CHECK_EQ(initial_range_count,
    2844             :              data()->live_ranges().size());  // TODO(neis): crbug.com/831822
    2845   132677950 :     TopLevelLiveRange* range = data()->live_ranges()[i];
    2846   132677950 :     if (!CanProcessRange(range)) continue;
    2847             :     // Only assume the range is defined by a memory operand if we are
    2848             :     // guaranteed to spill it or it has a spill operand.
    2849    93221958 :     if (range->HasNoSpillType() ||
    2850     2489980 :         (range->HasSpillRange() && !range->has_non_deferred_slot_use())) {
    2851             :       continue;
    2852             :     }
    2853             :     LifetimePosition start = range->Start();
    2854    19347335 :     TRACE("Live range %d:%d is defined by a spill operand.\n",
    2855             :           range->TopLevel()->vreg(), range->relative_id());
    2856             :     LifetimePosition next_pos = start;
    2857    19347353 :     if (next_pos.IsGapPosition()) {
    2858             :       next_pos = next_pos.NextStart();
    2859             :     }
    2860             : 
    2861             :     // With splinters, we can be more strict and skip over positions
    2862             :     // not strictly needing registers.
    2863             :     UsePosition* pos =
    2864             :         range->IsSplinter()
    2865     3209475 :             ? range->NextRegisterPosition(next_pos)
    2866    22556828 :             : range->NextUsePositionRegisterIsBeneficial(next_pos);
    2867             :     // If the range already has a spill operand and it doesn't need a
    2868             :     // register immediately, split it and spill the first part of the range.
    2869    19347302 :     if (pos == nullptr) {
    2870     5354516 :       Spill(range, SpillMode::kSpillAtDefinition);
    2871    13992786 :     } else if (pos->pos() > range->Start().NextStart()) {
    2872             :       // Do not spill live range eagerly if use position that can benefit from
    2873             :       // the register is too close to the start of live range.
    2874             :       LifetimePosition split_pos = GetSplitPositionForInstruction(
    2875             :           range, pos->pos().ToInstructionIndex());
    2876             :       // There is no place to split, so we can't split and spill.
    2877    13138830 :       if (!split_pos.IsValid()) continue;
    2878             : 
    2879             :       split_pos =
    2880    13138049 :           FindOptimalSplitPos(range->Start().NextFullStart(), split_pos);
    2881             : 
    2882    13137928 :       SplitRangeAt(range, split_pos);
    2883    13138764 :       Spill(range, SpillMode::kSpillAtDefinition);
    2884             :     }
    2885             :   }
    2886     2723934 : }
    2887             : 
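For each range defined by a memory operand, the loop above picks one of three outcomes. A condensed sketch of that decision, using hypothetical names (ClassifySpillDefinedRange, kInvalid) and plain instruction indices rather than the allocator's LifetimePosition type:

    // kInvalid marks "no such use"; positions are instruction indices.
    constexpr int kInvalid = -1;

    enum class Action { kSpillWholeRange, kSplitThenSpillHead, kKeepInRegister };

    Action ClassifySpillDefinedRange(int range_start, int first_beneficial_use) {
      if (first_beneficial_use == kInvalid) {
        return Action::kSpillWholeRange;  // no use ever needs a register
      }
      if (first_beneficial_use > range_start + 1) {
        // The use is far enough away: split before it and spill the head.
        return Action::kSplitThenSpillHead;
      }
      // The use follows the definition immediately: eager spilling would
      // only add a reload, so keep the range as is.
      return Action::kKeepInRegister;
    }
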
    2888    25926201 : LiveRange* RegisterAllocator::SplitRangeAt(LiveRange* range,
    2889             :                                            LifetimePosition pos) {
    2890             :   DCHECK(!range->TopLevel()->IsFixed());
    2891    25926201 :   TRACE("Splitting live range %d:%d at %d\n", range->TopLevel()->vreg(),
    2892             :         range->relative_id(), pos.value());
    2893             : 
    2894    25926331 :   if (pos <= range->Start()) return range;
    2895             : 
    2896             :   // We can't properly connect live ranges if splitting occurred at the end
    2897             :   // of a block.
    2898             :   DCHECK(pos.IsStart() || pos.IsGapPosition() ||
    2899             :          (GetInstructionBlock(code(), pos)->last_instruction_index() !=
    2900             :           pos.ToInstructionIndex()));
    2901             : 
    2902    23101848 :   LiveRange* result = range->SplitAt(pos, allocation_zone());
    2903    23102520 :   return result;
    2904             : }
    2905             : 
    2906     3703473 : LiveRange* RegisterAllocator::SplitBetween(LiveRange* range,
    2907             :                                            LifetimePosition start,
    2908             :                                            LifetimePosition end) {
    2909             :   DCHECK(!range->TopLevel()->IsFixed());
    2910     3703473 :   TRACE("Splitting live range %d:%d in position between [%d, %d]\n",
    2911             :         range->TopLevel()->vreg(), range->relative_id(), start.value(),
    2912             :         end.value());
    2913             : 
    2914     3703473 :   LifetimePosition split_pos = FindOptimalSplitPos(start, end);
    2915             :   DCHECK(split_pos >= start);
    2916     3703474 :   return SplitRangeAt(range, split_pos);
    2917             : }
    2918             : 
    2919    16841315 : LifetimePosition RegisterAllocator::FindOptimalSplitPos(LifetimePosition start,
    2920             :                                                         LifetimePosition end) {
    2921             :   int start_instr = start.ToInstructionIndex();
    2922             :   int end_instr = end.ToInstructionIndex();
    2923             :   DCHECK_LE(start_instr, end_instr);
    2924             : 
    2925             :   // We have no choice: start and end are within the same instruction.
    2926    16841315 :   if (start_instr == end_instr) return end;
    2927             : 
    2928             :   const InstructionBlock* start_block = GetInstructionBlock(code(), start);
    2929             :   const InstructionBlock* end_block = GetInstructionBlock(code(), end);
    2930             : 
    2931    10791353 :   if (end_block == start_block) {
    2932             :     // The interval is split in the same basic block. Split at the latest
    2933             :     // possible position.
    2934     7906244 :     return end;
    2935             :   }
    2936             : 
    2937             :   const InstructionBlock* block = end_block;
    2938             :   // Find header of outermost loop.
    2939             :   do {
    2940             :     const InstructionBlock* loop = GetContainingLoop(code(), block);
    2941     2987204 :     if (loop == nullptr ||
    2942             :         loop->rpo_number().ToInt() <= start_block->rpo_number().ToInt()) {
    2943             :       // No more loops, or the loop starts before the lifetime start.
    2944             :       break;
    2945             :     }
    2946             :     block = loop;
    2947             :   } while (true);
    2948             : 
    2949             :   // We did not find any suitable outer loop. Split at the latest possible
    2950             :   // position unless end_block is a loop header itself.
    2951     5671792 :   if (block == end_block && !end_block->IsLoopHeader()) return end;
    2952             : 
    2953             :   return LifetimePosition::GapFromInstructionIndex(
    2954             :       block->first_instruction_index());
    2955             : }
    2956             : 
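The do-while walk above hoists the split point to the header of the outermost loop that still lies inside the live range, keeping spill code out of loop bodies. A minimal sketch of that walk over a hypothetical block table (not the V8 types):

    #include <vector>

    struct Block {
      int first_instruction;  // index of the block's first instruction
      int loop_header;        // RPO number of the containing loop header, or -1
    };

    // blocks is indexed by RPO number. Returns the RPO number of the block
    // whose start is the preferred split point between start_rpo and end_rpo.
    int FindSplitBlock(const std::vector<Block>& blocks, int start_rpo,
                       int end_rpo) {
      int block = end_rpo;
      for (;;) {
        int header = blocks[block].loop_header;
        // Stop when there is no enclosing loop, or the loop begins at or
        // before the start of the live range.
        if (header < 0 || header <= start_rpo) break;
        block = header;
      }
      return block;  // split at blocks[block].first_instruction
    }
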
    2957     1071837 : LifetimePosition RegisterAllocator::FindOptimalSpillingPos(
    2958             :     LiveRange* range, LifetimePosition pos) {
    2959             :   const InstructionBlock* block = GetInstructionBlock(code(), pos.Start());
    2960             :   const InstructionBlock* loop_header =
    2961     1071837 :       block->IsLoopHeader() ? block : GetContainingLoop(code(), block);
    2962             : 
    2963     1071837 :   if (loop_header == nullptr) return pos;
    2964             : 
    2965             :   const UsePosition* prev_use =
    2966             :       range->PreviousUsePositionRegisterIsBeneficial(pos);
    2967             : 
    2968      239439 :   while (loop_header != nullptr) {
    2969             :     // We are going to spill the live range inside the loop.
    2970             :     // If possible, try to move the spilling position backwards to the loop
    2971             :     // header. This will reduce the number of memory moves on the back edge.
    2972             :     LifetimePosition loop_start = LifetimePosition::GapFromInstructionIndex(
    2973             :         loop_header->first_instruction_index());
    2974             : 
    2975       88999 :     if (range->Covers(loop_start)) {
    2976       67227 :       if (prev_use == nullptr || prev_use->pos() < loop_start) {
    2977             :         // No register-beneficial use inside the loop before the position.
    2978             :         pos = loop_start;
    2979             :       }
    2980             :     }
    2981             : 
    2982             :     // Try hoisting out to an outer loop.
    2983             :     loop_header = GetContainingLoop(code(), loop_header);
    2984             :   }
    2985             : 
    2986       61441 :   return pos;
    2987             : }
    2988             : 
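The loop above can be read as: starting from the innermost enclosing loop, keep moving the spill position up to each loop start that the range covers, as long as no register-beneficial use sits in between. A sketch under those assumptions, with a hypothetical HoistSpillPosition helper:

    #include <functional>
    #include <vector>

    // loop_starts: positions of the enclosing loop headers, innermost first.
    // covers(p): whether the live range covers position p.
    // prev_beneficial_use: position of the last register-beneficial use
    // before the original spill position, or -1 if there is none.
    int HoistSpillPosition(int pos, const std::vector<int>& loop_starts,
                           const std::function<bool(int)>& covers,
                           int prev_beneficial_use) {
      for (int loop_start : loop_starts) {
        if (covers(loop_start) &&
            (prev_beneficial_use < 0 || prev_beneficial_use < loop_start)) {
          // Spilling at the loop start avoids a store on every back edge.
          pos = loop_start;
        }
      }
      return pos;
    }
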
    2989    26149831 : void RegisterAllocator::Spill(LiveRange* range, SpillMode spill_mode) {
    2990             :   DCHECK(!range->spilled());
    2991             :   DCHECK(spill_mode == SpillMode::kSpillAtDefinition ||
    2992             :          GetInstructionBlock(code(), range->Start())->IsDeferred());
    2993             :   TopLevelLiveRange* first = range->TopLevel();
    2994    26149831 :   TRACE("Spilling live range %d:%d mode %d\n", first->vreg(),
    2995             :         range->relative_id(), spill_mode);
    2996             : 
    2997    26150081 :   TRACE("Starting spill type is %d\n", static_cast<int>(first->spill_type()));
    2998    26150081 :   if (first->HasNoSpillType()) {
    2999     5077169 :     TRACE("New spill range needed");
    3000     5077169 :     data()->AssignSpillRangeToLiveRange(first, spill_mode);
    3001             :   }
    3002             :   // Upgrade the spill mode, in case this was only spilled in deferred code so
    3003             :   // far.
    3004    52299913 :   if ((spill_mode == SpillMode::kSpillAtDefinition) &&
    3005             :       (first->spill_type() ==
    3006             :        TopLevelLiveRange::SpillType::kDeferredSpillRange)) {
    3007           0 :     TRACE("Upgrading\n");
    3008             :     first->set_spill_type(TopLevelLiveRange::SpillType::kSpillRange);
    3009             :   }
    3010    26150084 :   TRACE("Final spill type is %d\n", static_cast<int>(first->spill_type()));
    3011             :   range->Spill();
    3012    26150084 : }
    3013             : 
    3014           0 : const char* RegisterAllocator::RegisterName(int register_code) const {
    3015             :   return mode() == GENERAL_REGISTERS
    3016             :              ? i::RegisterName(Register::from_code(register_code))
    3017           0 :              : i::RegisterName(DoubleRegister::from_code(register_code));
    3018             : }
    3019             : 
    3020     2723002 : LinearScanAllocator::LinearScanAllocator(RegisterAllocationData* data,
    3021             :                                          RegisterKind kind, Zone* local_zone)
    3022             :     : RegisterAllocator(data, kind),
    3023             :       unhandled_live_ranges_(local_zone),
    3024             :       active_live_ranges_(local_zone),
    3025             :       inactive_live_ranges_(local_zone),
    3026             :       next_active_ranges_change_(LifetimePosition::Invalid()),
    3027     2723002 :       next_inactive_ranges_change_(LifetimePosition::Invalid()) {
    3028     2723002 :   active_live_ranges().reserve(8);
    3029     2724460 :   inactive_live_ranges().reserve(8);
    3030     2724215 : }
    3031             : 
    3032           0 : void LinearScanAllocator::MaybeUndoPreviousSplit(LiveRange* range) {
    3033           0 :   if (range->next() != nullptr && range->next()->ShouldRecombine()) {
    3034           0 :     LiveRange* to_remove = range->next();
    3035           0 :     TRACE("Recombining %d:%d with %d\n", range->TopLevel()->vreg(),
    3036             :           range->relative_id(), to_remove->relative_id());
    3037             : 
    3038             :     // Remove the range from unhandled, as attaching it will change its
    3039             :     // state and hence ordering in the unhandled set.
    3040             :     auto removed_cnt = unhandled_live_ranges().erase(to_remove);
    3041             :     DCHECK_EQ(removed_cnt, 1);
    3042             :     USE(removed_cnt);
    3043             : 
    3044           0 :     range->AttachToNext();
    3045           0 :   } else if (range->next() != nullptr) {
    3046           0 :     TRACE("No recombine for %d:%d to %d\n", range->TopLevel()->vreg(),
    3047             :           range->relative_id(), range->next()->relative_id());
    3048             :   }
    3049           0 : }
    3050             : 
    3051           0 : void LinearScanAllocator::SpillNotLiveRanges(RangeWithRegisterSet& to_be_live,
    3052             :                                              LifetimePosition position,
    3053             :                                              SpillMode spill_mode) {
    3054           0 :   for (auto it = active_live_ranges().begin();
    3055             :        it != active_live_ranges().end();) {
    3056           0 :     LiveRange* active_range = *it;
    3057             :     TopLevelLiveRange* toplevel = (*it)->TopLevel();
    3058           0 :     auto found = to_be_live.find({toplevel, kUnassignedRegister});
    3059           0 :     if (found == to_be_live.end()) {
    3060             :       // Is not contained in {to_be_live}, spill it.
    3061             :       // Fixed registers are exempt from this. They might have been
    3062             :       // added from inactive at the block boundary but we know that
    3063             :       // they cannot conflict as they are built before register
    3064             :       // allocation starts. It would be algorithmically fine to split
    3065             :       // them and reschedule, but the code does not allow this.
    3066           0 :       if (toplevel->IsFixed()) {
    3067           0 :         TRACE("Keeping reactivated fixed range for %s\n",
    3068             :               RegisterName(toplevel->assigned_register()));
    3069             :         ++it;
    3070             :       } else {
    3071             :         // When spilling a previously spilled/reloaded range, we add back the
    3072             :         // tail that we might have split off when we reloaded/spilled it
    3073             :         // previously. Otherwise we might keep generating small split-offs.
    3074           0 :         MaybeUndoPreviousSplit(active_range);
    3075           0 :         TRACE("Putting back %d:%d\n", toplevel->vreg(),
    3076             :               active_range->relative_id());
    3077           0 :         LiveRange* split = SplitRangeAt(active_range, position);
    3078             :         DCHECK_NE(split, active_range);
    3079             : 
    3080             :         // Make sure we revisit this range once it has a use that requires
    3081             :         // a register.
    3082           0 :         UsePosition* next_use = split->NextRegisterPosition(position);
    3083           0 :         if (next_use != nullptr) {
    3084             :           // Move to the start of the gap before the use so that we have
    3085             :           // space to perform the potential reload. Otherwise, do not spill
    3086             :           // but add the range to unhandled for reallocation.
    3087             :           LifetimePosition revisit_at = next_use->pos().FullStart();
    3088           0 :           TRACE("Next use at %d\n", revisit_at.value());
    3089           0 :           if (!data()->IsBlockBoundary(revisit_at)) {
    3090             :             // Leave some space so we have enough gap room.
    3091             :             revisit_at = revisit_at.PrevStart().FullStart();
    3092             :           }
    3093             :           // If this range became live right at the block boundary that we are
    3094             :           // currently processing, we do not need to split it. Instead move it
    3095             :           // to unhandled right away.
    3096           0 :           if (position < revisit_at) {
    3097           0 :             LiveRange* third_part = SplitRangeAt(split, revisit_at);
    3098             :             DCHECK_NE(split, third_part);
    3099           0 :             Spill(split, spill_mode);
    3100           0 :             TRACE("Marking %d:%d to recombine\n", toplevel->vreg(),
    3101             :                   third_part->relative_id());
    3102             :             third_part->SetRecombine();
    3103           0 :             AddToUnhandled(third_part);
    3104             :           } else {
    3105           0 :             AddToUnhandled(split);
    3106             :           }
    3107             :         } else {
    3108           0 :           Spill(split, spill_mode);
    3109             :         }
    3110           0 :         it = ActiveToHandled(it);
    3111             :       }
    3112             :     } else {
    3113             :       // This range is contained in {to_be_live}, so we can keep it.
    3114           0 :       int expected_register = (*found).expected_register;
    3115             :       to_be_live.erase(found);
    3116           0 :       if (expected_register == active_range->assigned_register()) {
    3117             :         // Was live and in the correct register; simply pass through.
    3118           0 :         TRACE("Keeping %d:%d in %s\n", toplevel->vreg(),
    3119             :               active_range->relative_id(),
    3120             :               RegisterName(active_range->assigned_register()));
    3121             :         ++it;
    3122             :       } else {
    3123             :         // Was live but in the wrong register. Split and schedule for
    3124             :         // allocation.
    3125           0 :         TRACE("Scheduling %d:%d\n", toplevel->vreg(),
    3126             :               active_range->relative_id());
    3127           0 :         LiveRange* split = SplitRangeAt(active_range, position);
    3128             :         split->set_controlflow_hint(expected_register);
    3129           0 :         AddToUnhandled(split);
    3130           0 :         it = ActiveToHandled(it);
    3131             :       }
    3132             :     }
    3133             :   }
    3134           0 : }
    3135             : 
    3136           0 : LiveRange* LinearScanAllocator::AssignRegisterOnReload(LiveRange* range,
    3137             :                                                        int reg) {
    3138             :   // We know the register is currently free but it might be in
    3139             :   // use by a currently inactive range. So we might not be able
    3140             :   // to reload for the full distance. In that case, split here.
    3141             :   // TODO(herhut):
    3142             :   // It might be better if we could use the normal unhandled queue and
    3143             :   // give reloading registers precedence. That way we would compute the
    3144             :   // intersection for the entire future.
    3145             :   LifetimePosition new_end = range->End();
    3146           0 :   for (const auto inactive : inactive_live_ranges()) {
    3147             :     if (kSimpleFPAliasing || !check_fp_aliasing()) {
    3148           0 :       if (inactive->assigned_register() != reg) continue;
    3149             :     } else {
    3150             :       bool conflict = inactive->assigned_register() == reg;
    3151             :       if (!conflict) {
    3152             :         int alias_base_index = -1;
    3153             :         int aliases = data()->config()->GetAliases(range->representation(), reg,
    3154             :                                                    inactive->representation(),
    3155             :                                                    &alias_base_index);
    3156             :         DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
    3157             :         while (aliases-- && !conflict) {
    3158             :           int aliased_reg = alias_base_index + aliases;
    3159             :           if (aliased_reg == reg) {
    3160             :             conflict = true;
    3161             :           }
    3162             :         }
    3163             :       }
    3164             :       if (!conflict) continue;
    3165             :     }
    3166           0 :     for (auto interval = inactive->first_interval(); interval != nullptr;
    3167             :          interval = interval->next()) {
    3168           0 :       if (interval->start() > new_end) break;
    3169           0 :       if (interval->end() <= range->Start()) continue;
    3170           0 :       if (new_end > interval->start()) new_end = interval->start();
    3171             :     }
    3172             :   }
    3173           0 :   if (new_end != range->End()) {
    3174           0 :     TRACE("Found new end for %d:%d at %d\n", range->TopLevel()->vreg(),
    3175             :           range->relative_id(), new_end.value());
    3176           0 :     LiveRange* tail = SplitRangeAt(range, new_end);
    3177           0 :     AddToUnhandled(tail);
    3178             :   }
    3179           0 :   SetLiveRangeAssignedRegister(range, reg);
    3180           0 :   return range;
    3181             : }
    3182             : 
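The interval walk above shortens the reload so it ends before the first point at which a conflicting inactive range needs the register again. A standalone sketch of that end computation; Interval and ClipReloadEnd are hypothetical, and the intervals are assumed sorted by start as they are within a live range:

    #include <algorithm>
    #include <vector>

    struct Interval {
      int start;
      int end;
    };

    // Given the intervals of inactive ranges assigned to the same register,
    // shrink [range_start, range_end) so it stops before the first conflict.
    int ClipReloadEnd(int range_start, int range_end,
                      const std::vector<Interval>& conflicting_intervals) {
      int new_end = range_end;
      for (const Interval& interval : conflicting_intervals) {
        if (interval.start > new_end) break;        // no later conflicts
        if (interval.end <= range_start) continue;  // ends before we begin
        new_end = std::min(new_end, interval.start);
      }
      return new_end;
    }
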
    3183           0 : void LinearScanAllocator::ReloadLiveRanges(RangeWithRegisterSet& to_be_live,
    3184             :                                            LifetimePosition position) {
    3185             :   // Assumption: All ranges in {to_be_live} are currently spilled and there are
    3186             :   // no conflicting registers in the active ranges.
    3187             :   // The former is ensured by SpillNotLiveRanges; the latter holds by
    3188             :   // construction of the to_be_live set.
    3189           0 :   for (RangeWithRegister range_with_register : to_be_live) {
    3190             :     TopLevelLiveRange* range = range_with_register.range;
    3191             :     int reg = range_with_register.expected_register;
    3192           0 :     LiveRange* to_resurrect = range->GetChildCovers(position);
    3193           0 :     if (to_resurrect == nullptr) {
    3194             :       // While the range was live until the end of the predecessor block, it
    3195             :       // is not live in this block. Either there is a lifetime gap or the
    3196             :       // range died.
    3197           0 :       TRACE("No candidate for %d at %d\n", range->vreg(), position.value());
    3198             :     } else {
    3199             :       // We might be resurrecting a range that we previously spilled until
    3200             :       // its next use. In such cases, we have to unsplit it before
    3201             :       // processing, as otherwise we might get register changes from one
    3202             :       // range to the other in the middle of a block.
    3203             :       // If there is a gap between this range and the next, we can just keep
    3204             :       // it, as a register change won't hurt.
    3205           0 :       MaybeUndoPreviousSplit(to_resurrect);
    3206           0 :       if (to_resurrect->Start() == position) {
    3207             :         // This range already starts at this block. It might have been spilled,
    3208             :         // so we have to unspill it. Otherwise, it is already in the unhandled
    3209             :         // queue waiting for processing.
    3210             :         DCHECK(!to_resurrect->HasRegisterAssigned());
    3211           0 :         TRACE("Reload %d:%d starting at %d itself\n", range->vreg(),
    3212             :               to_resurrect->relative_id(), position.value());
    3213           0 :         if (to_resurrect->spilled()) {
    3214             :           to_resurrect->Unspill();
    3215           0 :           to_resurrect->set_controlflow_hint(reg);
    3216           0 :           AddToUnhandled(to_resurrect);
    3217             :         } else {
    3218             :           // Assign the preassigned register if we know it. Otherwise, there
    3219             :           // is nothing to do, as the range is already in unhandled.
    3220           0 :           if (reg != kUnassignedRegister) {
    3221             :             auto erased_cnt = unhandled_live_ranges().erase(to_resurrect);
    3222             :             DCHECK_EQ(erased_cnt, 1);
    3223             :             USE(erased_cnt);
    3224             :             // We know that there is no conflict with active ranges, so just
    3225             :             // assign the register to the range.
    3226           0 :             to_resurrect = AssignRegisterOnReload(to_resurrect, reg);
    3227           0 :             AddToActive(to_resurrect);
    3228             :           }
    3229             :         }
    3230             :       } else {
    3231             :         // This range was spilled before. We have to split it and schedule the
    3232             :         // second part for allocation (or assign the register if we know).
    3233             :         DCHECK(to_resurrect->spilled());
    3234           0 :         LiveRange* split = SplitRangeAt(to_resurrect, position);
    3235           0 :         TRACE("Reload %d:%d starting at %d as %d\n", range->vreg(),
    3236             :               to_resurrect->relative_id(), split->Start().value(),
    3237             :               split->relative_id());
    3238             :         DCHECK_NE(split, to_resurrect);
    3239           0 :         if (reg != kUnassignedRegister) {
    3240             :           // We know that there is no conflict with active ranges, so just
    3241             :           // assign the register to the range.
    3242           0 :           split = AssignRegisterOnReload(split, reg);
    3243           0 :           AddToActive(split);
    3244             :         } else {
    3245             :           // Let normal register assignment find a suitable register.
    3246             :           split->set_controlflow_hint(reg);
    3247           0 :           AddToUnhandled(split);
    3248             :         }
    3249             :       }
    3250             :     }
    3251             :   }
    3252           0 : }
    3253             : 
    3254           0 : RpoNumber LinearScanAllocator::ChooseOneOfTwoPredecessorStates(
    3255             :     InstructionBlock* current_block, LifetimePosition boundary) {
    3256             :   using SmallRangeVector =
    3257             :       base::SmallVector<TopLevelLiveRange*,
    3258             :                         RegisterConfiguration::kMaxRegisters>;
    3259             :   // Pick the state that would generate the fewest spills/reloads.
    3260             :   // Compute vectors of ranges with imminent use for both sides.
    3261             :   // As GetChildCovers is cached, it is cheaper to call it repeatedly
    3262             :   // than to compute a shared set first.
    3263             :   auto& left = data()->GetSpillState(current_block->predecessors()[0]);
    3264             :   auto& right = data()->GetSpillState(current_block->predecessors()[1]);
    3265             :   SmallRangeVector left_used;
    3266           0 :   for (const auto item : left) {
    3267           0 :     LiveRange* at_next_block = item->TopLevel()->GetChildCovers(boundary);
    3268           0 :     if (at_next_block != nullptr &&
    3269           0 :         at_next_block->NextUsePositionRegisterIsBeneficial(boundary) !=
    3270             :             nullptr) {
    3271           0 :       left_used.emplace_back(item->TopLevel());
    3272             :     }
    3273             :   }
    3274             :   SmallRangeVector right_used;
    3275           0 :   for (const auto item : right) {
    3276           0 :     LiveRange* at_next_block = item->TopLevel()->GetChildCovers(boundary);
    3277           0 :     if (at_next_block != nullptr &&
    3278           0 :         at_next_block->NextUsePositionRegisterIsBeneficial(boundary) !=
    3279             :             nullptr) {
    3280           0 :       right_used.emplace_back(item->TopLevel());
    3281             :     }
    3282             :   }
    3283           0 :   if (left_used.empty() && right_used.empty()) {
    3284             :     // There are no beneficial register uses, so look at any use at all.
    3285             :     // We do not account for every kind of use (such as flowing into a
    3286             :     // phi), so we just look at ranges that are still live.
    3287           0 :     TRACE("Looking at only uses\n");
    3288           0 :     for (const auto item : left) {
    3289           0 :       LiveRange* at_next_block = item->TopLevel()->GetChildCovers(boundary);
    3290           0 :       if (at_next_block != nullptr &&
    3291             :           at_next_block->NextUsePosition(boundary) != nullptr) {
    3292           0 :         left_used.emplace_back(item->TopLevel());
    3293             :       }
    3294             :     }
    3295           0 :     for (const auto item : right) {
    3296           0 :       LiveRange* at_next_block = item->TopLevel()->GetChildCovers(boundary);
    3297           0 :       if (at_next_block != nullptr &&
    3298             :           at_next_block->NextUsePosition(boundary) != nullptr) {
    3299           0 :         right_used.emplace_back(item->TopLevel());
    3300             :       }
    3301             :     }
    3302             :   }
    3303             :   // Now left_used and right_used contain the ranges that matter.
    3304             :   // Pick the side that matches the most of them.
    3305           0 :   TRACE("Vote went %zu vs %zu\n", left_used.size(), right_used.size());
    3306             :   return left_used.size() > right_used.size()
    3307             :              ? current_block->predecessors()[0]
    3308           0 :              : current_block->predecessors()[1];
    3309             : }
    3310             : 
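In short, the function above counts, for each of the two predecessors, how many of its live-out-in-register ranges still have an imminent use past the boundary, and picks the side with more survivors. A compact sketch with a hypothetical has_use_after callback:

    #include <functional>
    #include <vector>

    // Count how many of the ranges live at the end of each predecessor still
    // have a use at or after `boundary`; pick the predecessor with the
    // higher count. Returns 0 for the left predecessor, 1 for the right.
    int ChoosePredecessor(const std::vector<int>& left_ranges,
                          const std::vector<int>& right_ranges, int boundary,
                          const std::function<bool(int, int)>& has_use_after) {
      auto count_used = [&](const std::vector<int>& ranges) {
        int used = 0;
        for (int vreg : ranges) {
          if (has_use_after(vreg, boundary)) used++;
        }
        return used;
      };
      // Ties go to the right predecessor, mirroring the `>` comparison above.
      return count_used(left_ranges) > count_used(right_ranges) ? 0 : 1;
    }
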
    3311           0 : void LinearScanAllocator::ComputeStateFromManyPredecessors(
    3312             :     InstructionBlock* current_block, RangeWithRegisterSet* to_be_live) {
    3313             :   struct Vote {
    3314             :     size_t count;
    3315             :     int used_registers[RegisterConfiguration::kMaxRegisters];
    3316             :   };
    3317             :   struct TopLevelLiveRangeComparator {
    3318             :     bool operator()(const TopLevelLiveRange* lhs,
    3319             :                     const TopLevelLiveRange* rhs) const {
    3320           0 :       return lhs->vreg() < rhs->vreg();
    3321             :     }
    3322             :   };
    3323             :   ZoneMap<TopLevelLiveRange*, Vote, TopLevelLiveRangeComparator> counts(
    3324             :       data()->allocation_zone());
    3325             :   int deferred_blocks = 0;
    3326           0 :   for (RpoNumber pred : current_block->predecessors()) {
    3327           0 :     if (!ConsiderBlockForControlFlow(current_block, pred)) {
    3328             :       // Back edges of a loop count as deferred here too.
    3329           0 :       deferred_blocks++;
    3330           0 :       continue;
    3331             :     }
    3332             :     const auto& pred_state = data()->GetSpillState(pred);
    3333           0 :     for (LiveRange* range : pred_state) {
    3334             :       // We might have spilled the register backwards, so the range we
    3335             :       // stored might have lost its register. Ignore those.
    3336           0 :       if (!range->HasRegisterAssigned()) continue;
    3337           0 :       TopLevelLiveRange* toplevel = range->TopLevel();
    3338             :       auto previous = counts.find(toplevel);
    3339           0 :       if (previous == counts.end()) {
    3340           0 :         auto result = counts.emplace(std::make_pair(toplevel, Vote{1, {0}}));
    3341           0 :         CHECK(result.second);
    3342           0 :         result.first->second.used_registers[range->assigned_register()]++;
    3343             :       } else {
    3344           0 :         previous->second.count++;
    3345           0 :         previous->second.used_registers[range->assigned_register()]++;
    3346             :       }
    3347             :     }
    3348             :   }
    3349             : 
    3350             :   // Choose the live ranges that a majority of predecessors agree on.
    3351             :   const size_t majority =
    3352           0 :       (current_block->PredecessorCount() + 2 - deferred_blocks) / 2;
    3353           0 :   bool taken_registers[RegisterConfiguration::kMaxRegisters] = {0};
    3354           0 :   auto assign_to_live = [this, counts, majority](
    3355             :                             std::function<bool(TopLevelLiveRange*)> filter,
    3356             :                             RangeWithRegisterSet* to_be_live,
    3357           0 :                             bool* taken_registers) {
    3358           0 :     for (const auto& val : counts) {
    3359           0 :       if (!filter(val.first)) continue;
    3360           0 :       if (val.second.count >= majority) {
    3361             :         int register_max = 0;
    3362           0 :         int reg = kUnassignedRegister;
    3363           0 :         for (int idx = 0; idx < RegisterConfiguration::kMaxRegisters; idx++) {
    3364           0 :           int uses = val.second.used_registers[idx];
    3365           0 :           if (uses == 0) continue;
    3366           0 :           if (uses > register_max) {
    3367           0 :             reg = idx;
    3368             :             register_max = val.second.used_registers[idx];
    3369           0 :           } else if (taken_registers[reg] && uses == register_max) {
    3370           0 :             reg = idx;
    3371             :           }
    3372             :         }
    3373           0 :         if (taken_registers[reg]) {
    3374           0 :           reg = kUnassignedRegister;
    3375             :         } else {
    3376           0 :           taken_registers[reg] = true;
    3377             :         }
    3378           0 :         to_be_live->emplace(val.first, reg);
    3379           0 :         TRACE("Reset %d as live due to vote %zu in %s\n",
    3380             :               val.first->TopLevel()->vreg(), val.second.count,
    3381             :               reg == kUnassignedRegister ? "unassigned" : RegisterName(reg));
    3382             :       }
    3383             :     }
    3384           0 :   };
    3385             :   // First round, process fixed registers, as these have precedence.
    3386             :   // There is only one fixed range per register, so we cannot have
    3387             :   // conflicts.
    3388           0 :   assign_to_live([](TopLevelLiveRange* r) { return r->IsFixed(); }, to_be_live,
    3389           0 :                  taken_registers);
    3390             :   // Second round, process the rest.
    3391           0 :   assign_to_live([](TopLevelLiveRange* r) { return !r->IsFixed(); }, to_be_live,
    3392           0 :                  taken_registers);
    3393           0 : }
    3394             : 
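A condensed sketch of the voting scheme above, with hypothetical containers in place of the V8 types: a virtual register stays live across the merge only if it had a register on at least `majority` of the considered predecessors, and it keeps the most popular register unless that register has already been handed out.

    #include <array>
    #include <map>

    constexpr int kMaxRegs = 32;  // stand-in for kMaxRegisters
    constexpr int kUnassigned = -1;

    struct Vote {
      int count = 0;                     // predecessors where the vreg was live
      std::array<int, kMaxRegs> uses{};  // per-register occurrence counts
    };

    // Returns vreg -> chosen register (kUnassigned lets the allocator decide).
    std::map<int, int> PickLiveSet(const std::map<int, Vote>& votes,
                                   int majority) {
      std::map<int, int> to_be_live;
      std::array<bool, kMaxRegs> taken{};
      for (const auto& [vreg, vote] : votes) {
        if (vote.count < majority) continue;
        int reg = kUnassigned, best = 0;
        for (int r = 0; r < kMaxRegs; ++r) {
          if (vote.uses[r] > best) { reg = r; best = vote.uses[r]; }
        }
        if (reg != kUnassigned && taken[reg]) {
          reg = kUnassigned;  // conflict with an already-chosen range
        } else if (reg != kUnassigned) {
          taken[reg] = true;
        }
        to_be_live.emplace(vreg, reg);
      }
      return to_be_live;
    }
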
    3395           0 : bool LinearScanAllocator::ConsiderBlockForControlFlow(
    3396             :     InstructionBlock* current_block, RpoNumber predecessor) {
    3397             :   // We ignore predecessors on back edges when looking for control flow effects,
    3398             :   // as those lie in the future of allocation and we have no data yet. Also,
    3399             :   // deferred blocks are ignored on deferred-to-non-deferred boundaries, as we
    3400             :   // do not want them to influence allocation of non-deferred code.
    3401           0 :   return (predecessor < current_block->rpo_number()) &&
    3402           0 :          (current_block->IsDeferred() ||
    3403           0 :           !code()->InstructionBlockAt(predecessor)->IsDeferred());
    3404             : }
    3405             : 
    3406           0 : bool LinearScanAllocator::BlockIsDeferredOrImmediatePredecessorIsNotDeferred(
    3407             :     const InstructionBlock* block) {
    3408           0 :   if (block->IsDeferred()) return true;
    3409           0 :   if (block->PredecessorCount() == 0) return true;
    3410             :   bool pred_is_deferred = false;
    3411           0 :   for (auto pred : block->predecessors()) {
    3412           0 :     if (pred.IsNext(block->rpo_number())) {
    3413           0 :       pred_is_deferred = code()->InstructionBlockAt(pred)->IsDeferred();
    3414           0 :       break;
    3415             :     }
    3416             :   }
    3417           0 :   return !pred_is_deferred;
    3418             : }
    3419             : 
    3420     2722956 : void LinearScanAllocator::AllocateRegisters() {
    3421             :   DCHECK(unhandled_live_ranges().empty());
    3422             :   DCHECK(active_live_ranges().empty());
    3423             :   DCHECK(inactive_live_ranges().empty());
    3424             : 
    3425     2722956 :   SplitAndSpillRangesDefinedByMemoryOperand();
    3426             :   data()->ResetSpillState();
    3427             : 
    3428     2723952 :   if (FLAG_trace_alloc) {
    3429           0 :     PrintRangeOverview(std::cout);
    3430             :   }
    3431             : 
    3432             :   const size_t live_ranges_size = data()->live_ranges().size();
    3433   135402659 :   for (TopLevelLiveRange* range : data()->live_ranges()) {
    3434   132679058 :     CHECK_EQ(live_ranges_size,
    3435             :              data()->live_ranges().size());  // TODO(neis): crbug.com/831822
    3436   132679058 :     if (!CanProcessRange(range)) continue;
    3437   166107316 :     for (LiveRange* to_add = range; to_add != nullptr;
    3438             :          to_add = to_add->next()) {
    3439    59749499 :       if (!to_add->spilled()) {
    3440    41256744 :         AddToUnhandled(to_add);
    3441             :       }
    3442             :     }
    3443             :   }
    3444             : 
    3445     2723601 :   if (mode() == GENERAL_REGISTERS) {
    3446    42780027 :     for (TopLevelLiveRange* current : data()->fixed_live_ranges()) {
    3447    40261515 :       if (current != nullptr) AddToInactive(current);
    3448             :     }
    3449             :   } else {
    3450     3516707 :     for (TopLevelLiveRange* current : data()->fixed_double_live_ranges()) {
    3451     3309853 :       if (current != nullptr) AddToInactive(current);
    3452             :     }
    3453             :     if (!kSimpleFPAliasing && check_fp_aliasing()) {
    3454             :       for (TopLevelLiveRange* current : data()->fixed_float_live_ranges()) {
    3455             :         if (current != nullptr) AddToInactive(current);
    3456             :       }
    3457             :       for (TopLevelLiveRange* current : data()->fixed_simd128_live_ranges()) {
    3458             :         if (current != nullptr) AddToInactive(current);
    3459             :       }
    3460             :     }
    3461             :   }
    3462             : 
    3463             :   RpoNumber last_block = RpoNumber::FromInt(0);
    3464             :   RpoNumber max_blocks =
    3465     2725366 :       RpoNumber::FromInt(code()->InstructionBlockCount() - 1);
    3466             :   LifetimePosition next_block_boundary =
    3467             :       LifetimePosition::InstructionFromInstructionIndex(
    3468             :           data()
    3469             :               ->code()
    3470             :               ->InstructionBlockAt(last_block)
    3471     2725366 :               ->last_instruction_index())
    3472             :           .NextFullStart();
    3473             :   SpillMode spill_mode = SpillMode::kSpillAtDefinition;
    3474             : 
    3475             :   // Process all ranges. We also need to ensure that we have seen all block
    3476             :   // boundaries. Linear scan might have assigned and spilled ranges before
    3477             :   // reaching the last block and hence we would ignore control flow effects for
    3478             :   // those. Not only does this produce a potentially bad assignment, it also
    3479             :   // breaks the invariant that we undo spills that happen in deferred code
    3480             :   // when crossing a deferred/non-deferred boundary.
    3481    52874279 :   while (
    3482    52874279 :       !unhandled_live_ranges().empty() ||
    3483           0 :       (FLAG_turbo_control_flow_aware_allocation && last_block < max_blocks)) {
    3484             :     LiveRange* current = unhandled_live_ranges().empty()
    3485             :                              ? nullptr
    3486    50150317 :                              : *unhandled_live_ranges().begin();
    3487             :     LifetimePosition position =
    3488    50150317 :         current ? current->Start() : next_block_boundary;
    3489             : #ifdef DEBUG
    3490             :     allocation_finger_ = position;
    3491             : #endif
    3492    50150317 :     if (FLAG_turbo_control_flow_aware_allocation) {
    3493             :       // Splintering is not supported.
    3494           0 :       CHECK(!FLAG_turbo_preprocess_ranges);
    3495             :       // Check whether we just moved across a block boundary. This will trigger
    3496             :       // for the first range that is past the current boundary.
    3497           0 :       if (position >= next_block_boundary) {
    3498           0 :         TRACE("Processing boundary at %d leaving %d\n",
    3499             :               next_block_boundary.value(), last_block.ToInt());
    3500             : 
    3501             :         // Forward the state to just before the block boundary.
    3502           0 :         LifetimePosition end_of_block = next_block_boundary.PrevStart().End();
    3503           0 :         ForwardStateTo(end_of_block);
    3504             : 
    3505             :         // Remember this state.
    3506             :         InstructionBlock* current_block = data()->code()->GetInstructionBlock(
    3507           0 :             next_block_boundary.ToInstructionIndex());
    3508             : 
    3509             :         // Store current spill state (as the state at end of block). For
    3510             :         // simplicity, we store the active ranges, e.g., the live ranges that
    3511             :         // are not spilled.
    3512             :         data()->RememberSpillState(last_block, active_live_ranges());
    3513             : 
    3514             :         // Only reset the state if this was not a direct fallthrough. Otherwise
    3515             :         // control flow resolution will get confused (it does not expect changes
    3516             :         // across fallthrough edges).
    3517           0 :         bool fallthrough = (current_block->PredecessorCount() == 1) &&
    3518             :                            current_block->predecessors()[0].IsNext(
    3519             :                                current_block->rpo_number());
    3520             : 
    3521             :         spill_mode = current_block->IsDeferred()
    3522             :                          ? SpillMode::kSpillDeferred
    3523           0 :                          : SpillMode::kSpillAtDefinition;
    3524             : 
    3525           0 :         if (!fallthrough) {
    3526             : #ifdef DEBUG
    3527             :           // Allow allocation at current position.
    3528             :           allocation_finger_ = next_block_boundary;
    3529             : #endif
    3530             : 
    3531             :           // We are currently at next_block_boundary - 1. Move the state to the
    3532             :           // actual block boundary position. In particular, we have to
    3533             :           // reactivate inactive ranges so that they get rescheduled for
    3534             :           // allocation if they were not live at the predecessors.
    3535           0 :           ForwardStateTo(next_block_boundary);
    3536             : 
    3537             :           RangeWithRegisterSet to_be_live(data()->allocation_zone());
    3538             : 
    3539             :           // If we end up deciding to use the state of the immediate
    3540             :           // predecessor, it is better not to perform a change. It would lead to
    3541             :           // the same outcome anyway.
    3542             :           // This may never happen on boundaries between deferred and
    3543             :           // non-deferred code, as we rely on explicit respill to ensure we
    3544             :           // spill at definition.
    3545             :           bool no_change_required = false;
    3546             : 
    3547             :           auto pick_state_from = [this, current_block](
    3548             :                                      RpoNumber pred,
    3549           0 :                                      RangeWithRegisterSet* to_be_live) -> bool {
    3550           0 :             TRACE("Using information from B%d\n", pred.ToInt());
    3551             :             // If this is a fall-through that is not across a deferred
    3552             :             // boundary, there is nothing to do.
    3553             :             bool is_noop = pred.IsNext(current_block->rpo_number());
    3554           0 :             if (!is_noop) {
    3555             :               auto& spill_state = data()->GetSpillState(pred);
    3556           0 :               TRACE("Not a fallthrough. Adding %zu elements...\n",
    3557             :                     spill_state.size());
    3558           0 :               for (const auto range : spill_state) {
    3559             :                 // Filter out ranges that had their register stolen by backwards
    3560             :                 // working spill heuristics. These have been spilled after the
    3561             :                 // fact, so ignore them.
    3562           0 :                 if (!range->HasRegisterAssigned()) continue;
    3563             :                 to_be_live->emplace(range);
    3564             :               }
    3565             :             }
    3566           0 :             return is_noop;
    3567           0 :           };
    3568             : 
    3569             :           // Multiple cases here:
    3570             :           // 1) We have a single predecessor => this is a control flow split, so
    3571             :           //     just restore the predecessor state.
    3572             :           // 2) We have two predecessors => this is a conditional, so decide
    3573             :           //     based on forward uses, trying to benefit the same branch
    3574             :           //     if in doubt (make one path fast).
    3575             :           // 3) We have many predecessors => this is a switch. Compute union
    3576             :           //     based on majority, break ties by looking forward.
    3577           0 :           if (current_block->PredecessorCount() == 1) {
    3578           0 :             TRACE("Single predecessor for B%d\n",
    3579             :                   current_block->rpo_number().ToInt());
    3580             :             no_change_required =
    3581           0 :                 pick_state_from(current_block->predecessors()[0], &to_be_live);
    3582           0 :           } else if (current_block->PredecessorCount() == 2) {
    3583           0 :             TRACE("Two predecessors for B%d\n",
    3584             :                   current_block->rpo_number().ToInt());
    3585             :             // If one of the branches does not contribute any information,
    3586             :             // e.g. because it is deferred or a back edge, we can short cut
    3587             :             // here right away.
    3588             :             RpoNumber chosen_predecessor = RpoNumber::Invalid();
    3589           0 :             if (!ConsiderBlockForControlFlow(
    3590             :                     current_block, current_block->predecessors()[0])) {
    3591           0 :               chosen_predecessor = current_block->predecessors()[1];
    3592           0 :             } else if (!ConsiderBlockForControlFlow(
    3593             :                            current_block, current_block->predecessors()[1])) {
    3594           0 :               chosen_predecessor = current_block->predecessors()[0];
    3595             :             } else {
    3596             :               chosen_predecessor = ChooseOneOfTwoPredecessorStates(
    3597           0 :                   current_block, next_block_boundary);
    3598             :             }
    3599             :             no_change_required =
    3600           0 :                 pick_state_from(chosen_predecessor, &to_be_live);
    3601             : 
    3602             :           } else {
    3603             :             // Merge at the end of, e.g., a switch.
    3604           0 :             ComputeStateFromManyPredecessors(current_block, &to_be_live);
    3605             :           }
    3606             : 
    3607           0 :           if (!no_change_required) {
    3608           0 :             SpillNotLiveRanges(to_be_live, next_block_boundary, spill_mode);
    3609           0 :             ReloadLiveRanges(to_be_live, next_block_boundary);
    3610             :           }
    3611             : 
    3612             :           // TODO(herhut) Check removal.
    3613             :           // Now forward to current position
    3614           0 :           ForwardStateTo(next_block_boundary);
    3615             :         }
    3616             :         // Update block information
    3617             :         last_block = current_block->rpo_number();
    3618             :         next_block_boundary = LifetimePosition::InstructionFromInstructionIndex(
    3619             :                                   current_block->last_instruction_index())
    3620             :                                   .NextFullStart();
    3621             : 
    3622             :         // We might have created new unhandled live ranges, so cycle around the
    3623             :         // loop to make sure we pick the top most range in unhandled for
    3624             :         // processing.
    3625             :         continue;
    3626             :       }
    3627             :     }
    3628             : 
    3629             :     DCHECK_NOT_NULL(current);
    3630             : 
    3631    50150317 :     TRACE("Processing interval %d:%d start=%d\n", current->TopLevel()->vreg(),
    3632             :           current->relative_id(), position.value());
    3633             : 
    3634             :     // Now we can erase current, as we are sure to process it.
    3635             :     unhandled_live_ranges().erase(unhandled_live_ranges().begin());
    3636             : 
    3637    50149062 :     if (current->IsTopLevel() && TryReuseSpillForPhi(current->TopLevel()))
    3638             :       continue;
    3639             : 
    3640    50136471 :     ForwardStateTo(position);
    3641             : 
    3642             :     DCHECK(!current->HasRegisterAssigned() && !current->spilled());
    3643             : 
    3644    50137068 :     ProcessCurrentRange(current, spill_mode);
    3645             :   }
    3646             : 
    3647     2723962 :   if (FLAG_trace_alloc) {
    3648           0 :     PrintRangeOverview(std::cout);
    3649             :   }
    3650     2723962 : }
    3651             : 
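Stripped of the control-flow-aware machinery, the driver above follows the classic linear-scan shape: repeatedly take the unhandled range with the smallest start, retire ranges the scan position has passed, then allocate the current range. A textbook-style sketch under those simplifications (register selection and spilling elided):

    #include <set>
    #include <vector>

    struct Range {
      int start;
      int end;
      bool operator<(const Range& other) const { return start < other.start; }
    };

    void LinearScan(std::multiset<Range> unhandled) {
      std::vector<Range> active;
      while (!unhandled.empty()) {
        Range current = *unhandled.begin();
        unhandled.erase(unhandled.begin());
        // Retire active ranges that end at or before the current position.
        // (The real allocator also moves non-covering ranges to inactive.)
        for (auto it = active.begin(); it != active.end();) {
          if (it->end <= current.start) {
            it = active.erase(it);  // range is over; its register is free
          } else {
            ++it;
          }
        }
        // Try to assign a free register here; on failure, split or spill.
        active.push_back(current);
      }
    }
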
    3652     2672432 : bool LinearScanAllocator::TrySplitAndSpillSplinter(LiveRange* range) {
    3653             :   DCHECK(!FLAG_turbo_control_flow_aware_allocation);
    3654             :   DCHECK(range->TopLevel()->IsSplinter());
    3655             :   // If we can spill the whole range, great. Otherwise, split above the
    3656             :   // first use needing a register and spill the top part.
    3657     2672432 :   const UsePosition* next_reg = range->NextRegisterPosition(range->Start());
    3658     2672393 :   if (next_reg == nullptr) {
    3659     2007011 :     Spill(range, SpillMode::kSpillAtDefinition);
    3660     2007011 :     return true;
    3661      665382 :   } else if (range->FirstHintPosition() == nullptr) {
    3662             :     // If there was no hint, but we have a use position requiring a
    3663             :     // register, apply the hot path heuristics.
    3664             :     return false;
    3665      479302 :   } else if (next_reg->pos().PrevStart() > range->Start()) {
    3666      242748 :     LiveRange* tail = SplitRangeAt(range, next_reg->pos().PrevStart());
    3667      242748 :     AddToUnhandled(tail);
    3668      242748 :     Spill(range, SpillMode::kSpillAtDefinition);
    3669      242748 :     return true;
    3670             :   }
    3671             :   return false;
    3672             : }
    3673             : 
    3674    43564732 : void LinearScanAllocator::SetLiveRangeAssignedRegister(LiveRange* range,
    3675             :                                                        int reg) {
    3676    43564732 :   data()->MarkAllocated(range->representation(), reg);
    3677             :   range->set_assigned_register(reg);
    3678             :   range->SetUseHints(reg);
    3679             :   range->UpdateBundleRegister(reg);
    3680    69612127 :   if (range->IsTopLevel() && range->TopLevel()->is_phi()) {
    3681             :     data()->GetPhiMapValueFor(range->TopLevel())->set_assigned_register(reg);
    3682             :   }
    3683    43565317 : }
    3684             : 
    3685    43565073 : void LinearScanAllocator::AddToActive(LiveRange* range) {
    3686    43565073 :   TRACE("Add live range %d:%d in %s to active\n", range->TopLevel()->vreg(),
    3687             :         range->relative_id(), RegisterName(range->assigned_register()));
    3688    43565073 :   active_live_ranges().push_back(range);
    3689             :   next_active_ranges_change_ =
    3690   130695531 :       std::min(next_active_ranges_change_, range->NextEndAfter(range->Start()));
    3691    43565177 : }
    3692             : 
    3693    25792275 : void LinearScanAllocator::AddToInactive(LiveRange* range) {
    3694    25792275 :   TRACE("Add live range %d:%d to inactive\n", range->TopLevel()->vreg(),
    3695             :         range->relative_id());
    3696    25792275 :   inactive_live_ranges().push_back(range);
    3697             :   next_inactive_ranges_change_ = std::min(
    3698    77376483 :       next_inactive_ranges_change_, range->NextStartAfter(range->Start()));
    3699    25792161 : }
    3700             : 
    3701    50148470 : void LinearScanAllocator::AddToUnhandled(LiveRange* range) {
    3702    50148470 :   if (range == nullptr || range->IsEmpty()) return;
    3703             :   DCHECK(!range->HasRegisterAssigned() && !range->spilled());
    3704             :   DCHECK(allocation_finger_ <= range->Start());
    3705             : 
    3706    50149849 :   TRACE("Add live range %d:%d to unhandled\n", range->TopLevel()->vreg(),
    3707             :         range->relative_id());
    3708             :   unhandled_live_ranges().insert(range);
    3709             : }
    3710             : 
    3711    53430331 : ZoneVector<LiveRange*>::iterator LinearScanAllocator::ActiveToHandled(
    3712             :     const ZoneVector<LiveRange*>::iterator it) {
    3713    53430331 :   TRACE("Moving live range %d:%d from active to handled\n",
    3714             :         (*it)->TopLevel()->vreg(), (*it)->relative_id());
    3715   106860553 :   return active_live_ranges().erase(it);
    3716             : }
    3717             : 
    3718    26188132 : ZoneVector<LiveRange*>::iterator LinearScanAllocator::ActiveToInactive(
    3719             :     const ZoneVector<LiveRange*>::iterator it, LifetimePosition position) {
    3720    26188132 :   LiveRange* range = *it;
    3721    26188132 :   inactive_live_ranges().push_back(range);
    3722    26188135 :   TRACE("Moving live range %d:%d from active to inactive\n",
    3723             :         (range)->TopLevel()->vreg(), range->relative_id());
    3724             :   next_inactive_ranges_change_ =
    3725    78564453 :       std::min(next_inactive_ranges_change_, range->NextStartAfter(position));
    3726    52376284 :   return active_live_ranges().erase(it);
    3727             : }
    3728             : 
    3729     7021945 : ZoneVector<LiveRange*>::iterator LinearScanAllocator::InactiveToHandled(
    3730             :     ZoneVector<LiveRange*>::iterator it) {
    3731     7021945 :   TRACE("Moving live range %d:%d from inactive to handled\n",
    3732             :         (*it)->TopLevel()->vreg(), (*it)->relative_id());
    3733    14043873 :   return inactive_live_ranges().erase(it);
    3734             : }
    3735             : 
    3736    39853145 : ZoneVector<LiveRange*>::iterator LinearScanAllocator::InactiveToActive(
    3737             :     ZoneVector<LiveRange*>::iterator it, LifetimePosition position) {
    3738    39853145 :   LiveRange* range = *it;
    3739    39853145 :   active_live_ranges().push_back(range);
    3740    39853138 :   TRACE("Moving live range %d:%d from inactive to active\n",
    3741             :         range->TopLevel()->vreg(), range->relative_id());
    3742             :   next_active_ranges_change_ =
    3743   119559438 :       std::min(next_active_ranges_change_, range->NextEndAfter(position));
    3744    79706243 :   return inactive_live_ranges().erase(it);
    3745             : }
    3746             : 
    3747    50136053 : void LinearScanAllocator::ForwardStateTo(LifetimePosition position) {
    3748    50136053 :   if (position >= next_active_ranges_change_) {
    3749    30129680 :     next_active_ranges_change_ = LifetimePosition::MaxPosition();
    3750   149910317 :     for (auto it = active_live_ranges().begin();
    3751             :          it != active_live_ranges().end();) {
    3752   119780137 :       LiveRange* cur_active = *it;
    3753   119780137 :       if (cur_active->End() <= position) {
    3754    52358495 :         it = ActiveToHandled(it);
    3755    67421642 :       } else if (!cur_active->Covers(position)) {
    3756    26188169 :         it = ActiveToInactive(it, position);
    3757             :       } else {
    3758             :         next_active_ranges_change_ = std::min(
    3759    82468216 :             next_active_ranges_change_, cur_active->NextEndAfter(position));
    3760             :         ++it;
    3761             :       }
    3762             :     }
    3763             :   }
    3764             : 
    3765    50136553 :   if (position >= next_inactive_ranges_change_) {
    3766    11591630 :     next_inactive_ranges_change_ = LifetimePosition::MaxPosition();
    3767   152262365 :     for (auto it = inactive_live_ranges().begin();
    3768             :          it != inactive_live_ranges().end();) {
    3769   140669712 :       LiveRange* cur_inactive = *it;
    3770   140669712 :       if (cur_inactive->End() <= position) {
    3771     7022086 :         it = InactiveToHandled(it);
    3772   133647626 :       } else if (cur_inactive->Covers(position)) {
    3773    39853171 :         it = InactiveToActive(it, position);
    3774             :       } else {
    3775             :         next_inactive_ranges_change_ =
    3776             :             std::min(next_inactive_ranges_change_,
    3777   187591626 :                      cur_inactive->NextStartAfter(position));
    3778             :         ++it;
    3779             :       }
    3780             :     }
    3781             :   }
    3782    50137576 : }
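                     : // The transitions above implement the classic linear-scan bookkeeping.
                     : // A minimal sketch (illustrative pseudocode only):
                     : //
                     : //   for R in active:   R.End() <= pos  -> handled
                     : //                      !R.Covers(pos) -> inactive   (lifetime hole)
                     : //   for R in inactive: R.End() <= pos  -> handled
                     : //                      R.Covers(pos)  -> active
                     : //
                     : // next_active_ranges_change_ and next_inactive_ranges_change_ cache the
                     : // earliest position at which any transition can occur, so both scans
                     : // are skipped entirely until that position is reached.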
    3783             : 
    3784           0 : int LinearScanAllocator::LastDeferredInstructionIndex(InstructionBlock* start) {
    3785             :   DCHECK(start->IsDeferred());
    3786             :   RpoNumber last_block =
    3787           0 :       RpoNumber::FromInt(code()->InstructionBlockCount() - 1);
    3788           0 :   while ((start->rpo_number() < last_block)) {
    3789             :     InstructionBlock* next =
    3790           0 :         code()->InstructionBlockAt(start->rpo_number().Next());
    3791           0 :     if (!next->IsDeferred()) break;
    3792             :     start = next;
    3793             :   }
    3794           0 :   return start->last_instruction_index();
    3795             : }
    3796             : 
    3797           0 : void LinearScanAllocator::GetFPRegisterSet(MachineRepresentation rep,
    3798             :                                            int* num_regs, int* num_codes,
    3799             :                                            const int** codes) const {
    3800             :   DCHECK(!kSimpleFPAliasing);
    3801           0 :   if (rep == MachineRepresentation::kFloat32) {
    3802           0 :     *num_regs = data()->config()->num_float_registers();
    3803           0 :     *num_codes = data()->config()->num_allocatable_float_registers();
    3804           0 :     *codes = data()->config()->allocatable_float_codes();
    3805           0 :   } else if (rep == MachineRepresentation::kSimd128) {
    3806           0 :     *num_regs = data()->config()->num_simd128_registers();
    3807           0 :     *num_codes = data()->config()->num_allocatable_simd128_registers();
    3808           0 :     *codes = data()->config()->allocatable_simd128_codes();
    3809             :   } else {
    3810           0 :     UNREACHABLE();
    3811             :   }
    3812           0 : }
    3813             : 
    3814    50143619 : void LinearScanAllocator::FindFreeRegistersForRange(
    3815             :     LiveRange* range, Vector<LifetimePosition> positions) {
    3816             :   int num_regs = num_registers();
    3817             :   int num_codes = num_allocatable_registers();
    3818             :   const int* codes = allocatable_register_codes();
    3819             :   MachineRepresentation rep = range->representation();
    3820             :   if (!kSimpleFPAliasing && (rep == MachineRepresentation::kFloat32 ||
    3821             :                              rep == MachineRepresentation::kSimd128))
    3822             :     GetFPRegisterSet(rep, &num_regs, &num_codes, &codes);
    3823             :   DCHECK_GE(positions.length(), num_regs);
    3824             : 
    3825  1654503543 :   for (int i = 0; i < num_regs; ++i) {
    3826  1604359924 :     positions[i] = LifetimePosition::MaxPosition();
    3827             :   }
    3828             : 
    3829   204456757 :   for (LiveRange* cur_active : active_live_ranges()) {
    3830             :     int cur_reg = cur_active->assigned_register();
    3831             :     if (kSimpleFPAliasing || !check_fp_aliasing()) {
    3832   308626132 :       positions[cur_reg] = LifetimePosition::GapFromInstructionIndex(0);
    3833   154313066 :       TRACE("Register %s is free until pos %d (1) due to %d\n",
    3834             :             RegisterName(cur_reg),
    3835             :             LifetimePosition::GapFromInstructionIndex(0).value(),
    3836             :             cur_active->TopLevel()->vreg());
    3837             :     } else {
    3838             :       int alias_base_index = -1;
    3839             :       int aliases = data()->config()->GetAliases(
    3840             :           cur_active->representation(), cur_reg, rep, &alias_base_index);
    3841             :       DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
    3842             :       while (aliases--) {
    3843             :         int aliased_reg = alias_base_index + aliases;
    3844             :         positions[aliased_reg] = LifetimePosition::GapFromInstructionIndex(0);
    3845             :       }
    3846             :     }
    3847             :   }
    3848             : 
    3849   580147450 :   for (LiveRange* cur_inactive : inactive_live_ranges()) {
    3850             :     DCHECK(cur_inactive->End() > range->Start());
    3851             :     int cur_reg = cur_inactive->assigned_register();
    3852             :     // No need to carry out intersections when this register won't be
    3853             :     // interesting to this range anyway.
    3854             :     // TODO(mtrofin): extend to aliased ranges, too.
    3855   530009221 :     if ((kSimpleFPAliasing || !check_fp_aliasing()) &&
    3856   530009221 :         positions[cur_reg] < range->Start()) {
    3857   455271170 :       continue;
    3858             :     }
    3859             : 
    3860   416734990 :     LifetimePosition next_intersection = cur_inactive->FirstIntersection(range);
    3861   416736132 :     if (!next_intersection.IsValid()) continue;
    3862             :     if (kSimpleFPAliasing || !check_fp_aliasing()) {
    3863    74739193 :       positions[cur_reg] = Min(positions[cur_reg], next_intersection);
    3864    74739193 :       TRACE("Register %s is free until pos %d (2)\n", RegisterName(cur_reg),
    3865             :             Min(positions[cur_reg], next_intersection).value());
    3866             :     } else {
    3867             :       int alias_base_index = -1;
    3868             :       int aliases = data()->config()->GetAliases(
    3869             :           cur_inactive->representation(), cur_reg, rep, &alias_base_index);
    3870             :       DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
    3871             :       while (aliases--) {
    3872             :         int aliased_reg = alias_base_index + aliases;
    3873             :         positions[aliased_reg] = Min(positions[aliased_reg], next_intersection);
    3874             :       }
    3875             :     }
    3876             :   }
    3877    50138229 : }
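                     : // After the two loops above, positions[r] conservatively answers "until
                     : // which position could the current range use register r?". A sketch of
                     : // the resulting values (illustrative only):
                     : //
                     : //   positions[r] = gap position 0      if r is held by an active range
                     : //   positions[r] = first intersection  if r is held by an inactive range
                     : //                                      that intersects the current range
                     : //   positions[r] = MaxPosition()       otherwise (r is entirely free)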
    3878             : 
    3879             : // High-level register allocation summary:
    3880             : //
    3881             : // For regular, or hot (i.e. not splinter) ranges, we first attempt to
    3882             : // allocate the preferred (hint) register. If that is not possible,
    3883             : // we find a register that's free, and allocate that. If that's not possible,
    3884             : // we search for a register to steal from a range that was allocated. The
    3885             : // goal is to optimize for throughput by avoiding register-to-memory
    3886             : // moves, which are expensive.
    3887             : //
    3888             : // For splinters, the goal is to minimize the number of moves. First we try
    3889             : // to allocate the preferred register (more discussion follows). Failing that,
    3890             : // we bail out and spill as far as we can, unless the first use is at the
    3891             : // start, in which case we apply the same behavior as for regular ranges.
    3892             : // If there is no hint, we apply the hot-path behavior.
    3893             : //
    3894             : // For the splinter, the hint register may come from:
    3895             : //
    3896             : // - the hot path (we set it at splintering time with SetHint). In this case, if
    3897             : // we cannot offer the hint register, spilling is better because it's at most
    3898             : // 1 move, while trying to find and offer another register is at least 1 move.
    3899             : //
    3900             : // - a constraint. If we cannot offer that register, it's because there is
    3901             : // some interference. So offering the hint register up to the interference
    3902             : // would result in a move at the interference, plus a move to satisfy the
    3903             : // constraint. This is also the number of moves if we spill, with the
    3904             : // potential of the range being already spilled and thus saving a move
    3905             : // (the spill).
    3906             : // Note that this can only be an input constraint: if it were
    3907             : // an output constraint, the range wouldn't be a splinter,
    3908             : // because that would mean it is defined in a deferred block,
    3909             : // and we don't mark those as splinters (they live in deferred
    3910             : // blocks only).
    3911             : //
    3912             : // - a phi. The same analysis as in the case of the input constraint applies.
    3913             : //
    3914    50137215 : void LinearScanAllocator::ProcessCurrentRange(LiveRange* current,
    3915             :                                               SpillMode spill_mode) {
    3916             :   EmbeddedVector<LifetimePosition, RegisterConfiguration::kMaxRegisters>
    3917             :       free_until_pos;
    3918    50137215 :   FindFreeRegistersForRange(current, free_until_pos);
    3919    50138308 :   if (!TryAllocatePreferredReg(current, free_until_pos)) {
    3920    29415792 :     if (current->TopLevel()->IsSplinter()) {
    3921             :       DCHECK(!FLAG_turbo_control_flow_aware_allocation);
    3922     4922194 :       if (TrySplitAndSpillSplinter(current)) return;
    3923             :     }
    3924    27165990 :     if (!TryAllocateFreeReg(current, free_until_pos)) {
    3925     5394663 :       AllocateBlockedReg(current, spill_mode);
    3926             :     }
    3927             :   }
    3928    47888038 :   if (current->HasRegisterAssigned()) {
    3929    43565176 :     AddToActive(current);
    3930             :   }
    3931             : }
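                     : // The cascade above, compactly (illustrative pseudocode, assuming the
                     : // helpers defined in this file):
                     : //
                     : //   if (!TryAllocatePreferredReg(current)) {       // 1. hint register
                     : //     if (splinter && TrySplitAndSpillSplinter())  // 2. splinter bail-out
                     : //       return;
                     : //     if (!TryAllocateFreeReg(current))            // 3. any free register
                     : //       AllocateBlockedReg(current);               // 4. steal or spill
                     : //   }
                     : //   if (current->HasRegisterAssigned()) AddToActive(current);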
    3932             : 
    3933    55084840 : bool LinearScanAllocator::TryAllocatePreferredReg(
    3934             :     LiveRange* current, const Vector<LifetimePosition>& free_until_pos) {
    3935             :   int hint_register;
    3936   110169177 :   if (current->RegisterFromControlFlow(&hint_register) ||
    3937    81119366 :       current->FirstHintPosition(&hint_register) != nullptr ||
    3938             :       current->RegisterFromBundle(&hint_register)) {
    3939    31341248 :     TRACE(
    3940             :         "Found reg hint %s (free until [%d) for live range %d:%d (end %d[).\n",
    3941             :         RegisterName(hint_register), free_until_pos[hint_register].value(),
    3942             :         current->TopLevel()->vreg(), current->relative_id(),
    3943             :         current->End().value());
    3944             : 
    3945             :     // The desired register is free until the end of the current live range.
    3946    62682420 :     if (free_until_pos[hint_register] >= current->End()) {
    3947    22555880 :       TRACE("Assigning preferred reg %s to live range %d:%d\n",
    3948             :             RegisterName(hint_register), current->TopLevel()->vreg(),
    3949             :             current->relative_id());
    3950    22555880 :       SetLiveRangeAssignedRegister(current, hint_register);
    3951    22555892 :       return true;
    3952             :     }
    3953             :   }
    3954             :   return false;
    3955             : }
    3956             : 
    3957    31055575 : int LinearScanAllocator::PickRegisterThatIsAvailableLongest(
    3958             :     LiveRange* current, int hint_reg,
    3959             :     const Vector<LifetimePosition>& free_until_pos) {
    3960             :   int num_regs = 0;  // used only for the call to GetFPRegisterSet.
    3961             :   int num_codes = num_allocatable_registers();
    3962             :   const int* codes = allocatable_register_codes();
    3963             :   MachineRepresentation rep = current->representation();
    3964             :   if (!kSimpleFPAliasing && (rep == MachineRepresentation::kFloat32 ||
    3965             :                              rep == MachineRepresentation::kSimd128)) {
    3966             :     GetFPRegisterSet(rep, &num_regs, &num_codes, &codes);
    3967             :   }
    3968             : 
    3969             :   DCHECK_GE(free_until_pos.length(), num_codes);
    3970             : 
    3971             :   // Find the register which stays free for the longest time. Check for
    3972             :   // the hinted register first, as we might want to use that one. Only
    3973             :   // count full instructions for free ranges, as an instruction's internal
    3974             :   // positions do not help but might shadow a hinted register. This is
    3975             :   // typically the case for function calls, where all registers are
    3976             :   // clobbered after the call except for the argument registers, which are
    3977             :   // set before the call. Hence, the argument registers always get ignored,
    3978             :   // as their available time is shorter.
    3979    31055575 :   int reg = (hint_reg == kUnassignedRegister) ? codes[0] : hint_reg;
    3980             :   int current_free = -1;
    3981   785199209 :   for (int i = 0; i < num_codes; ++i) {
    3982   377072045 :     int code = codes[i];
    3983             :     // Prefer registers that have no fixed uses to avoid blocking later hints.
    3984             :     // We use the first register that has no fixed uses to ensure we use
    3985             :     // byte-addressable registers on ia32 first.
    3986   377072045 :     int candidate_free = free_until_pos[code].ToInstructionIndex();
    3987   377072045 :     TRACE("Register %s is free until %d\n", RegisterName(code), candidate_free);
    3988   754145154 :     if ((candidate_free > current_free) ||
    3989   551743250 :         (candidate_free == current_free && reg != hint_reg &&
    3990   355198824 :          (data()->HasFixedUse(current->representation(), reg) &&
    3991    87805120 :           !data()->HasFixedUse(current->representation(), code)))) {
    3992             :       reg = code;
    3993             :       current_free = candidate_free;
    3994             :     }
    3995             :   }
    3996             : 
    3997    31055347 :   return reg;
    3998             : }
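                     : // Worked example with hypothetical numbers: given free_until_pos =
                     : // {r0: 12, r1: 30, r2: 30} and no hint, the loop settles on r1, the
                     : // first register to reach index 30. r2 ties with r1 afterwards, but a
                     : // tie only displaces the current pick when that pick is not the hint,
                     : // has a fixed use, and the tying candidate does not.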
    3999             : 
    4000    27165818 : bool LinearScanAllocator::TryAllocateFreeReg(
    4001             :     LiveRange* current, const Vector<LifetimePosition>& free_until_pos) {
    4002             :   // Compute the register hint, if one exists.
    4003    27165818 :   int hint_reg = kUnassignedRegister;
    4004    27165894 :   current->RegisterFromControlFlow(&hint_reg) ||
    4005             :       current->FirstHintPosition(&hint_reg) != nullptr ||
    4006    27165818 :       current->RegisterFromBundle(&hint_reg);
    4007             : 
    4008             :   int reg =
    4009    27165771 :       PickRegisterThatIsAvailableLongest(current, hint_reg, free_until_pos);
    4010             : 
    4011    54331838 :   LifetimePosition pos = free_until_pos[reg];
    4012             : 
    4013    27165919 :   if (pos <= current->Start()) {
    4014             :     // All registers are blocked.
    4015             :     return false;
    4016             :   }
    4017             : 
    4018    21771375 :   if (pos < current->End()) {
    4019             :     // Register reg is available at the range start but becomes blocked before
    4020             :     // the range end. Split current at position where it becomes blocked.
    4021     4947119 :     LiveRange* tail = SplitRangeAt(current, pos);
    4022     4947128 :     AddToUnhandled(tail);
    4023             : 
    4024             :     // Try to allocate preferred register once more.
    4025     4947118 :     if (TryAllocatePreferredReg(current, free_until_pos)) return true;
    4026             :   }
    4027             : 
    4028             :   // Register reg is available at the range start and is free until the range
    4029             :   // end.
    4030             :   DCHECK(pos >= current->End());
    4031    19938008 :   TRACE("Assigning free reg %s to live range %d:%d\n", RegisterName(reg),
    4032             :         current->TopLevel()->vreg(), current->relative_id());
    4033    19938008 :   SetLiveRangeAssignedRegister(current, reg);
    4034             : 
    4035    19937981 :   return true;
    4036             : }
    4037             : 
    4038     5394655 : void LinearScanAllocator::AllocateBlockedReg(LiveRange* current,
    4039             :                                              SpillMode spill_mode) {
    4040     5394655 :   UsePosition* register_use = current->NextRegisterPosition(current->Start());
    4041     5394662 :   if (register_use == nullptr) {
    4042             :     // There is no use in the current live range that requires a register.
    4043             :     // We can just spill it.
    4044     1505134 :     Spill(current, spill_mode);
    4045     1505135 :     return;
    4046             :   }
    4047             : 
    4048             :   MachineRepresentation rep = current->representation();
    4049             : 
    4050             :   // use_pos tracks the first position at which each register/alias is used.
    4051             :   // block_pos tracks the first position from which each register/alias is
    4052             :   // blocked.
    4053             :   EmbeddedVector<LifetimePosition, RegisterConfiguration::kMaxRegisters>
    4054             :       use_pos(LifetimePosition::MaxPosition());
    4055             :   EmbeddedVector<LifetimePosition, RegisterConfiguration::kMaxRegisters>
    4056             :       block_pos(LifetimePosition::MaxPosition());
    4057             : 
    4058    50892811 :   for (LiveRange* range : active_live_ranges()) {
    4059             :     int cur_reg = range->assigned_register();
    4060             :     bool is_fixed_or_cant_spill =
    4061    47003279 :         range->TopLevel()->IsFixed() || !range->CanBeSpilled(current->Start());
    4062             :     if (kSimpleFPAliasing || !check_fp_aliasing()) {
    4063    47003278 :       if (is_fixed_or_cant_spill) {
    4064    34858096 :         block_pos[cur_reg] = use_pos[cur_reg] =
    4065    34858096 :             LifetimePosition::GapFromInstructionIndex(0);
    4066             :       } else {
    4067             :         DCHECK_NE(LifetimePosition::GapFromInstructionIndex(0),
    4068             :                   block_pos[cur_reg]);
    4069    12145182 :         use_pos[cur_reg] =
    4070    24290369 :             range->NextLifetimePositionRegisterIsBeneficial(current->Start());
    4071             :       }
    4072             :     } else {
    4073             :       int alias_base_index = -1;
    4074             :       int aliases = data()->config()->GetAliases(
    4075             :           range->representation(), cur_reg, rep, &alias_base_index);
    4076             :       DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
    4077             :       while (aliases--) {
    4078             :         int aliased_reg = alias_base_index + aliases;
    4079             :         if (is_fixed_or_cant_spill) {
    4080             :           block_pos[aliased_reg] = use_pos[aliased_reg] =
    4081             :               LifetimePosition::GapFromInstructionIndex(0);
    4082             :         } else {
    4083             :           use_pos[aliased_reg] =
    4084             :               Min(block_pos[aliased_reg],
    4085             :                   range->NextLifetimePositionRegisterIsBeneficial(
    4086             :                       current->Start()));
    4087             :         }
    4088             :       }
    4089             :     }
    4090             :   }
    4091             : 
    4092    18513395 :   for (LiveRange* range : inactive_live_ranges()) {
    4093             :     DCHECK(range->End() > current->Start());
    4094             :     int cur_reg = range->assigned_register();
    4095             :     bool is_fixed = range->TopLevel()->IsFixed();
    4096             : 
    4097             :     // Don't perform costly intersections if they are guaranteed not to update
    4098             :     // block_pos or use_pos.
    4099             :     // TODO(mtrofin): extend to aliased ranges, too.
    4100             :     if ((kSimpleFPAliasing || !check_fp_aliasing())) {
    4101    14623866 :       if (is_fixed) {
    4102    38964368 :         if (block_pos[cur_reg] < range->Start()) continue;
    4103             :       } else {
    4104     4503368 :         if (use_pos[cur_reg] < range->Start()) continue;
    4105             :       }
    4106             :     }
    4107             : 
    4108    11697581 :     LifetimePosition next_intersection = range->FirstIntersection(current);
    4109    11697578 :     if (!next_intersection.IsValid()) continue;
    4110             : 
    4111             :     if (kSimpleFPAliasing || !check_fp_aliasing()) {
    4112      403859 :       if (is_fixed) {
    4113      780790 :         block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
    4114      390395 :         use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
    4115             :       } else {
    4116       26928 :         use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
    4117             :       }
    4118             :     } else {
    4119             :       int alias_base_index = -1;
    4120             :       int aliases = data()->config()->GetAliases(
    4121             :           range->representation(), cur_reg, rep, &alias_base_index);
    4122             :       DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
    4123             :       while (aliases--) {
    4124             :         int aliased_reg = alias_base_index + aliases;
    4125             :         if (is_fixed) {
    4126             :           block_pos[aliased_reg] =
    4127             :               Min(block_pos[aliased_reg], next_intersection);
    4128             :           use_pos[aliased_reg] =
    4129             :               Min(block_pos[aliased_reg], use_pos[aliased_reg]);
    4130             :         } else {
    4131             :           use_pos[aliased_reg] = Min(use_pos[aliased_reg], next_intersection);
    4132             :         }
    4133             :       }
    4134             :     }
    4135             :   }
    4136             : 
    4137             :   // Compute the register hint, if one exists.
    4138     3889529 :   int hint_reg = kUnassignedRegister;
    4139     3889523 :   current->RegisterFromControlFlow(&hint_reg) ||
    4140     3889525 :       register_use->HintRegister(&hint_reg) ||
    4141     3889529 :       current->RegisterFromBundle(&hint_reg);
    4142     3889527 :   int reg = PickRegisterThatIsAvailableLongest(current, hint_reg, use_pos);
    4143             : 
    4144     7779062 :   if (use_pos[reg] < register_use->pos()) {
    4145             :     // If there is a gap position before the next register use, we can
    4146             :     // spill until there. The gap position will then hold the fill move.
    4147     2817695 :     if (LifetimePosition::ExistsGapPositionBetween(current->Start(),
    4148             :                                                    register_use->pos())) {
    4149             :       SpillBetween(current, current->Start(), register_use->pos(), spill_mode);
    4150             :       return;
    4151             :     }
    4152             :   }
    4153             : 
    4154             :   // When in deferred spilling mode, avoid stealing registers beyond the
    4155             :   // current deferred region. Otherwise we might spill an inactive range that
    4156             :   // starts outside of deferred code, and such a range would not be reloaded.
    4157             :   LifetimePosition new_end = current->End();
    4158     1071836 :   if (spill_mode == SpillMode::kSpillDeferred) {
    4159             :     InstructionBlock* deferred_block =
    4160           0 :         code()->GetInstructionBlock(current->Start().ToInstructionIndex());
    4161             :     new_end = Min(new_end, LifetimePosition::GapFromInstructionIndex(
    4162           0 :                                LastDeferredInstructionIndex(deferred_block)));
    4163             :   }
    4164             : 
    4165             :   // We couldn't spill until the next register use. Split before the register
    4166             :   // is blocked, if applicable.
    4167     1071836 :   if (block_pos[reg] < new_end) {
    4168             :     // Register becomes blocked before the current range end. Split before that
    4169             :     // position.
    4170             :     new_end = block_pos[reg].Start();
    4171             :   }
    4172             : 
    4173             :   // Split at the new end if we found one.
    4174     1071836 :   if (new_end != current->End()) {
    4175       11647 :     LiveRange* tail = SplitBetween(current, current->Start(), new_end);
    4176       11647 :     AddToUnhandled(tail);
    4177             :   }
    4178             : 
    4179             :   // Register reg is not blocked for the whole range.
    4180             :   DCHECK(block_pos[reg] >= current->End());
    4181     1071835 :   TRACE("Assigning blocked reg %s to live range %d:%d\n", RegisterName(reg),
    4182             :         current->TopLevel()->vreg(), current->relative_id());
    4183     1071835 :   SetLiveRangeAssignedRegister(current, reg);
    4184             : 
    4185             :   // This register was not free. Thus we need to find and spill
    4186             :   // parts of active and inactive live regions that use the same register
    4187             :   // at the same lifetime positions as current.
    4188     1071835 :   SplitAndSpillIntersecting(current, spill_mode);
    4189             : }
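                     : // Decision summary for AllocateBlockedReg (illustrative pseudocode):
                     : //
                     : //   if (no use needs a register)                spill; done
                     : //   reg = register whose use_pos is furthest away
                     : //   if (use_pos[reg] < first register use &&
                     : //       a gap position exists before that use)  spill up to the use; done
                     : //   if (block_pos[reg] < current end)           split current there
                     : //   assign reg to current
                     : //   SplitAndSpillIntersecting(current)          // evict conflicting ranges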
    4190             : 
    4191     1071836 : void LinearScanAllocator::SplitAndSpillIntersecting(LiveRange* current,
    4192             :                                                     SpillMode spill_mode) {
    4193             :   DCHECK(current->HasRegisterAssigned());
    4194             :   int reg = current->assigned_register();
    4195             :   LifetimePosition split_pos = current->Start();
    4196    13963766 :   for (auto it = active_live_ranges().begin();
    4197             :        it != active_live_ranges().end();) {
    4198    12891931 :     LiveRange* range = *it;
    4199             :     if (kSimpleFPAliasing || !check_fp_aliasing()) {
    4200    12891931 :       if (range->assigned_register() != reg) {
    4201             :         ++it;
    4202             :         continue;
    4203             :       }
    4204             :     } else {
    4205             :       if (!data()->config()->AreAliases(current->representation(), reg,
    4206             :                                         range->representation(),
    4207             :                                         range->assigned_register())) {
    4208             :         ++it;
    4209             :         continue;
    4210             :       }
    4211             :     }
    4212             : 
    4213     1071836 :     UsePosition* next_pos = range->NextRegisterPosition(current->Start());
    4214             :     // TODO(herhut): Be more clever here as long as we do not move split_pos
    4215             :     // out of deferred code.
    4216             :     LifetimePosition spill_pos = spill_mode == SpillMode::kSpillDeferred
    4217             :                                      ? split_pos
    4218     1071837 :                                      : FindOptimalSpillingPos(range, split_pos);
    4219     1071837 :     if (next_pos == nullptr) {
    4220             :       SpillAfter(range, spill_pos, spill_mode);
    4221             :     } else {
    4222             :       // When spilling between spill_pos and next_pos, ensure that the range
    4223             :       // remains spilled at least until the start of the current live range.
    4224             :       // This guarantees that we will not introduce new unhandled ranges that
    4225             :       // start before the current range, as that would violate allocation
    4226             :       // invariants and lead to an inconsistent state of the active and
    4227             :       // inactive live ranges: ranges are allocated in order of their start
    4228             :       // positions, and they are retired from active/inactive when the start
    4229             :       // of the current live range is larger than their end.
    4230             :       DCHECK(LifetimePosition::ExistsGapPositionBetween(current->Start(),
    4231             :                                                         next_pos->pos()));
    4232             :       SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos(),
    4233      868764 :                         spill_mode);
    4234             :     }
    4235     1071837 :     it = ActiveToHandled(it);
    4236             :   }
    4237             : 
    4238    13450502 :   for (auto it = inactive_live_ranges().begin();
    4239             :        it != inactive_live_ranges().end();) {
    4240    12378667 :     LiveRange* range = *it;
    4241             :     DCHECK(range->End() > current->Start());
    4242    12378667 :     if (range->TopLevel()->IsFixed()) {
    4243             :       ++it;
    4244             :       continue;
    4245             :     }
    4246             :     if (kSimpleFPAliasing || !check_fp_aliasing()) {
    4247      283154 :       if (range->assigned_register() != reg) {
    4248             :         ++it;
    4249             :         continue;
    4250             :       }
    4251             :     } else {
    4252             :       if (!data()->config()->AreAliases(current->representation(), reg,
    4253             :                                         range->representation(),
    4254             :                                         range->assigned_register())) {
    4255             :         ++it;
    4256             :         continue;
    4257             :       }
    4258             :     }
    4259             : 
    4260       23614 :     LifetimePosition next_intersection = range->FirstIntersection(current);
    4261       23614 :     if (next_intersection.IsValid()) {
    4262          98 :       UsePosition* next_pos = range->NextRegisterPosition(current->Start());
    4263          98 :       if (next_pos == nullptr) {
    4264             :         SpillAfter(range, split_pos, spill_mode);
    4265             :       } else {
    4266             :         next_intersection = Min(next_intersection, next_pos->pos());
    4267             :         SpillBetween(range, split_pos, next_intersection, spill_mode);
    4268             :       }
    4269          98 :       it = InactiveToHandled(it);
    4270             :     } else {
    4271             :       ++it;
    4272             :     }
    4273             :   }
    4274     1071835 : }
    4275             : 
    4276    28118507 : bool LinearScanAllocator::TryReuseSpillForPhi(TopLevelLiveRange* range) {
    4277    28118507 :   if (!range->is_phi()) return false;
    4278             : 
    4279             :   DCHECK(!range->HasSpillOperand());
    4280             :   // Check how many operands belong to the same bundle as the output.
    4281             :   LiveRangeBundle* out_bundle = range->get_bundle();
    4282             :   RegisterAllocationData::PhiMapValue* phi_map_value =
    4283             :       data()->GetPhiMapValueFor(range);
    4284             :   const PhiInstruction* phi = phi_map_value->phi();
    4285             :   const InstructionBlock* block = phi_map_value->block();
    4286             :   // Count the number of spilled operands.
    4287             :   size_t spilled_count = 0;
    4288    12037295 :   for (size_t i = 0; i < phi->operands().size(); i++) {
    4289     4979732 :     int op = phi->operands()[i];
    4290     4979732 :     LiveRange* op_range = data()->GetOrCreateLiveRangeFor(op);
    4291     4979733 :     if (!op_range->TopLevel()->HasSpillRange()) continue;
    4292             :     const InstructionBlock* pred =
    4293      282174 :         code()->InstructionBlockAt(block->predecessors()[i]);
    4294             :     LifetimePosition pred_end =
    4295             :         LifetimePosition::InstructionFromInstructionIndex(
    4296             :             pred->last_instruction_index());
    4297     1323154 :     while (op_range != nullptr && !op_range->CanCover(pred_end)) {
    4298             :       op_range = op_range->next();
    4299             :     }
    4300      553080 :     if (op_range != nullptr && op_range->spilled() &&
    4301             :         op_range->get_bundle() == out_bundle) {
    4302      147192 :       spilled_count++;
    4303             :     }
    4304             :   }
    4305             : 
    4306             :   // Only continue if more than half of the operands are spilled to the same
    4307             :   // slot (because they are part of the same bundle).
    4308     2077829 :   if (spilled_count * 2 <= phi->operands().size()) {
    4309             :     return false;
    4310             :   }
    4311             : 
    4312             :   // If the range does not need register soon, spill it to the merged
    4313             :   // spill range.
    4314             :   LifetimePosition next_pos = range->Start();
    4315       14006 :   if (next_pos.IsGapPosition()) next_pos = next_pos.NextStart();
    4316       14006 :   UsePosition* pos = range->NextUsePositionRegisterIsBeneficial(next_pos);
    4317       14006 :   if (pos == nullptr) {
    4318        7472 :     Spill(range, SpillMode::kSpillAtDefinition);
    4319        7472 :     return true;
    4320        6534 :   } else if (pos->pos() > range->Start().NextStart()) {
    4321             :     SpillBetween(range, range->Start(), pos->pos(),
    4322             :                  SpillMode::kSpillAtDefinition);
    4323        5374 :     return true;
    4324             :   }
    4325             :   return false;
    4326             : }
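                     : // Example of the majority test above, with hypothetical counts: a phi
                     : // with 3 operands of which 2 are spilled to the bundle's slot gives
                     : // spilled_count * 2 == 4 > 3, so the phi output may reuse that slot,
                     : // provided no use needs a register right away.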
    4327             : 
    4328           0 : void LinearScanAllocator::SpillAfter(LiveRange* range, LifetimePosition pos,
    4329             :                                      SpillMode spill_mode) {
    4330      203131 :   LiveRange* second_part = SplitRangeAt(range, pos);
    4331      203131 :   Spill(second_part, spill_mode);
    4332           0 : }
    4333             : 
    4334           0 : void LinearScanAllocator::SpillBetween(LiveRange* range, LifetimePosition start,
    4335             :                                        LifetimePosition end,
    4336             :                                        SpillMode spill_mode) {
    4337     2823109 :   SpillBetweenUntil(range, start, start, end, spill_mode);
    4338           0 : }
    4339             : 
    4340     3691868 : void LinearScanAllocator::SpillBetweenUntil(LiveRange* range,
    4341             :                                             LifetimePosition start,
    4342             :                                             LifetimePosition until,
    4343             :                                             LifetimePosition end,
    4344             :                                             SpillMode spill_mode) {
    4345     3691868 :   CHECK(start < end);
    4346     3691868 :   LiveRange* second_part = SplitRangeAt(range, start);
    4347             : 
    4348     3691869 :   if (second_part->Start() < end) {
    4349             :     // The split result intersects with [start, end[.
    4350             :     // Split it at a position between ]start+1, end[, spill the middle part,
    4351             :     // and add the rest to unhandled.
    4352             : 
    4353             :     // Make sure that the third part always starts after the start of the
    4354             :     // second part, as that likely is the current position of the register
    4355             :     // allocator and we cannot add ranges to unhandled that start before
    4356             :     // the current position.
    4357             :     LifetimePosition split_start = Max(second_part->Start().End(), until);
    4358             : 
    4359             :     // If end is an actual use (which it typically is), we have to split so
    4360             :     // that there is a gap before it, giving us space to move the value into
    4361             :     // its position.
    4362             :     // However, if we have no choice, split right where asked.
    4363     3691829 :     LifetimePosition third_part_end = Max(split_start, end.PrevStart().End());
    4364             :     // Instead of splitting right after or even before the block boundary,
    4365             :     // split on the boundary to avoid extra moves.
    4366     3691829 :     if (data()->IsBlockBoundary(end.Start())) {
    4367           0 :       third_part_end = Max(split_start, end.Start());
    4368             :     }
    4369             : 
    4370             :     LiveRange* third_part =
    4371     3691826 :         SplitBetween(second_part, split_start, third_part_end);
    4372             : 
    4373     3691829 :     AddToUnhandled(third_part);
    4374             :     // This can happen even though we checked start < end above, because we
    4375             :     // fiddle with the end location. However, we are guaranteed to be at or
    4376             :     // after until, so this is fine.
    4377     3691826 :     if (third_part != second_part) {
    4378     3691826 :       Spill(second_part, spill_mode);
    4379             :     }
    4380             :   } else {
    4381             :     // The split result does not intersect with [start, end[.
    4382             :     // Nothing to spill. Just add it to unhandled as a whole.
    4383          40 :     AddToUnhandled(second_part);
    4384             :   }
    4385     3691871 : }
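                     : // The resulting layout, sketched with illustrative positions:
                     : //
                     : //   range:       ...[start .......... until .......... end .......)
                     : //   head:        ...)                                keeps its assignment
                     : //   second part:    [start ......... split)         spilled
                     : //   third part:                      [split .......) re-queued unhandled
                     : //
                     : // where split is chosen by SplitBetween within [Max(start's end, until),
                     : // end's previous start], so the third part never starts before until
                     : // and a gap remains before end for the reload move.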
    4386             : 
    4387     2516399 : SpillSlotLocator::SpillSlotLocator(RegisterAllocationData* data)
    4388     2516399 :     : data_(data) {}
    4389             : 
    4390     2516445 : void SpillSlotLocator::LocateSpillSlots() {
    4391             :   const InstructionSequence* code = data()->code();
    4392             :   const size_t live_ranges_size = data()->live_ranges().size();
    4393    93983083 :   for (TopLevelLiveRange* range : data()->live_ranges()) {
    4394    91466697 :     CHECK_EQ(live_ranges_size,
    4395             :              data()->live_ranges().size());  // TODO(neis): crbug.com/831822
    4396    91466697 :     if (range == nullptr || range->IsEmpty()) continue;
    4397             :     // We care only about ranges which spill in the frame.
    4398    46380542 :     if (!range->HasSpillRange() || range->IsSpilledOnlyInDeferredBlocks()) {
    4399             :       continue;
    4400             :     }
    4401             :     TopLevelLiveRange::SpillMoveInsertionList* spills =
    4402             :         range->GetSpillMoveInsertionLocations();
    4403             :     DCHECK_NOT_NULL(spills);
    4404    13536149 :     for (; spills != nullptr; spills = spills->next) {
    4405     4512888 :       code->GetInstructionBlock(spills->gap_index)->mark_needs_frame();
    4406             :     }
    4407             :   }
    4408     2516386 : }
    4409             : 
    4410     7550415 : OperandAssigner::OperandAssigner(RegisterAllocationData* data) : data_(data) {}
    4411             : 
    4412     2516037 : void OperandAssigner::DecideSpillingMode() {
    4413     2516037 :   if (FLAG_turbo_control_flow_aware_allocation) {
    4414           0 :     for (auto range : data()->live_ranges()) {
    4415             :       int max_blocks = data()->code()->InstructionBlockCount();
    4416           0 :       if (range != nullptr && range->IsSpilledOnlyInDeferredBlocks()) {
    4417             :         // If the range is spilled only in deferred blocks and starts in
    4418             :         // a non-deferred block, we transition its representation here so
    4419             :         // that the LiveRangeConnector processes it correctly. If,
    4420             :         // however, it starts in a deferred block, we upgrade it to
    4421             :         // spill at definition, as that definition is in a deferred block
    4422             :         // anyway. While this is an optimization, the code in LiveRangeConnector
    4423             :         // relies on it!
    4424           0 :         if (GetInstructionBlock(data()->code(), range->Start())->IsDeferred()) {
    4425           0 :           TRACE("Live range %d is spilled and alive in deferred code only\n",
    4426             :                 range->vreg());
    4427             :           range->TransitionRangeToSpillAtDefinition();
    4428             :         } else {
    4429           0 :           TRACE(
    4430             :               "Live range %d is spilled deferred code only but alive outside\n",
    4431             :               range->vreg());
    4432             :           range->TransitionRangeToDeferredSpill(data()->allocation_zone(),
    4433           0 :                                                 max_blocks);
    4434             :         }
    4435             :       }
    4436             :     }
    4437             :   }
    4438     2516037 : }
    4439             : 
    4440     2517453 : void OperandAssigner::AssignSpillSlots() {
    4441    93987616 :   for (auto range : data()->live_ranges()) {
    4442    91470172 :     if (range != nullptr && range->get_bundle() != nullptr) {
    4443     5849561 :       range->get_bundle()->MergeSpillRanges();
    4444             :     }
    4445             :   }
    4446             :   ZoneVector<SpillRange*>& spill_ranges = data()->spill_ranges();
    4447             :   // Merge disjoint spill ranges.
    4448    93989026 :   for (size_t i = 0; i < spill_ranges.size(); ++i) {
    4449    45735953 :     SpillRange* range = spill_ranges[i];
    4450    45735953 :     if (range == nullptr) continue;
    4451     5531052 :     if (range->IsEmpty()) continue;
    4452  2367394820 :     for (size_t j = i + 1; j < spill_ranges.size(); ++j) {
    4453  1180105543 :       SpillRange* other = spill_ranges[j];
    4454  1180105543 :       if (other != nullptr && !other->IsEmpty()) {
    4455   260574344 :         range->TryMerge(other);
    4456             :       }
    4457             :     }
    4458             :   }
    4459             :   // Allocate slots for the merged spill ranges.
    4460    48250297 :   for (SpillRange* range : spill_ranges) {
    4461    45733015 :     if (range == nullptr || range->IsEmpty()) continue;
    4462             :     // Allocate a new operand referring to the spill slot.
    4463     3591548 :     if (!range->HasSlot()) {
    4464             :       int index = data()->frame()->AllocateSpillSlot(range->byte_width());
    4465             :       range->set_assigned_slot(index);
    4466             :     }
    4467             :   }
    4468     2517282 : }
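                     : // TryMerge unions two spill ranges only when their live intervals do
                     : // not overlap (and the slots are otherwise compatible), so values that
                     : // are never live at the same time share a single stack slot. A sketch
                     : // with hypothetical intervals:
                     : //
                     : //   A: [0, 10)   B: [12, 20)   C: [5, 15)
                     : //   A and B merge into one slot; C overlaps both and keeps its own.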
    4469             : 
    4470     2521368 : void OperandAssigner::CommitAssignment() {
    4471             :   const size_t live_ranges_size = data()->live_ranges().size();
    4472    93990357 :   for (TopLevelLiveRange* top_range : data()->live_ranges()) {
    4473    91472946 :     CHECK_EQ(live_ranges_size,
    4474             :              data()->live_ranges().size());  // TODO(neis): crbug.com/831822
    4475   142092206 :     if (top_range == nullptr || top_range->IsEmpty()) continue;
    4476             :     InstructionOperand spill_operand;
    4477    40853686 :     if (top_range->HasSpillOperand()) {
    4478    15169582 :       spill_operand = *top_range->TopLevel()->GetSpillOperand();
    4479    25684104 :     } else if (top_range->TopLevel()->HasSpillRange()) {
    4480     5530995 :       spill_operand = top_range->TopLevel()->GetSpillRangeOperand();
    4481             :     }
    4482    40853686 :     if (top_range->is_phi()) {
    4483             :       data()->GetPhiMapValueFor(top_range)->CommitAssignment(
    4484     4323736 :           top_range->GetAssignedOperand());
    4485             :     }
    4486   229215730 :     for (LiveRange* range = top_range; range != nullptr;
    4487             :          range = range->next()) {
    4488    94183866 :       InstructionOperand assigned = range->GetAssignedOperand();
    4489             :       DCHECK(!assigned.IsUnallocated());
    4490    94183534 :       range->ConvertUsesToOperand(assigned, spill_operand);
    4491             :     }
    4492             : 
    4493    40850833 :     if (!spill_operand.IsInvalid()) {
    4494             :       // If this top level range has a child spilled in a deferred block, we use
    4495             :       // the range and control flow connection mechanism instead of spilling at
    4496             :       // definition. Refer to the ConnectLiveRanges and ResolveControlFlow
    4497             :       // phases. Normally, when we spill at definition, we do not insert a
    4498             :       // connecting move when a successor child range is spilled - because the
    4499             :       // spilled range picks up its value from the slot which was assigned at
    4500             :       // definition. For ranges that are determined to spill only in deferred
    4501             :       // blocks, we let ConnectLiveRanges and ResolveControlFlow find the blocks
    4502             :       // where a spill operand is expected, and then finalize by inserting the
    4503             :       // spills in the deferred blocks' dominators.
    4504    20700676 :       if (!top_range->IsSpilledOnlyInDeferredBlocks()) {
    4505             :         // Spill at definition if the range isn't spilled only in deferred
    4506             :         // blocks.
    4507    19680247 :         top_range->CommitSpillMoves(
    4508             :             data()->code(), spill_operand,
    4509    57131540 :             top_range->has_slot_use() || top_range->spilled());
    4510             :       }
    4511             :     }
    4512             :   }
    4513     2517411 : }
    4514             : 
    4515     2517343 : ReferenceMapPopulator::ReferenceMapPopulator(RegisterAllocationData* data)
    4516     2517343 :     : data_(data) {}
    4517             : 
    4518           0 : bool ReferenceMapPopulator::SafePointsAreInOrder() const {
    4519             :   int safe_point = 0;
    4520           0 :   for (ReferenceMap* map : *data()->code()->reference_maps()) {
    4521           0 :     if (safe_point > map->instruction_position()) return false;
    4522             :     safe_point = map->instruction_position();
    4523             :   }
    4524           0 :   return true;
    4525             : }
    4526             : 
    4527     2517098 : void ReferenceMapPopulator::PopulateReferenceMaps() {
    4528             :   DCHECK(SafePointsAreInOrder());
    4529             :   // Map all delayed references.
    4530     2517098 :   for (RegisterAllocationData::DelayedReference& delayed_reference :
    4531             :        data()->delayed_references()) {
    4532           0 :     delayed_reference.map->RecordReference(
    4533           0 :         AllocatedOperand::cast(*delayed_reference.operand));
    4534             :   }
    4535             :   // Iterate over all safe point positions and record a pointer
    4536             :   // for all spilled live ranges at this point.
    4537             :   int last_range_start = 0;
    4538             :   const ReferenceMapDeque* reference_maps = data()->code()->reference_maps();
    4539             :   ReferenceMapDeque::const_iterator first_it = reference_maps->begin();
    4540             :   const size_t live_ranges_size = data()->live_ranges().size();
    4541    93985713 :   for (TopLevelLiveRange* range : data()->live_ranges()) {
    4542    91468530 :     CHECK_EQ(live_ranges_size,
    4543             :              data()->live_ranges().size());  // TODO(neis): crbug.com/831822
    4544    91468530 :     if (range == nullptr) continue;
    4545             :     // Skip non-reference values.
    4546    43034179 :     if (!data()->IsReference(range)) continue;
    4547             :     // Skip empty live ranges.
    4548    18238781 :     if (range->IsEmpty()) continue;
    4549    16087965 :     if (range->has_preassigned_slot()) continue;
    4550             : 
    4551             :     // Find the extent of the range and its children.
    4552             :     int start = range->Start().ToInstructionIndex();
    4553             :     int end = 0;
    4554   105001509 :     for (LiveRange* cur = range; cur != nullptr; cur = cur->next()) {
    4555             :       LifetimePosition this_end = cur->End();
    4556    44677621 :       if (this_end.ToInstructionIndex() > end)
    4557             :         end = this_end.ToInstructionIndex();
    4558             :       DCHECK(cur->Start().ToInstructionIndex() >= start);
    4559             :     }
    4560             : 
    4561             :     // Most of the ranges are in order, but not all. Keep an eye on when they
    4562             :     // step backwards and reset first_it so we don't miss any safe points.
    4563    15646267 :     if (start < last_range_start) first_it = reference_maps->begin();
    4564             :     last_range_start = start;
    4565             : 
    4566             :     // Step across all the safe points that are before the start of this range,
    4567             :     // recording how far we step in order to save doing this for the next range.
    4568   895000088 :     for (; first_it != reference_maps->end(); ++first_it) {
    4569   893872262 :       ReferenceMap* map = *first_it;
    4570   893872262 :       if (map->instruction_position() >= start) break;
    4571             :     }
    4572             : 
    4573             :     InstructionOperand spill_operand;
    4574    21636088 :     if (((range->HasSpillOperand() &&
    4575    30235583 :           !range->GetSpillOperand()->IsConstant()) ||
    4576             :          range->HasSpillRange())) {
    4577     3922144 :       if (range->HasSpillOperand()) {
    4578     1056942 :         spill_operand = *range->GetSpillOperand();
    4579             :       } else {
    4580             :         spill_operand = range->GetSpillRangeOperand();
    4581             :       }
    4582             :       DCHECK(spill_operand.IsStackSlot());
    4583             :       DCHECK(CanBeTaggedPointer(
    4584             :           AllocatedOperand::cast(spill_operand).representation()));
    4585             :     }
    4586             : 
    4587             :     LiveRange* cur = range;
    4588             :     // Step through the safe points to see whether they are in the range.
    4589    64397867 :     for (auto it = first_it; it != reference_maps->end(); ++it) {
    4590    59635947 :       ReferenceMap* map = *it;
    4591             :       int safe_point = map->instruction_position();
    4592             : 
    4593             :       // The safe points are sorted so we can stop searching here.
    4594    59635947 :       if (safe_point - 1 > end) break;
    4595             : 
    4596             :       // Advance to the next active range that covers the current
    4597             :       // safe point position.
    4598             :       LifetimePosition safe_point_pos =
    4599             :           LifetimePosition::InstructionFromInstructionIndex(safe_point);
    4600             : 
    4601             :       // Search for the child range (cur) that covers safe_point_pos. If we
    4602             :       // don't find it before the children pass safe_point_pos, keep cur at
    4603             :       // the last child, because the next safe_point_pos may be covered by cur.
    4604             :       // This may happen if cur has more than one interval, and the current
    4605             :       // safe_point_pos is in between intervals.
    4606             :       // For that reason, cur may be at most the last child.
    4607             :       DCHECK_NOT_NULL(cur);
    4608             :       DCHECK(safe_point_pos >= cur->Start() || range == cur);
    4609             :       bool found = false;
    4610   108430743 :       while (!found) {
    4611    71396907 :         if (cur->Covers(safe_point_pos)) {
    4612             :           found = true;
    4613             :         } else {
    4614             :           LiveRange* next = cur->next();
    4615    34363042 :           if (next == nullptr || next->Start() > safe_point_pos) {
    4616             :             break;
    4617             :           }
    4618             :           cur = next;
    4619             :         }
    4620             :       }
    4621             : 
    4622    48751620 :       if (!found) {
    4623             :         continue;
    4624             :       }
    4625             : 
    4626             :       // Check if the live range is spilled and the safe point is after
    4627             :       // the spill position.
    4628             :       int spill_index = range->IsSpilledOnlyInDeferredBlocks()
    4629             :                             ? cur->Start().ToInstructionIndex()
    4630    37033968 :                             : range->spill_start_index();
    4631             : 
    4632    37033968 :       if (!spill_operand.IsInvalid() && safe_point >= spill_index) {
    4633    24080452 :         TRACE("Pointer for range %d (spilled at %d) at safe point %d\n",
    4634             :               range->vreg(), spill_index, safe_point);
    4635    24080452 :         map->RecordReference(AllocatedOperand::cast(spill_operand));
    4636             :       }
    4637             : 
    4638    37033948 :       if (!cur->spilled()) {
    4639           0 :         TRACE(
    4640             :             "Pointer in register for range %d:%d (start at %d) "
    4641             :             "at safe point %d\n",
    4642             :             range->vreg(), cur->relative_id(), cur->Start().value(),
    4643             :             safe_point);
    4644           0 :         InstructionOperand operand = cur->GetAssignedOperand();
    4645             :         DCHECK(!operand.IsStackSlot());
    4646             :         DCHECK(CanBeTaggedPointer(
    4647             :             AllocatedOperand::cast(operand).representation()));
    4648           0 :         map->RecordReference(AllocatedOperand::cast(operand));
    4649             :       }
    4650             :     }
    4651             :   }
    4652     2517183 : }
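
// ---------------------------------------------------------------------------
// A minimal sketch, not part of register-allocator.cc, of the cached
// two-pointer scan used by the function above: live ranges arrive mostly
// sorted by start and safe points are sorted by instruction position, so
// first_it only rewinds when a range starts before its predecessor did.
// All names below (SafePointRef, RangeRef, RecordReference) are hypothetical
// stand-ins for the V8 classes.
#include <vector>

struct SafePointRef { int position; };
struct RangeRef { int start, end; };

void RecordReference(const RangeRef&, const SafePointRef&) { /* ... */ }

void ScanSafePoints(const std::vector<RangeRef>& ranges,
                    const std::vector<SafePointRef>& safe_points) {
  auto first_it = safe_points.begin();
  int last_range_start = 0;
  for (const RangeRef& range : ranges) {
    // Rewind only when the mostly-sorted ranges step backwards.
    if (range.start < last_range_start) first_it = safe_points.begin();
    last_range_start = range.start;
    // Skip safe points that lie before this range; remember how far we got.
    while (first_it != safe_points.end() &&
           first_it->position < range.start) {
      ++first_it;
    }
    // Visit the safe points inside the range; they are sorted, so stop early.
    for (auto it = first_it; it != safe_points.end(); ++it) {
      if (it->position > range.end) break;
      RecordReference(range, *it);
    }
  }
}
// ---------------------------------------------------------------------------
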
    4653             : 
    4654     5034521 : LiveRangeConnector::LiveRangeConnector(RegisterAllocationData* data)
    4655     5034521 :     : data_(data) {}
    4656             : 
    4657           0 : bool LiveRangeConnector::CanEagerlyResolveControlFlow(
    4658             :     const InstructionBlock* block) const {
    4659    22212503 :   if (block->PredecessorCount() != 1) return false;
    4660           0 :   return block->predecessors()[0].IsNext(block->rpo_number());
    4661             : }
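
// ---------------------------------------------------------------------------
// A minimal sketch, not part of register-allocator.cc: control flow across an
// edge needs no explicit resolution only when it is a fall-through edge, i.e.
// the block's single predecessor immediately precedes it in reverse post
// order, so the allocation state is already contiguous. The helper below is
// hypothetical.
bool IsFallThroughEdge(int predecessor_count, int pred_rpo, int block_rpo) {
  return predecessor_count == 1 && pred_rpo + 1 == block_rpo;
}
// ---------------------------------------------------------------------------
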
    4662             : 
    4663     2516852 : void LiveRangeConnector::ResolveControlFlow(Zone* local_zone) {
    4664             :   // Lazily linearize live ranges in memory for fast lookup.
    4665     2516852 :   LiveRangeFinder finder(data(), local_zone);
    4666             :   ZoneVector<BitVector*>& live_in_sets = data()->live_in_sets();
    4667    22827168 :   for (const InstructionBlock* block : code()->instruction_blocks()) {
    4668    28303443 :     if (CanEagerlyResolveControlFlow(block)) continue;
    4669    24634250 :     BitVector* live = live_in_sets[block->rpo_number().ToInt()];
    4670             :     BitVector::Iterator iterator(live);
    4671   183499925 :     while (!iterator.Done()) {
    4672             :       int vreg = iterator.Current();
    4673    85591668 :       LiveRangeBoundArray* array = finder.ArrayFor(vreg);
    4674   216775600 :       for (const RpoNumber& pred : block->predecessors()) {
    4675             :         FindResult result;
    4676   131184140 :         const InstructionBlock* pred_block = code()->InstructionBlockAt(pred);
    4677   131184279 :         if (!array->FindConnectableSubranges(block, pred_block, &result)) {
    4678   128480587 :           continue;
    4679             :         }
    4680     9102449 :         InstructionOperand pred_op = result.pred_cover_->GetAssignedOperand();
    4681     9102450 :         InstructionOperand cur_op = result.cur_cover_->GetAssignedOperand();
    4682     9102458 :         if (pred_op.Equals(cur_op)) continue;
    4683     5505393 :         if (!pred_op.IsAnyRegister() && cur_op.IsAnyRegister()) {
    4684             :           // We're doing a reload.
    4685             :           // We don't need to, if:
    4686             :           // 1) there's no register use in this block, and
    4687             :           // 2) the range ends before the block does, and
    4688             :           // 3) we don't have a successor, or the successor is spilled.
    4689             :           LifetimePosition block_start =
    4690             :               LifetimePosition::GapFromInstructionIndex(block->code_start());
    4691             :           LifetimePosition block_end =
    4692             :               LifetimePosition::GapFromInstructionIndex(block->code_end());
    4693     2627856 :           const LiveRange* current = result.cur_cover_;
    4694             :           // TODO(herhut): This is not the successor if we have control flow!
    4695             :           const LiveRange* successor = current->next();
    4696     5255712 :           if (current->End() < block_end &&
    4697      297082 :               (successor == nullptr || successor->spilled())) {
    4698             :             // Verify point 1: no register use. We can scan to the end of the
    4699             :             // range, since it lies entirely within the block.
    4700             : 
    4701             :             bool uses_reg = false;
    4702         151 :             for (const UsePosition* use = current->NextUsePosition(block_start);
    4703      643204 :                  use != nullptr; use = use->next()) {
    4704      470284 :               if (use->operand()->IsAnyRegister()) {
    4705             :                 uses_reg = true;
    4706             :                 break;
    4707             :               }
    4708             :             }
    4709      643053 :             if (!uses_reg) continue;
    4710             :           }
    4711     2454930 :           if (current->TopLevel()->IsSpilledOnlyInDeferredBlocks() &&
    4712             :               pred_block->IsDeferred()) {
    4713             :             // The spill location should be defined in pred_block, so add
    4714             :             // pred_block to the list of blocks requiring a spill operand.
    4715      350095 :             TRACE("Adding B%d to list of spill blocks for %d\n",
    4716             :                   pred_block->rpo_number().ToInt(),
    4717             :                   current->TopLevel()->vreg());
    4718             :             current->TopLevel()->GetListOfBlocksRequiringSpillOperands()->Add(
    4719             :                 pred_block->rpo_number().ToInt());
    4720             :           }
    4721             :         }
    4722             :         int move_loc = ResolveControlFlow(block, cur_op, pred_block, pred_op);
    4723             :         USE(move_loc);
    4724             :         DCHECK_IMPLIES(
    4725             :             result.cur_cover_->TopLevel()->IsSpilledOnlyInDeferredBlocks() &&
    4726             :                 !(pred_op.IsAnyRegister() && cur_op.IsAnyRegister()),
    4727             :             code()->GetInstructionBlock(move_loc)->IsDeferred());
    4728             :       }
    4729    85591460 :       iterator.Advance();
    4730             :     }
    4731             :   }
    4732             : 
    4733             :   // At this stage, we have collected the blocks needing a spill operand from
    4734             :   // ConnectRanges and from ResolveControlFlow. Time to commit the spills for
    4735             :   // deferred blocks.
    4736             :   const size_t live_ranges_size = data()->live_ranges().size();
    4737    93986259 :   for (TopLevelLiveRange* top : data()->live_ranges()) {
    4738    91468856 :     CHECK_EQ(live_ranges_size,
    4739             :              data()->live_ranges().size());  // TODO(neis): crbug.com/831822
    4740   132319724 :     if (top == nullptr || top->IsEmpty() ||
    4741             :         !top->IsSpilledOnlyInDeferredBlocks())
    4742             :       continue;
    4743     1020499 :     CommitSpillsInDeferredBlocks(top, finder.ArrayFor(top->vreg()), local_zone);
    4744             :   }
    4745     2517403 : }
    4746             : 
    4747           0 : int LiveRangeConnector::ResolveControlFlow(const InstructionBlock* block,
    4748             :                                            const InstructionOperand& cur_op,
    4749             :                                            const InstructionBlock* pred,
    4750             :                                            const InstructionOperand& pred_op) {
    4751             :   DCHECK(!pred_op.Equals(cur_op));
    4752             :   int gap_index;
    4753             :   Instruction::GapPosition position;
    4754     2704611 :   if (block->PredecessorCount() == 1) {
    4755             :     gap_index = block->first_instruction_index();
    4756             :     position = Instruction::START;
    4757             :   } else {
    4758             :     DCHECK_EQ(1, pred->SuccessorCount());
    4759             :     DCHECK(!code()
    4760             :                 ->InstructionAt(pred->last_instruction_index())
    4761             :                 ->HasReferenceMap());
    4762             :     gap_index = pred->last_instruction_index();
    4763             :     position = Instruction::END;
    4764             :   }
    4765     2704611 :   data()->AddGapMove(gap_index, position, pred_op, cur_op);
    4766           0 :   return gap_index;
    4767             : }
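
// ---------------------------------------------------------------------------
// A minimal sketch, not part of register-allocator.cc, of the placement rule
// above: a resolving move for the edge pred -> block goes in the START gap of
// block when block has a single predecessor, otherwise in the END gap of
// pred, which must then have a single successor. Together the two cases cover
// every edge once critical edges (multiple predecessors on one side, multiple
// successors on the other) have been split. Names are hypothetical.
enum class GapPos { kStart, kEnd };

GapPos ChooseGapPosition(int block_predecessor_count,
                         int pred_successor_count) {
  if (block_predecessor_count == 1) return GapPos::kStart;
  // Critical edges are assumed split earlier, so pred has exactly one
  // successor here (the real code DCHECKs this).
  (void)pred_successor_count;
  return GapPos::kEnd;
}
// ---------------------------------------------------------------------------
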
    4768             : 
    4769     2516554 : void LiveRangeConnector::ConnectRanges(Zone* local_zone) {
    4770             :   DelayedInsertionMap delayed_insertion_map(local_zone);
    4771             :   const size_t live_ranges_size = data()->live_ranges().size();
    4772    93982572 :   for (TopLevelLiveRange* top_range : data()->live_ranges()) {
    4773    91465799 :     CHECK_EQ(live_ranges_size,
    4774             :              data()->live_ranges().size());  // TODO(neis): crbug.com/831822
    4775    91465799 :     if (top_range == nullptr) continue;
    4776             :     bool connect_spilled = top_range->IsSpilledOnlyInDeferredBlocks();
    4777             :     LiveRange* first_range = top_range;
    4778   149712097 :     for (LiveRange *second_range = first_range->next(); second_range != nullptr;
    4779             :          first_range = second_range, second_range = second_range->next()) {
    4780             :       LifetimePosition pos = second_range->Start();
    4781             :       // Add a gap move if the two live ranges touch and there is no block
    4782             :       // boundary.
    4783    86991465 :       if (second_range->spilled()) continue;
    4784    21814371 :       if (first_range->End() != pos) continue;
    4785    23164658 :       if (data()->IsBlockBoundary(pos) &&
    4786             :           !CanEagerlyResolveControlFlow(GetInstructionBlock(code(), pos))) {
    4787             :         continue;
    4788             :       }
    4789    20194187 :       InstructionOperand prev_operand = first_range->GetAssignedOperand();
    4790    20194137 :       InstructionOperand cur_operand = second_range->GetAssignedOperand();
    4791    20194123 :       if (prev_operand.Equals(cur_operand)) continue;
    4792             :       bool delay_insertion = false;
    4793             :       Instruction::GapPosition gap_pos;
    4794             :       int gap_index = pos.ToInstructionIndex();
    4795    22395455 :       if (connect_spilled && !prev_operand.IsAnyRegister() &&
    4796             :           cur_operand.IsAnyRegister()) {
    4797     1349996 :         const InstructionBlock* block = code()->GetInstructionBlock(gap_index);
    4798             :         DCHECK(block->IsDeferred());
    4799             :         // We are performing a reload in this block, so the spill operand
    4800             :         // must be defined here.
    4801             :         top_range->GetListOfBlocksRequiringSpillOperands()->Add(
    4802             :             block->rpo_number().ToInt());
    4803             :       }
    4804             : 
    4805    19687230 :       if (pos.IsGapPosition()) {
    4806    19687019 :         gap_pos = pos.IsStart() ? Instruction::START : Instruction::END;
    4807             :       } else {
    4808         211 :         if (pos.IsStart()) {
    4809             :           delay_insertion = true;
    4810             :         } else {
    4811           0 :           gap_index++;
    4812             :         }
    4813         211 :         gap_pos = delay_insertion ? Instruction::END : Instruction::START;
    4814             :       }
    4815             :       // Reloads or spills for ranges spilled only in deferred blocks must
    4816             :       // themselves happen only in deferred blocks.
    4817             :       DCHECK_IMPLIES(connect_spilled && !(prev_operand.IsAnyRegister() &&
    4818             :                                           cur_operand.IsAnyRegister()),
    4819             :                      code()->GetInstructionBlock(gap_index)->IsDeferred());
    4820             : 
    4821             :       ParallelMove* move =
    4822             :           code()->InstructionAt(gap_index)->GetOrCreateParallelMove(
    4823             :               gap_pos, code_zone());
    4824    19686840 :       if (!delay_insertion) {
    4825             :         move->AddMove(prev_operand, cur_operand);
    4826             :       } else {
    4827             :         delayed_insertion_map.insert(
    4828         211 :             std::make_pair(std::make_pair(move, prev_operand), cur_operand));
    4829             :       }
    4830             :     }
    4831             :   }
    4832     2516773 :   if (delayed_insertion_map.empty()) return;
    4833             :   // Insert all the moves which should occur after the stored move.
    4834             :   ZoneVector<MoveOperands*> to_insert(local_zone);
    4835             :   ZoneVector<MoveOperands*> to_eliminate(local_zone);
    4836          76 :   to_insert.reserve(4);
    4837          76 :   to_eliminate.reserve(4);
    4838          76 :   ParallelMove* moves = delayed_insertion_map.begin()->first.first;
    4839             :   for (auto it = delayed_insertion_map.begin();; ++it) {
    4840             :     bool done = it == delayed_insertion_map.end();
    4841         287 :     if (done || it->first.first != moves) {
    4842             :       // Commit the MoveOperands for current ParallelMove.
    4843         211 :       for (MoveOperands* move : to_eliminate) {
    4844             :         move->Eliminate();
    4845             :       }
    4846         422 :       for (MoveOperands* move : to_insert) {
    4847         211 :         moves->push_back(move);
    4848             :       }
    4849         211 :       if (done) break;
    4850             :       // Reset state.
    4851             :       to_eliminate.clear();
    4852             :       to_insert.clear();
    4853         135 :       moves = it->first.first;
    4854             :     }
    4855             :     // Gather all MoveOperands for a single ParallelMove.
    4856             :     MoveOperands* move =
    4857         211 :         new (code_zone()) MoveOperands(it->first.second, it->second);
    4858         211 :     moves->PrepareInsertAfter(move, &to_eliminate);
    4859         211 :     to_insert.push_back(move);
    4860             :   }
    4861             : }
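
// ---------------------------------------------------------------------------
// A minimal sketch, not part of register-allocator.cc, of the group-and-flush
// pattern used for the delayed insertions above: the ordered map keeps all
// entries for one ParallelMove adjacent, so a single pass that runs one step
// past the end can batch each group and commit it before moving on. The types
// below are hypothetical simplifications of the V8 ones.
#include <map>
#include <utility>
#include <vector>

// Key: (parallel-move id, source operand); value: destination operand.
using DelayedMap = std::map<std::pair<int, int>, int>;

template <typename CommitFn>
void CommitDelayedInsertions(const DelayedMap& delayed, CommitFn commit) {
  if (delayed.empty()) return;
  std::vector<std::pair<int, int>> batch;  // (source, destination) moves
  int group = delayed.begin()->first.first;
  for (auto it = delayed.begin();; ++it) {
    bool done = it == delayed.end();
    if (done || it->first.first != group) {
      commit(group, batch);  // flush the finished group
      if (done) break;
      batch.clear();
      group = it->first.first;
    }
    batch.push_back({it->first.second, it->second});
  }
}
// ---------------------------------------------------------------------------
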
    4862             : 
    4863     1020473 : void LiveRangeConnector::CommitSpillsInDeferredBlocks(
    4864             :     TopLevelLiveRange* range, LiveRangeBoundArray* array, Zone* temp_zone) {
    4865             :   DCHECK(range->IsSpilledOnlyInDeferredBlocks());
    4866             :   DCHECK(!range->spilled());
    4867             : 
    4868             :   InstructionSequence* code = data()->code();
    4869     1020473 :   InstructionOperand spill_operand = range->GetSpillRangeOperand();
    4870             : 
    4871     1020473 :   TRACE("Live Range %d will be spilled only in deferred blocks.\n",
    4872             :         range->vreg());
    4873             :   // If we have ranges that aren't spilled but require the operand on the stack,
    4874             :   // make sure we insert the spill.
    4875     9672137 :   for (const LiveRange* child = range; child != nullptr;
    4876             :        child = child->next()) {
    4877    13858993 :     for (const UsePosition* pos = child->first_pos(); pos != nullptr;
    4878             :          pos = pos->next()) {
    4879     9258574 :       if (pos->type() != UsePositionType::kRequiresSlot && !child->spilled())
    4880             :         continue;
    4881      568775 :       range->AddBlockRequiringSpillOperand(
    4882             :           code->GetInstructionBlock(pos->pos().ToInstructionIndex())
    4883             :               ->rpo_number());
    4884             :     }
    4885             :   }
    4886             : 
    4887     1020510 :   ZoneQueue<int> worklist(temp_zone);
    4888             : 
    4889     5044337 :   for (BitVector::Iterator iterator(
    4890             :            range->GetListOfBlocksRequiringSpillOperands());
    4891     2011920 :        !iterator.Done(); iterator.Advance()) {
    4892     4023862 :     worklist.push(iterator.Current());
    4893             :   }
    4894             : 
    4895             :   ZoneSet<std::pair<RpoNumber, int>> done_moves(temp_zone);
    4896             :   // Seek the deferred blocks that dominate locations requiring spill operands,
    4897             :   // and spill there. We only need to spill at the start of such blocks.
    4898             :   BitVector done_blocks(
    4899     1020463 :       range->GetListOfBlocksRequiringSpillOperands()->length(), temp_zone);
    4900     7976113 :   while (!worklist.empty()) {
    4901     6955645 :     int block_id = worklist.front();
    4902             :     worklist.pop();
    4903     6955640 :     if (done_blocks.Contains(block_id)) continue;
    4904             :     done_blocks.Add(block_id);
    4905             :     InstructionBlock* spill_block =
    4906     5472069 :         code->InstructionBlockAt(RpoNumber::FromInt(block_id));
    4907             : 
    4908    12197154 :     for (const RpoNumber& pred : spill_block->predecessors()) {
    4909     6725063 :       const InstructionBlock* pred_block = code->InstructionBlockAt(pred);
    4910             : 
    4911     6725092 :       if (pred_block->IsDeferred()) {
    4912     9887543 :         worklist.push(pred_block->rpo_number().ToInt());
    4913             :       } else {
    4914             :         LifetimePosition pred_end =
    4915             :             LifetimePosition::InstructionFromInstructionIndex(
    4916             :                 pred_block->last_instruction_index());
    4917             : 
    4918             :         LiveRangeBound* bound = array->Find(pred_end);
    4919             : 
    4920     1781323 :         InstructionOperand pred_op = bound->range_->GetAssignedOperand();
    4921             : 
    4922             :         RpoNumber spill_block_number = spill_block->rpo_number();
    4923     3562602 :         if (done_moves.find(std::make_pair(
    4924             :                 spill_block_number, range->vreg())) == done_moves.end()) {
    4925     1781278 :           TRACE("Spilling deferred spill for range %d at B%d\n", range->vreg(),
    4926             :                 spill_block_number.ToInt());
    4927             :           data()->AddGapMove(spill_block->first_instruction_index(),
    4928             :                              Instruction::GapPosition::START, pred_op,
    4929     1781278 :                              spill_operand);
    4930     3562617 :           done_moves.insert(std::make_pair(spill_block_number, range->vreg()));
    4931             :           spill_block->mark_needs_frame();
    4932             :         }
    4933             :       }
    4934             :     }
    4935             :   }
    4936     1020502 : }
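
// ---------------------------------------------------------------------------
// A minimal sketch, not part of register-allocator.cc, of the worklist walk
// above: starting from the blocks that need the value on the stack, walk
// predecessor edges backwards through the deferred region and emit one spill
// per edge on which a non-deferred predecessor enters it, rather than
// spilling at the definition. All types below are hypothetical.
#include <queue>
#include <set>
#include <vector>

struct BlockRef {
  bool deferred;
  std::vector<int> predecessors;
};

void PlaceDeferredSpills(const std::vector<BlockRef>& blocks,
                         std::queue<int> worklist) {  // blocks needing a slot
  std::set<int> done;
  while (!worklist.empty()) {
    int block = worklist.front();
    worklist.pop();
    if (!done.insert(block).second) continue;  // already visited
    for (int pred : blocks[block].predecessors) {
      if (blocks[pred].deferred) {
        // Still inside the deferred region: keep walking backwards.
        worklist.push(pred);
      } else {
        // Entry edge into the deferred region: emit the spill move at the
        // start of `block` (the real code deduplicates per block and vreg).
      }
    }
  }
}
// ---------------------------------------------------------------------------
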
    4937             : 
    4938             : #undef TRACE
    4939             : 
    4940             : }  // namespace compiler
    4941             : }  // namespace internal
    4942      120216 : }  // namespace v8

Generated by: LCOV version 1.10