LCOV - code coverage report
Current view: top level - src/compiler - register-allocator.cc (source / functions) Hit Total Coverage
Test: app.info Lines: 1259 1471 85.6 %
Date: 2017-04-26 Functions: 119 167 71.3 %

          Line data    Source code
       1             : // Copyright 2014 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #include "src/compiler/register-allocator.h"
       6             : 
       7             : #include "src/assembler-inl.h"
       8             : #include "src/base/adapters.h"
       9             : #include "src/compiler/linkage.h"
      10             : #include "src/string-stream.h"
      11             : 
      12             : namespace v8 {
      13             : namespace internal {
      14             : namespace compiler {
      15             : 
      16             : #define TRACE(...)                             \
      17             :   do {                                         \
      18             :     if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \
      19             :   } while (false)
      20             : 
      21             : 
      22             : namespace {
      23             : 
      24             : static const int kFloatRepBit =
      25             :     1 << static_cast<int>(MachineRepresentation::kFloat32);
      26             : static const int kSimd128RepBit =
      27             :     1 << static_cast<int>(MachineRepresentation::kSimd128);
      28             : 
      29    65593199 : void RemoveElement(ZoneVector<LiveRange*>* v, LiveRange* range) {
      30    65593261 :   auto it = std::find(v->begin(), v->end(), range);
      31             :   DCHECK(it != v->end());
      32    65593261 :   v->erase(it);
      33    65593213 : }
      34             : 
      35      915104 : int GetRegisterCount(const RegisterConfiguration* cfg, RegisterKind kind) {
      36             :   return kind == FP_REGISTERS ? cfg->num_double_registers()
      37     1830140 :                               : cfg->num_general_registers();
      38             : }
      39             : 
      40             : 
      41      915096 : int GetAllocatableRegisterCount(const RegisterConfiguration* cfg,
      42             :                                 RegisterKind kind) {
      43             :   return kind == FP_REGISTERS ? cfg->num_allocatable_double_registers()
      44     1830140 :                               : cfg->num_allocatable_general_registers();
      45             : }
      46             : 
      47             : 
      48      915090 : const int* GetAllocatableRegisterCodes(const RegisterConfiguration* cfg,
      49             :                                        RegisterKind kind) {
      50             :   return kind == FP_REGISTERS ? cfg->allocatable_double_codes()
      51     1830140 :                               : cfg->allocatable_general_codes();
      52             : }
      53             : 
      54             : 
      55      384068 : const InstructionBlock* GetContainingLoop(const InstructionSequence* sequence,
      56             :                                           const InstructionBlock* block) {
      57             :   RpoNumber index = block->loop_header();
      58     2158982 :   if (!index.IsValid()) return nullptr;
      59      384068 :   return sequence->InstructionBlockAt(index);
      60             : }
      61             : 
      62             : 
      63             : const InstructionBlock* GetInstructionBlock(const InstructionSequence* code,
      64             :                                             LifetimePosition pos) {
      65    15100911 :   return code->GetInstructionBlock(pos.ToInstructionIndex());
      66             : }
      67             : 
      68             : 
      69     2488387 : Instruction* GetLastInstruction(InstructionSequence* code,
      70     2488387 :                                 const InstructionBlock* block) {
      71     2488387 :   return code->InstructionAt(block->last_instruction_index());
      72             : }
      73             : 
      74             : // TODO(dcarney): fix frame to allow frame accesses to half size location.
      75     2881181 : int GetByteWidth(MachineRepresentation rep) {
      76     2881181 :   switch (rep) {
      77             :     case MachineRepresentation::kBit:
      78             :     case MachineRepresentation::kWord8:
      79             :     case MachineRepresentation::kWord16:
      80             :     case MachineRepresentation::kWord32:
      81             :     case MachineRepresentation::kTaggedSigned:
      82             :     case MachineRepresentation::kTaggedPointer:
      83             :     case MachineRepresentation::kTagged:
      84             :     case MachineRepresentation::kFloat32:
      85             :       return kPointerSize;
      86             :     case MachineRepresentation::kWord64:
      87             :     case MachineRepresentation::kFloat64:
      88             :       return kDoubleSize;
      89             :     case MachineRepresentation::kSimd128:
      90           0 :       return kSimd128Size;
      91             :     case MachineRepresentation::kSimd1x4:
      92             :     case MachineRepresentation::kSimd1x8:
      93             :     case MachineRepresentation::kSimd1x16:
      94           0 :       return kSimdMaskRegisters ? kPointerSize : kSimd128Size;
      95             :     case MachineRepresentation::kNone:
      96             :       break;
      97             :   }
      98           0 :   UNREACHABLE();
      99             :   return 0;
     100             : }
     101             : 
     102             : }  // namespace
     103             : 
     104             : class LiveRangeBound {
     105             :  public:
     106    27454941 :   explicit LiveRangeBound(LiveRange* range, bool skip)
     107    82364823 :       : range_(range), start_(range->Start()), end_(range->End()), skip_(skip) {
     108             :     DCHECK(!range->IsEmpty());
     109             :   }
     110             : 
     111             :   bool CanCover(LifetimePosition position) {
     112    79474518 :     return start_ <= position && position < end_;
     113             :   }
     114             : 
     115             :   LiveRange* const range_;
     116             :   const LifetimePosition start_;
     117             :   const LifetimePosition end_;
     118             :   const bool skip_;
     119             : 
     120             :  private:
     121             :   DISALLOW_COPY_AND_ASSIGN(LiveRangeBound);
     122             : };
     123             : 
     124             : 
     125             : struct FindResult {
     126             :   LiveRange* cur_cover_;
     127             :   LiveRange* pred_cover_;
     128             : };
     129             : 
     130             : 
     131             : class LiveRangeBoundArray {
     132             :  public:
     133    47172615 :   LiveRangeBoundArray() : length_(0), start_(nullptr) {}
     134             : 
     135             :   bool ShouldInitialize() { return start_ == nullptr; }
     136             : 
     137     4164714 :   void Initialize(Zone* zone, TopLevelLiveRange* range) {
     138     4164714 :     length_ = range->GetChildCount();
     139             : 
     140     4164715 :     start_ = zone->NewArray<LiveRangeBound>(length_);
     141             :     LiveRangeBound* curr = start_;
     142             :     // Normally, spilled ranges do not need connecting moves, because the spill
     143             :     // location has been assigned at definition. For ranges spilled in deferred
     144             :     // blocks, that is not the case, so we need to connect the spilled children.
     145    31619655 :     for (LiveRange *i = range; i != nullptr; i = i->next(), ++curr) {
     146    27454940 :       new (curr) LiveRangeBound(i, i->spilled());
     147             :     }
     148     4164715 :   }
     149             : 
     150             :   LiveRangeBound* Find(const LifetimePosition position) const {
     151             :     size_t left_index = 0;
     152    80416591 :     size_t right_index = length_;
     153             :     while (true) {
     154   418540348 :       size_t current_index = left_index + (right_index - left_index) / 2;
     155             :       DCHECK(right_index > current_index);
     156   418540348 :       LiveRangeBound* bound = &start_[current_index];
     157   418540348 :       if (bound->start_ <= position) {
     158   287967441 :         if (position < bound->end_) return bound;
     159             :         DCHECK(left_index < current_index);
     160             :         left_index = current_index;
     161             :       } else {
     162             :         right_index = current_index;
     163             :       }
     164             :     }
     165             :   }
     166             : 
     167             :   LiveRangeBound* FindPred(const InstructionBlock* pred) {
     168             :     LifetimePosition pred_end =
     169             :         LifetimePosition::InstructionFromInstructionIndex(
     170             :             pred->last_instruction_index());
     171             :     return Find(pred_end);
     172             :   }
     173             : 
     174             :   LiveRangeBound* FindSucc(const InstructionBlock* succ) {
     175             :     LifetimePosition succ_start = LifetimePosition::GapFromInstructionIndex(
     176             :         succ->first_instruction_index());
     177             :     return Find(succ_start);
     178             :   }
     179             : 
     180   158949036 :   bool FindConnectableSubranges(const InstructionBlock* block,
     181    79474518 :                                 const InstructionBlock* pred,
     182             :                                 FindResult* result) const {
     183             :     LifetimePosition pred_end =
     184             :         LifetimePosition::InstructionFromInstructionIndex(
     185             :             pred->last_instruction_index());
     186             :     LiveRangeBound* bound = Find(pred_end);
     187    79474518 :     result->pred_cover_ = bound->range_;
     188             :     LifetimePosition cur_start = LifetimePosition::GapFromInstructionIndex(
     189             :         block->first_instruction_index());
     190             : 
     191    79474518 :     if (bound->CanCover(cur_start)) {
     192             :       // Both blocks are covered by the same range, so there is nothing to
     193             :       // connect.
     194             :       return false;
     195             :     }
     196             :     bound = Find(cur_start);
     197    30084613 :     if (bound->skip_) {
     198             :       return false;
     199             :     }
     200     4714169 :     result->cur_cover_ = bound->range_;
     201             :     DCHECK(result->pred_cover_ != nullptr && result->cur_cover_ != nullptr);
     202     4714169 :     return (result->cur_cover_ != result->pred_cover_);
     203             :   }
     204             : 
     205             :  private:
     206             :   size_t length_;
     207             :   LiveRangeBound* start_;
     208             : 
     209             :   DISALLOW_COPY_AND_ASSIGN(LiveRangeBoundArray);
     210             : };
     211             : 
     212             : 
     213             : class LiveRangeFinder {
     214             :  public:
     215      915014 :   explicit LiveRangeFinder(const RegisterAllocationData* data, Zone* zone)
     216             :       : data_(data),
     217      915014 :         bounds_length_(static_cast<int>(data_->live_ranges().size())),
     218      915014 :         bounds_(zone->NewArray<LiveRangeBoundArray>(bounds_length_)),
     219     2745130 :         zone_(zone) {
     220    48087727 :     for (int i = 0; i < bounds_length_; ++i) {
     221    47172625 :       new (&bounds_[i]) LiveRangeBoundArray();
     222             :     }
     223      915102 :   }
     224             : 
     225    52168293 :   LiveRangeBoundArray* ArrayFor(int operand_index) {
     226             :     DCHECK(operand_index < bounds_length_);
     227   104336586 :     TopLevelLiveRange* range = data_->live_ranges()[operand_index];
     228             :     DCHECK(range != nullptr && !range->IsEmpty());
     229    52168293 :     LiveRangeBoundArray* array = &bounds_[operand_index];
     230    52168293 :     if (array->ShouldInitialize()) {
     231     4164717 :       array->Initialize(zone_, range);
     232             :     }
     233    52168292 :     return array;
     234             :   }
     235             : 
     236             :  private:
     237             :   const RegisterAllocationData* const data_;
     238             :   const int bounds_length_;
     239             :   LiveRangeBoundArray* const bounds_;
     240             :   Zone* const zone_;
     241             : 
     242             :   DISALLOW_COPY_AND_ASSIGN(LiveRangeFinder);
     243             : };
     244             : 
     245             : 
     246             : typedef std::pair<ParallelMove*, InstructionOperand> DelayedInsertionMapKey;
     247             : 
     248             : 
     249             : struct DelayedInsertionMapCompare {
     250             :   bool operator()(const DelayedInsertionMapKey& a,
     251             :                   const DelayedInsertionMapKey& b) const {
     252         171 :     if (a.first == b.first) {
     253             :       return a.second.Compare(b.second);
     254             :     }
     255         171 :     return a.first < b.first;
     256             :   }
     257             : };
     258             : 
     259             : 
     260             : typedef ZoneMap<DelayedInsertionMapKey, InstructionOperand,
     261             :                 DelayedInsertionMapCompare> DelayedInsertionMap;
     262             : 
     263             : 
     264    67971287 : UsePosition::UsePosition(LifetimePosition pos, InstructionOperand* operand,
     265             :                          void* hint, UsePositionHintType hint_type)
     266    67971287 :     : operand_(operand), hint_(hint), next_(nullptr), pos_(pos), flags_(0) {
     267             :   DCHECK_IMPLIES(hint == nullptr, hint_type == UsePositionHintType::kNone);
     268             :   bool register_beneficial = true;
     269             :   UsePositionType type = UsePositionType::kAny;
     270   134579407 :   if (operand_ != nullptr && operand_->IsUnallocated()) {
     271             :     const UnallocatedOperand* unalloc = UnallocatedOperand::cast(operand_);
     272    66608206 :     if (unalloc->HasRegisterPolicy()) {
     273             :       type = UsePositionType::kRequiresRegister;
     274    45825044 :     } else if (unalloc->HasSlotPolicy()) {
     275             :       type = UsePositionType::kRequiresSlot;
     276             :       register_beneficial = false;
     277             :     } else {
     278    33691632 :       register_beneficial = !unalloc->HasAnyPolicy();
     279             :     }
     280             :   }
     281   135942574 :   flags_ = TypeField::encode(type) | HintTypeField::encode(hint_type) |
     282    67971287 :            RegisterBeneficialField::encode(register_beneficial) |
     283    67971287 :            AssignedRegisterField::encode(kUnassignedRegister);
     284             :   DCHECK(pos_.IsValid());
     285    67971287 : }
     286             : 
     287             : 
     288           0 : bool UsePosition::HasHint() const {
     289             :   int hint_register;
     290    68083364 :   return HintRegister(&hint_register);
     291             : }
     292             : 
     293             : 
     294   146305002 : bool UsePosition::HintRegister(int* register_code) const {
     295   146305002 :   if (hint_ == nullptr) return false;
     296    89971280 :   switch (HintTypeField::decode(flags_)) {
     297             :     case UsePositionHintType::kNone:
     298             :     case UsePositionHintType::kUnresolved:
     299             :       return false;
     300             :     case UsePositionHintType::kUsePos: {
     301             :       UsePosition* use_pos = reinterpret_cast<UsePosition*>(hint_);
     302    10861865 :       int assigned_register = AssignedRegisterField::decode(use_pos->flags_);
     303    10861865 :       if (assigned_register == kUnassignedRegister) return false;
     304     4734817 :       *register_code = assigned_register;
     305     4734817 :       return true;
     306             :     }
     307             :     case UsePositionHintType::kOperand: {
     308             :       InstructionOperand* operand =
     309             :           reinterpret_cast<InstructionOperand*>(hint_);
     310    28013966 :       *register_code = LocationOperand::cast(operand)->register_code();
     311    28013966 :       return true;
     312             :     }
     313             :     case UsePositionHintType::kPhi: {
     314      671569 :       RegisterAllocationData::PhiMapValue* phi =
     315             :           reinterpret_cast<RegisterAllocationData::PhiMapValue*>(hint_);
     316             :       int assigned_register = phi->assigned_register();
     317      671569 :       if (assigned_register == kUnassignedRegister) return false;
     318      112432 :       *register_code = assigned_register;
     319      112432 :       return true;
     320             :     }
     321             :   }
     322           0 :   UNREACHABLE();
     323             :   return false;
     324             : }
     325             : 
     326             : 
     327    31094705 : UsePositionHintType UsePosition::HintTypeForOperand(
     328             :     const InstructionOperand& op) {
     329    31094705 :   switch (op.kind()) {
     330             :     case InstructionOperand::CONSTANT:
     331             :     case InstructionOperand::IMMEDIATE:
     332             :     case InstructionOperand::EXPLICIT:
     333             :       return UsePositionHintType::kNone;
     334             :     case InstructionOperand::UNALLOCATED:
     335    13392506 :       return UsePositionHintType::kUnresolved;
     336             :     case InstructionOperand::ALLOCATED:
     337    18593832 :       if (op.IsRegister() || op.IsFPRegister()) {
     338             :         return UsePositionHintType::kOperand;
     339             :       } else {
     340             :         DCHECK(op.IsStackSlot() || op.IsFPStackSlot());
     341      787218 :         return UsePositionHintType::kNone;
     342             :       }
     343             :     case InstructionOperand::INVALID:
     344             :       break;
     345             :   }
     346           0 :   UNREACHABLE();
     347             :   return UsePositionHintType::kNone;
     348             : }
     349             : 
     350           0 : void UsePosition::SetHint(UsePosition* use_pos) {
     351             :   DCHECK_NOT_NULL(use_pos);
     352     8033871 :   hint_ = use_pos;
     353    16067742 :   flags_ = HintTypeField::update(flags_, UsePositionHintType::kUsePos);
     354           0 : }
     355             : 
     356           0 : void UsePosition::ResolveHint(UsePosition* use_pos) {
     357             :   DCHECK_NOT_NULL(use_pos);
     358     9272412 :   if (HintTypeField::decode(flags_) != UsePositionHintType::kUnresolved) return;
     359     4636207 :   hint_ = use_pos;
     360     4636207 :   flags_ = HintTypeField::update(flags_, UsePositionHintType::kUsePos);
     361             : }
     362             : 
     363             : 
     364           0 : void UsePosition::set_type(UsePositionType type, bool register_beneficial) {
     365             :   DCHECK_IMPLIES(type == UsePositionType::kRequiresSlot, !register_beneficial);
     366             :   DCHECK_EQ(kUnassignedRegister, AssignedRegisterField::decode(flags_));
     367    13153404 :   flags_ = TypeField::encode(type) |
     368    13153404 :            RegisterBeneficialField::encode(register_beneficial) |
     369    13153404 :            HintTypeField::encode(HintTypeField::decode(flags_)) |
     370    13153404 :            AssignedRegisterField::encode(kUnassignedRegister);
     371           0 : }
     372             : 
     373             : 
     374    31818405 : UseInterval* UseInterval::SplitAt(LifetimePosition pos, Zone* zone) {
     375             :   DCHECK(Contains(pos) && pos != start());
     376    31818405 :   UseInterval* after = new (zone) UseInterval(pos, end_);
     377    31818450 :   after->next_ = next_;
     378    31818450 :   next_ = nullptr;
     379    31818450 :   end_ = pos;
     380    31818450 :   return after;
     381             : }
     382             : 
     383             : 
     384           0 : void LifetimePosition::Print() const {
     385           0 :   OFStream os(stdout);
     386           0 :   os << *this << std::endl;
     387           0 : }
     388             : 
     389             : 
     390           0 : std::ostream& operator<<(std::ostream& os, const LifetimePosition pos) {
     391           0 :   os << '@' << pos.ToInstructionIndex();
     392           0 :   if (pos.IsGapPosition()) {
     393             :     os << 'g';
     394             :   } else {
     395             :     os << 'i';
     396             :   }
     397           0 :   if (pos.IsStart()) {
     398             :     os << 's';
     399             :   } else {
     400             :     os << 'e';
     401             :   }
     402           0 :   return os;
     403             : }
     404             : 
     405           0 : LiveRange::LiveRange(int relative_id, MachineRepresentation rep,
     406             :                      TopLevelLiveRange* top_level)
     407             :     : relative_id_(relative_id),
     408             :       bits_(0),
     409             :       last_interval_(nullptr),
     410             :       first_interval_(nullptr),
     411             :       first_pos_(nullptr),
     412             :       top_level_(top_level),
     413             :       next_(nullptr),
     414             :       current_interval_(nullptr),
     415             :       last_processed_use_(nullptr),
     416             :       current_hint_position_(nullptr),
     417    92153434 :       splitting_pointer_(nullptr) {
     418             :   DCHECK(AllocatedOperand::IsSupportedRepresentation(rep));
     419    92153434 :   bits_ = AssignedRegisterField::encode(kUnassignedRegister) |
     420    92153434 :           RepresentationField::encode(rep);
     421           0 : }
     422             : 
     423             : 
     424           0 : void LiveRange::VerifyPositions() const {
     425             :   // Walk the positions, verifying that each is in an interval.
     426           0 :   UseInterval* interval = first_interval_;
     427           0 :   for (UsePosition* pos = first_pos_; pos != nullptr; pos = pos->next()) {
     428           0 :     CHECK(Start() <= pos->pos());
     429           0 :     CHECK(pos->pos() <= End());
     430           0 :     CHECK_NOT_NULL(interval);
     431           0 :     while (!interval->Contains(pos->pos()) && interval->end() != pos->pos()) {
     432             :       interval = interval->next();
     433           0 :       CHECK_NOT_NULL(interval);
     434             :     }
     435             :   }
     436           0 : }
     437             : 
     438             : 
     439           0 : void LiveRange::VerifyIntervals() const {
     440             :   DCHECK(first_interval()->start() == Start());
     441             :   LifetimePosition last_end = first_interval()->end();
     442           0 :   for (UseInterval* interval = first_interval()->next(); interval != nullptr;
     443             :        interval = interval->next()) {
     444             :     DCHECK(last_end <= interval->start());
     445             :     last_end = interval->end();
     446             :   }
     447             :   DCHECK(last_end == End());
     448           0 : }
     449             : 
     450             : 
     451           0 : void LiveRange::set_assigned_register(int reg) {
     452             :   DCHECK(!HasRegisterAssigned() && !spilled());
     453    81157725 :   bits_ = AssignedRegisterField::update(bits_, reg);
     454           0 : }
     455             : 
     456             : 
     457           0 : void LiveRange::UnsetAssignedRegister() {
     458             :   DCHECK(HasRegisterAssigned() && !spilled());
     459           0 :   bits_ = AssignedRegisterField::update(bits_, kUnassignedRegister);
     460           0 : }
     461             : 
     462             : 
     463           0 : void LiveRange::Spill() {
     464             :   DCHECK(!spilled());
     465             :   DCHECK(!TopLevel()->HasNoSpillType());
     466             :   set_spilled(true);
     467    16899141 :   bits_ = AssignedRegisterField::update(bits_, kUnassignedRegister);
     468           0 : }
     469             : 
     470             : 
     471   102315936 : RegisterKind LiveRange::kind() const {
     472   102315936 :   return IsFloatingPoint(representation()) ? FP_REGISTERS : GENERAL_REGISTERS;
     473             : }
     474             : 
     475             : 
     476    29519437 : UsePosition* LiveRange::FirstHintPosition(int* register_index) const {
     477    91803311 :   for (UsePosition* pos = first_pos_; pos != nullptr; pos = pos->next()) {
     478    78229511 :     if (pos->HintRegister(register_index)) return pos;
     479             :   }
     480             :   return nullptr;
     481             : }
     482             : 
     483             : 
     484    47602644 : UsePosition* LiveRange::NextUsePosition(LifetimePosition start) const {
     485    36653807 :   UsePosition* use_pos = last_processed_use_;
     486    24030491 :   if (use_pos == nullptr || use_pos->pos() > start) {
     487             :     use_pos = first_pos();
     488             :   }
     489    36653807 :   while (use_pos != nullptr && use_pos->pos() < start) {
     490             :     use_pos = use_pos->next();
     491             :   }
     492    24030491 :   last_processed_use_ = use_pos;
     493    24030491 :   return use_pos;
     494             : }
     495             : 
     496             : 
     497    13621925 : UsePosition* LiveRange::NextUsePositionRegisterIsBeneficial(
     498             :     LifetimePosition start) const {
     499    43001043 :   UsePosition* pos = NextUsePosition(start);
     500    56623088 :   while (pos != nullptr && !pos->RegisterIsBeneficial()) {
     501             :     pos = pos->next();
     502             :   }
     503    13621985 :   return pos;
     504             : }
     505             : 
     506     2403176 : LifetimePosition LiveRange::NextLifetimePositionRegisterIsBeneficial(
     507      490396 :     const LifetimePosition& start) const {
     508     2403176 :   UsePosition* next_use = NextUsePositionRegisterIsBeneficial(start);
     509     2403190 :   if (next_use == nullptr) return End();
     510             :   return next_use->pos();
     511             : }
     512             : 
     513           0 : UsePosition* LiveRange::PreviousUsePositionRegisterIsBeneficial(
     514       37121 :     LifetimePosition start) const {
     515      280224 :   UsePosition* pos = first_pos();
     516             :   UsePosition* prev = nullptr;
     517      177233 :   while (pos != nullptr && pos->pos() < start) {
     518      140112 :     if (pos->RegisterIsBeneficial()) prev = pos;
     519             :     pos = pos->next();
     520             :   }
     521           0 :   return prev;
     522             : }
     523             : 
     524             : 
     525     9244166 : UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) const {
     526    39143336 :   UsePosition* pos = NextUsePosition(start);
     527    48387488 :   while (pos != nullptr && pos->type() != UsePositionType::kRequiresRegister) {
     528             :     pos = pos->next();
     529             :   }
     530     9244159 :   return pos;
     531             : }
     532             : 
     533             : 
     534      901925 : UsePosition* LiveRange::NextSlotPosition(LifetimePosition start) const {
     535     4391883 :   for (UsePosition* pos = NextUsePosition(start); pos != nullptr;
     536             :        pos = pos->next()) {
     537     3490189 :     if (pos->type() != UsePositionType::kRequiresSlot) continue;
     538             :     return pos;
     539             :   }
     540             :   return nullptr;
     541             : }
     542             : 
     543             : 
     544     2590904 : bool LiveRange::CanBeSpilled(LifetimePosition pos) const {
     545             :   // We cannot spill a live range that has a use requiring a register
     546             :   // at the current or the immediate next position.
     547     2590904 :   UsePosition* use_pos = NextRegisterPosition(pos);
     548     2590909 :   if (use_pos == nullptr) return true;
     549             :   return use_pos->pos() > pos.NextStart().End();
     550             : }
     551             : 
     552             : 
     553    48701994 : bool LiveRange::IsTopLevel() const { return top_level_ == this; }
     554             : 
     555             : 
     556   133860186 : InstructionOperand LiveRange::GetAssignedOperand() const {
     557    90116526 :   if (HasRegisterAssigned()) {
     558             :     DCHECK(!spilled());
     559             :     return AllocatedOperand(LocationOperand::REGISTER, representation(),
     560    46372866 :                             assigned_register());
     561             :   }
     562             :   DCHECK(spilled());
     563             :   DCHECK(!HasRegisterAssigned());
     564    43743660 :   if (TopLevel()->HasSpillOperand()) {
     565    33329034 :     InstructionOperand* op = TopLevel()->GetSpillOperand();
     566             :     DCHECK(!op->IsUnallocated());
     567    33329034 :     return *op;
     568             :   }
     569    10414626 :   return TopLevel()->GetSpillRangeOperand();
     570             : }
     571             : 
     572             : 
     573           0 : UseInterval* LiveRange::FirstSearchIntervalForPosition(
     574             :     LifetimePosition position) const {
     575   632871921 :   if (current_interval_ == nullptr) return first_interval_;
     576   540033686 :   if (current_interval_->start() > position) {
     577     1928256 :     current_interval_ = nullptr;
     578     1622589 :     return first_interval_;
     579             :   }
     580             :   return current_interval_;
     581             : }
     582             : 
     583             : 
     584           0 : void LiveRange::AdvanceLastProcessedMarker(
     585             :     UseInterval* to_start_of, LifetimePosition but_not_past) const {
     586   750829824 :   if (to_start_of == nullptr) return;
     587   750833997 :   if (to_start_of->start() > but_not_past) return;
     588   410422499 :   LifetimePosition start = current_interval_ == nullptr
     589             :                                ? LifetimePosition::Invalid()
     590   410422499 :                                : current_interval_->start();
     591   410422499 :   if (to_start_of->start() > start) {
     592    74946132 :     current_interval_ = to_start_of;
     593             :   }
     594             : }
     595             : 
     596             : 
     597   117461358 : LiveRange* LiveRange::SplitAt(LifetimePosition position, Zone* zone) {
     598             :   int new_id = TopLevel()->GetNextChildId();
     599             :   LiveRange* child = new (zone) LiveRange(new_id, representation(), TopLevel());
     600             :   // If we split, we do so because we're about to switch registers or move
     601             :   // to/from a slot, so there's no value in connecting hints.
     602    29365299 :   DetachAt(position, child, zone, DoNotConnectHints);
     603             : 
     604    29365332 :   child->top_level_ = TopLevel();
     605    29365332 :   child->next_ = next_;
     606    29365332 :   next_ = child;
     607    29365332 :   return child;
     608             : }
     609             : 
// Splits this live range at |position|: the use intervals and use positions
// at or after |position| are moved into |result| (which must be empty), and
// this range keeps everything before it. Returns the last use position that
// remains in this range (nullptr if none). With ConnectHints, the first use
// of |result| is hinted from the last use kept here so the allocator prefers
// keeping the value in the same location across the split.
UsePosition* LiveRange::DetachAt(LifetimePosition position, LiveRange* result,
                                 Zone* zone,
                                 HintConnectionOption connect_hints) {
  DCHECK(Start() < position);
  DCHECK(End() > position);
  DCHECK(result->IsEmpty());
  // Find the last interval that ends before the position. If the
  // position is contained in one of the intervals in the chain, we
  // split that interval and use the first part.
  UseInterval* current = FirstSearchIntervalForPosition(position);

  // If the split position coincides with the beginning of a use interval
  // we need to split use positons in a special way.
  bool split_at_start = false;

  if (current->start() == position) {
    // When splitting at start we need to locate the previous use interval.
    current = first_interval_;
  }

  UseInterval* after = nullptr;
  while (current != nullptr) {
    if (current->Contains(position)) {
      // |position| falls inside this interval: cut it in two; the second
      // half becomes the first interval of |result|.
      after = current->SplitAt(position, zone);
      break;
    }
    UseInterval* next = current->next();
    if (next->start() >= position) {
      // |position| falls in the hole before |next|: the chain is simply
      // severed between |current| and |next|.
      split_at_start = (next->start() == position);
      after = next;
      current->set_next(nullptr);
      break;
    }
    current = next;
  }
  DCHECK(nullptr != after);

  // Partition original use intervals to the two live ranges.
  UseInterval* before = current;
  result->last_interval_ =
      (last_interval_ == before)
          ? after            // Only interval in the range after split.
          : last_interval_;  // Last interval of the original range.
  result->first_interval_ = after;
  last_interval_ = before;

  // Find the last use position before the split and the first use
  // position after it.
  // splitting_pointer_ caches where a previous split stopped, so repeated
  // splits (e.g. during splintering) need not rescan from the first use.
  UsePosition* use_after =
      splitting_pointer_ == nullptr || splitting_pointer_->pos() > position
          ? first_pos()
          : splitting_pointer_;
  UsePosition* use_before = nullptr;
  if (split_at_start) {
    // The split position coincides with the beginning of a use interval (the
    // end of a lifetime hole). Use at this position should be attributed to
    // the split child because split child owns use interval covering it.
    while (use_after != nullptr && use_after->pos() < position) {
      use_before = use_after;
      use_after = use_after->next();
    }
  } else {
    while (use_after != nullptr && use_after->pos() <= position) {
      use_before = use_after;
      use_after = use_after->next();
    }
  }

  // Partition original use positions to the two live ranges.
  if (use_before != nullptr) {
    use_before->set_next(nullptr);
  } else {
    first_pos_ = nullptr;
  }
  result->first_pos_ = use_after;

  // Discard cached iteration state. It might be pointing
  // to the use that no longer belongs to this live range.
  last_processed_use_ = nullptr;
  current_interval_ = nullptr;

  if (connect_hints == ConnectHints && use_before != nullptr &&
      use_after != nullptr) {
    use_after->SetHint(use_before);
  }
#ifdef DEBUG
  VerifyChildStructure();
  result->VerifyChildStructure();
#endif
  return use_before;
}
     701             : 
     702             : 
     703           0 : void LiveRange::UpdateParentForAllChildren(TopLevelLiveRange* new_top_level) {
     704    26054036 :   LiveRange* child = this;
     705    29338275 :   for (; child != nullptr; child = child->next()) {
     706    26054036 :     child->top_level_ = new_top_level;
     707             :   }
     708           0 : }
     709             : 
     710             : 
     711    54943864 : void LiveRange::ConvertUsesToOperand(const InstructionOperand& op,
     712    54943864 :                                      const InstructionOperand& spill_op) {
     713   188460469 :   for (UsePosition* pos = first_pos(); pos != nullptr; pos = pos->next()) {
     714             :     DCHECK(Start() <= pos->pos() && pos->pos() <= End());
     715    66907799 :     if (!pos->HasOperand()) continue;
     716    66608806 :     switch (pos->type()) {
     717             :       case UsePositionType::kRequiresSlot:
     718             :         DCHECK(spill_op.IsStackSlot() || spill_op.IsFPStackSlot());
     719             :         InstructionOperand::ReplaceWith(pos->operand(), &spill_op);
     720             :         break;
     721             :       case UsePositionType::kRequiresRegister:
     722             :         DCHECK(op.IsRegister() || op.IsFPRegister());
     723             :       // Fall through.
     724             :       case UsePositionType::kAny:
     725             :         InstructionOperand::ReplaceWith(pos->operand(), &op);
     726             :         break;
     727             :     }
     728             :   }
     729    54943864 : }
     730             : 
     731             : 
     732             : // This implements an ordering on live ranges so that they are ordered by their
     733             : // start positions.  This is needed for the correctness of the register
     734             : // allocation algorithm.  If two live ranges start at the same offset then there
     735             : // is a tie breaker based on where the value is first used.  This part of the
     736             : // ordering is merely a heuristic.
     737    22246999 : bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
     738             :   LifetimePosition start = Start();
     739             :   LifetimePosition other_start = other->Start();
     740   355998113 :   if (start == other_start) {
     741             :     UsePosition* pos = first_pos();
     742    11893759 :     if (pos == nullptr) return false;
     743             :     UsePosition* other_pos = other->first_pos();
     744    10353240 :     if (other_pos == nullptr) return true;
     745             :     return pos->pos() < other_pos->pos();
     746             :   }
     747           0 :   return start < other_start;
     748             : }
     749             : 
     750             : 
     751    22484172 : void LiveRange::SetUseHints(int register_index) {
     752   112562351 :   for (UsePosition* pos = first_pos(); pos != nullptr; pos = pos->next()) {
     753    45188616 :     if (!pos->HasOperand()) continue;
     754    44889563 :     switch (pos->type()) {
     755             :       case UsePositionType::kRequiresSlot:
     756             :         break;
     757             :       case UsePositionType::kRequiresRegister:
     758             :       case UsePositionType::kAny:
     759             :         pos->set_assigned_register(register_index);
     760             :         break;
     761             :     }
     762             :   }
     763    22484172 : }
     764             : 
     765             : 
     766   379325221 : bool LiveRange::CanCover(LifetimePosition position) const {
     767   412398006 :   if (IsEmpty()) return false;
     768   791724225 :   return Start() <= position && position < End();
     769             : }
     770             : 
     771             : 
// Returns true if |position| lies inside one of this range's use intervals.
// Unlike CanCover, which only tests the outer [Start, End) bounds, this
// accounts for lifetime holes between intervals.
bool LiveRange::Covers(LifetimePosition position) const {
  if (!CanCover(position)) return false;
  // Resume scanning from the cached interval instead of the chain head.
  UseInterval* start_search = FirstSearchIntervalForPosition(position);
  for (UseInterval* interval = start_search; interval != nullptr;
       interval = interval->next()) {
    DCHECK(interval->next() == nullptr ||
           interval->next()->start() >= interval->start());
    // Remember how far this scan got so the next query can start from here.
    AdvanceLastProcessedMarker(interval, position);
    if (interval->Contains(position)) return true;
    // Intervals are sorted, so passing |position| means it is in a hole.
    if (interval->start() > position) return false;
  }
  return false;
}
     785             : 
     786             : 
// Returns the first position at which this range and |other| are
// simultaneously live, or LifetimePosition::Invalid() if their interval
// chains never overlap. Walks both sorted chains in lockstep, always
// advancing the one that starts earlier.
LifetimePosition LiveRange::FirstIntersection(LiveRange* other) const {
  UseInterval* b = other->first_interval();
  if (b == nullptr) return LifetimePosition::Invalid();
  LifetimePosition advance_last_processed_up_to = b->start();
  UseInterval* a = FirstSearchIntervalForPosition(b->start());
  while (a != nullptr && b != nullptr) {
    // Once either chain has moved past the other range's end, no further
    // overlap is possible.
    if (a->start() > other->End()) break;
    if (b->start() > End()) break;
    LifetimePosition cur_intersection = a->Intersect(b);
    if (cur_intersection.IsValid()) {
      return cur_intersection;
    }
    if (a->start() < b->start()) {
      a = a->next();
      if (a == nullptr || a->start() > other->End()) break;
      // Cache progress for subsequent searches on this range.
      AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
    } else {
      b = b->next();
    }
  }
  return LifetimePosition::Invalid();
}
     809             : 
     810           0 : void LiveRange::Print(const RegisterConfiguration* config,
     811             :                       bool with_children) const {
     812           0 :   OFStream os(stdout);
     813             :   PrintableLiveRange wrapper;
     814           0 :   wrapper.register_configuration_ = config;
     815           0 :   for (const LiveRange* i = this; i != nullptr; i = i->next()) {
     816           0 :     wrapper.range_ = i;
     817           0 :     os << wrapper << std::endl;
     818           0 :     if (!with_children) break;
     819           0 :   }
     820           0 : }
     821             : 
     822             : 
// Convenience overload: prints using the default Turbofan register
// configuration.
void LiveRange::Print(bool with_children) const {
  Print(RegisterConfiguration::Turbofan(), with_children);
}
     826             : 
     827             : 
// Singly-linked list node recording a pending spill move: at instruction gap
// |gap_index|, the value in |operand| must be copied into the range's spill
// location. Nodes are prepended; see RecordSpillLocation/CommitSpillMoves.
struct TopLevelLiveRange::SpillMoveInsertionList : ZoneObject {
  SpillMoveInsertionList(int gap_index, InstructionOperand* operand,
                         SpillMoveInsertionList* next)
      : gap_index(gap_index), operand(operand), next(next) {}
  const int gap_index;
  InstructionOperand* const operand;
  SpillMoveInsertionList* const next;
};
     836             : 
     837             : 
// Creates the top-level live range for virtual register |vreg| with value
// representation |rep|. Starts with child id 0, no splinter relationship,
// and no spill type decided yet.
TopLevelLiveRange::TopLevelLiveRange(int vreg, MachineRepresentation rep)
    : LiveRange(0, rep, this),
      vreg_(vreg),
      last_child_id_(0),
      splintered_from_(nullptr),
      spill_operand_(nullptr),
      spill_move_insertion_locations_(nullptr),
      spilled_in_deferred_blocks_(false),
      spill_start_index_(kMaxInt),
      last_pos_(nullptr),
      splinter_(nullptr),
      has_preassigned_slot_(false) {
  // The spill type lives in the shared bitfield inherited from LiveRange.
  bits_ |= SpillTypeField::encode(SpillType::kNoSpillType);
}
     852             : 
     853             : 
#if DEBUG
// For debug output: splinters report the virtual register of the range they
// were carved out of, since they carry no vreg of their own.
int TopLevelLiveRange::debug_virt_reg() const {
  return IsSplinter() ? splintered_from()->vreg() : vreg();
}
#endif
     859             : 
     860             : 
     861           0 : void TopLevelLiveRange::RecordSpillLocation(Zone* zone, int gap_index,
     862             :                                             InstructionOperand* operand) {
     863             :   DCHECK(HasNoSpillType());
     864             :   spill_move_insertion_locations_ = new (zone) SpillMoveInsertionList(
     865    25781090 :       gap_index, operand, spill_move_insertion_locations_);
     866           0 : }
     867             : 
// Emits the spill moves recorded via RecordSpillLocation into the
// instruction stream, spilling into |op|. When |might_be_duplicated| (or
// the slot was preassigned), an equivalent move already present in the gap
// is reused instead of inserting a second copy.
void TopLevelLiveRange::CommitSpillMoves(InstructionSequence* sequence,
                                         const InstructionOperand& op,
                                         bool might_be_duplicated) {
  DCHECK_IMPLIES(op.IsConstant(), GetSpillMoveInsertionLocations() == nullptr);
  Zone* zone = sequence->zone();

  for (SpillMoveInsertionList* to_spill = GetSpillMoveInsertionLocations();
       to_spill != nullptr; to_spill = to_spill->next) {
    Instruction* instr = sequence->InstructionAt(to_spill->gap_index);
    ParallelMove* move =
        instr->GetOrCreateParallelMove(Instruction::START, zone);
    // Skip insertion if it's possible that the move exists already as a
    // constraint move from a fixed output register to a slot.
    if (might_be_duplicated || has_preassigned_slot()) {
      bool found = false;
      for (MoveOperands* move_op : *move) {
        if (move_op->IsEliminated()) continue;
        if (move_op->source().Equals(*to_spill->operand) &&
            move_op->destination().Equals(op)) {
          found = true;
          // With a preassigned slot the duplicate move is redundant and can
          // be dropped outright.
          if (has_preassigned_slot()) move_op->Eliminate();
          break;
        }
      }
      if (found) continue;
    }
    if (!has_preassigned_slot()) {
      move->AddMove(*to_spill->operand, op);
    }
  }
}
     899             : 
     900             : 
// Fixes this range's spill location to a concrete |operand|. Only legal
// while no spill type has been chosen, and only for operands that are
// already allocated (not unallocated, not immediate).
void TopLevelLiveRange::SetSpillOperand(InstructionOperand* operand) {
  DCHECK(HasNoSpillType());
  DCHECK(!operand->IsUnallocated() && !operand->IsImmediate());
  set_spill_type(SpillType::kSpillOperand);
  spill_operand_ = operand;
}
     907             : 
     908             : 
// Associates this range with a shared |spill_range| (a slot assigned later);
// mutually exclusive with a dedicated spill operand.
void TopLevelLiveRange::SetSpillRange(SpillRange* spill_range) {
  DCHECK(!HasSpillOperand());
  DCHECK(spill_range);
  spill_range_ = spill_range;
}
     914             : 
     915             : 
     916    15146666 : AllocatedOperand TopLevelLiveRange::GetSpillRangeOperand() const {
     917    15146666 :   SpillRange* spill_range = GetSpillRange();
     918             :   int index = spill_range->assigned_slot();
     919      630712 :   return AllocatedOperand(LocationOperand::STACK_SLOT, representation(), index);
     920             : }
     921             : 
     922             : 
// Carves the portion of this range lying within [start, end[ out into
// splinter() (used to pull deferred-block lifetimes off the hot path). The
// extracted intervals/uses are appended to the splinter range; this range
// keeps everything outside [start, end[.
void TopLevelLiveRange::Splinter(LifetimePosition start, LifetimePosition end,
                                 Zone* zone) {
  DCHECK(start != Start() || end != End());
  DCHECK(start < end);

  TopLevelLiveRange splinter_temp(-1, representation());
  UsePosition* last_in_splinter = nullptr;
  // Live ranges defined in deferred blocks stay in deferred blocks, so we
  // don't need to splinter them. That means that start should always be
  // after the beginning of the range.
  DCHECK(start > Start());

  if (end >= End()) {
    DCHECK(start > Start());
    // The splintered region reaches the end: just detach the tail.
    DetachAt(start, &splinter_temp, zone, ConnectHints);
    next_ = nullptr;
  } else {
    DCHECK(start < End() && Start() < end);

    const int kInvalidId = std::numeric_limits<int>::max();

    // Detach [start, End()[ into splinter_temp, then detach [end, End()[
    // back out of it (into end_part) and re-append that tail to this range,
    // leaving exactly [start, end[ in splinter_temp.
    UsePosition* last = DetachAt(start, &splinter_temp, zone, ConnectHints);

    LiveRange end_part(kInvalidId, this->representation(), nullptr);
    // The last chunk exits the deferred region, and we don't want to connect
    // hints here, because the non-deferred region shouldn't be affected
    // by allocation decisions on the deferred path.
    last_in_splinter =
        splinter_temp.DetachAt(end, &end_part, zone, DoNotConnectHints);

    next_ = end_part.next_;
    last_interval_->set_next(end_part.first_interval_);
    // The next splinter will happen either at or after the current interval.
    // We can optimize DetachAt by setting current_interval_ accordingly,
    // which will then be picked up by FirstSearchIntervalForPosition.
    current_interval_ = last_interval_;
    last_interval_ = end_part.last_interval_;

    if (first_pos_ == nullptr) {
      first_pos_ = end_part.first_pos_;
    } else {
      // Cache the last use before the hole for the next DetachAt call.
      splitting_pointer_ = last;
      if (last != nullptr) last->set_next(end_part.first_pos_);
    }
  }

  // Append splinter_temp's intervals and use positions to the splinter.
  if (splinter()->IsEmpty()) {
    splinter()->first_interval_ = splinter_temp.first_interval_;
    splinter()->last_interval_ = splinter_temp.last_interval_;
  } else {
    splinter()->last_interval_->set_next(splinter_temp.first_interval_);
    splinter()->last_interval_ = splinter_temp.last_interval_;
  }
  if (splinter()->first_pos() == nullptr) {
    splinter()->first_pos_ = splinter_temp.first_pos_;
  } else {
    splinter()->last_pos_->set_next(splinter_temp.first_pos_);
  }
  if (last_in_splinter != nullptr) {
    splinter()->last_pos_ = last_in_splinter;
  } else {
    // Recompute last_pos_ by walking to the end of the use-position list.
    if (splinter()->first_pos() != nullptr &&
        splinter()->last_pos_ == nullptr) {
      splinter()->last_pos_ = splinter()->first_pos();
      for (UsePosition* pos = splinter()->first_pos(); pos != nullptr;
           pos = pos->next()) {
        splinter()->last_pos_ = pos;
      }
    }
  }
#if DEBUG
  Verify();
  splinter()->Verify();
#endif
}
     998             : 
     999             : 
    1000     3284150 : void TopLevelLiveRange::SetSplinteredFrom(TopLevelLiveRange* splinter_parent) {
    1001     3284150 :   splintered_from_ = splinter_parent;
    1002     3284150 :   if (!HasSpillOperand() && splinter_parent->spill_range_ != nullptr) {
    1003     1578174 :     SetSpillRange(splinter_parent->spill_range_);
    1004             :   }
    1005     3284150 : }
    1006             : 
    1007             : 
// After merging splinter |merged| back into this range: if we have not
// decided on a spill type yet but the splinter acquired a spill range, adopt
// it, and clear the splinter's spill state so the slot is not committed
// twice.
void TopLevelLiveRange::UpdateSpillRangePostMerge(TopLevelLiveRange* merged) {
  DCHECK(merged->TopLevel() == this);

  if (HasNoSpillType() && merged->HasSpillRange()) {
    set_spill_type(merged->spill_type());
    DCHECK(GetSpillRange()->live_ranges().size() > 0);
    merged->spill_range_ = nullptr;
    // Reset the splinter's spill type back to "none" in its bitfield.
    merged->bits_ =
        SpillTypeField::update(merged->bits_, SpillType::kNoSpillType);
  }
}
    1019             : 
    1020             : 
// Merges the splinter |other| back into this (its original) range by
// interleaving the two start-sorted sibling chains. Children of this range
// that overlap a splinter child are split at the splinter child's start so
// the resulting chain stays disjoint and ordered.
void TopLevelLiveRange::Merge(TopLevelLiveRange* other, Zone* zone) {
  DCHECK(Start() < other->Start());
  DCHECK(other->splintered_from() == this);

  LiveRange* first = this;
  LiveRange* second = other;
  DCHECK(first->Start() < second->Start());
  while (first != nullptr && second != nullptr) {
    DCHECK(first != second);
    // Make sure the ranges are in order each time we iterate.
    if (second->Start() < first->Start()) {
      LiveRange* tmp = second;
      second = first;
      first = tmp;
      continue;
    }

    if (first->End() <= second->Start()) {
      if (first->next() == nullptr ||
          first->next()->Start() > second->Start()) {
        // First is in order before second.
        LiveRange* temp = first->next();
        first->next_ = second;
        first = temp;
      } else {
        // First is in order before its successor (or second), so advance first.
        first = first->next();
      }
      continue;
    }

    DCHECK(first->Start() < second->Start());
    // If first and second intersect, split first.
    if (first->Start() < second->End() && second->Start() < first->End()) {
      LiveRange* temp = first->SplitAt(second->Start(), zone);
      CHECK(temp != first);
      // The tail inherits the allocation decision made for |first|.
      temp->set_spilled(first->spilled());
      if (!temp->spilled())
        temp->set_assigned_register(first->assigned_register());

      first->next_ = second;
      first = temp;
      continue;
    }
    DCHECK(first->End() <= second->Start());
  }

  // Re-home all children under this top level and fold back spill/slot-use
  // state the splinter may have acquired.
  TopLevel()->UpdateParentForAllChildren(TopLevel());
  TopLevel()->UpdateSpillRangePostMerge(other);
  TopLevel()->set_has_slot_use(TopLevel()->has_slot_use() ||
                               other->has_slot_use());

#if DEBUG
  Verify();
#endif
}
    1077             : 
    1078             : 
// Debug check: the sibling chain must be sorted by position and
// non-overlapping (each child starts at or after the previous one ends).
void TopLevelLiveRange::VerifyChildrenInOrder() const {
  LifetimePosition last_end = End();
  for (const LiveRange* child = this->next(); child != nullptr;
       child = child->next()) {
    DCHECK(last_end <= child->Start());
    last_end = child->End();
  }
}
    1087             : 
    1088             : 
    1089           0 : void TopLevelLiveRange::Verify() const {
    1090             :   VerifyChildrenInOrder();
    1091           0 :   for (const LiveRange* child = this; child != nullptr; child = child->next()) {
    1092             :     VerifyChildStructure();
    1093             :   }
    1094           0 : }
    1095             : 
    1096             : 
// Trims the first interval so the range now begins at |start|. The new start
// must fall inside the current first interval.
void TopLevelLiveRange::ShortenTo(LifetimePosition start) {
  TRACE("Shorten live range %d to [%d\n", vreg(), start.value());
  DCHECK(first_interval_ != nullptr);
  DCHECK(first_interval_->start() <= start);
  DCHECK(start < first_interval_->end());
  first_interval_->set_start(start);
}
    1104             : 
    1105             : 
// Prepends an interval guaranteed to cover [start, end[, coalescing any
// existing leading intervals that begin at or before |end| into it (the
// range is built front-to-back, so only the head can overlap).
void TopLevelLiveRange::EnsureInterval(LifetimePosition start,
                                       LifetimePosition end, Zone* zone) {
  TRACE("Ensure live range %d in interval [%d %d[\n", vreg(), start.value(),
        end.value());
  LifetimePosition new_end = end;
  // Absorb overlapping head intervals, keeping the furthest end seen.
  while (first_interval_ != nullptr && first_interval_->start() <= end) {
    if (first_interval_->end() > end) {
      new_end = first_interval_->end();
    }
    first_interval_ = first_interval_->next();
  }

  UseInterval* new_interval = new (zone) UseInterval(start, new_end);
  new_interval->set_next(first_interval_);
  first_interval_ = new_interval;
  if (new_interval->next() == nullptr) {
    // Everything was absorbed: the new interval is also the last one.
    last_interval_ = new_interval;
  }
}
    1125             : 
    1126             : 
// Prepends the interval [start, end[ to this range. Because instructions are
// processed back-to-front, a new interval can only precede, touch, or
// intersect the current first interval; touching/intersecting ones are
// coalesced in place instead of allocating a new node.
void TopLevelLiveRange::AddUseInterval(LifetimePosition start,
                                       LifetimePosition end, Zone* zone) {
  TRACE("Add to live range %d interval [%d %d[\n", vreg(), start.value(),
        end.value());
  if (first_interval_ == nullptr) {
    UseInterval* interval = new (zone) UseInterval(start, end);
    first_interval_ = interval;
    last_interval_ = interval;
  } else {
    if (end == first_interval_->start()) {
      // New interval touches the head: extend the head leftwards.
      first_interval_->set_start(start);
    } else if (end < first_interval_->start()) {
      // Strictly before the head: prepend a fresh interval.
      UseInterval* interval = new (zone) UseInterval(start, end);
      interval->set_next(first_interval_);
      first_interval_ = interval;
    } else {
      // Order of instruction's processing (see ProcessInstructions) guarantees
      // that each new use interval either precedes, intersects with or touches
      // the last added interval.
      DCHECK(start <= first_interval_->end());
      first_interval_->set_start(Min(start, first_interval_->start()));
      first_interval_->set_end(Max(end, first_interval_->end()));
    }
  }
}
    1152             : 
    1153             : 
// Inserts |use_pos| into the use-position list, keeping it sorted by
// position (most insertions prepend, so the scan is usually empty), and
// updates the cached hint position when this use carries the earliest hint.
void TopLevelLiveRange::AddUsePosition(UsePosition* use_pos) {
  LifetimePosition pos = use_pos->pos();
  TRACE("Add to live range %d use position %d\n", vreg(), pos.value());
  UsePosition* prev_hint = nullptr;
  UsePosition* prev = nullptr;
  UsePosition* current = first_pos_;
  // Find the insertion point, remembering whether an earlier use already
  // provides a hint.
  while (current != nullptr && current->pos() < pos) {
    prev_hint = current->HasHint() ? current : prev_hint;
    prev = current;
    current = current->next();
  }

  if (prev == nullptr) {
    use_pos->set_next(first_pos_);
    first_pos_ = use_pos;
  } else {
    use_pos->set_next(prev->next());
    prev->set_next(use_pos);
  }

  // Only cache this use as the hint position if no earlier use has a hint.
  if (prev_hint == nullptr && use_pos->HasHint()) {
    current_hint_position_ = use_pos;
  }
}
    1178             : 
    1179             : 
    1180    23459843 : static bool AreUseIntervalsIntersecting(UseInterval* interval1,
    1181     6690051 :                                         UseInterval* interval2) {
    1182    43117787 :   while (interval1 != nullptr && interval2 != nullptr) {
    1183    29942256 :     if (interval1->start() < interval2->start()) {
    1184    15699608 :       if (interval1->end() > interval2->start()) {
    1185             :         return true;
    1186             :       }
    1187             :       interval1 = interval1->next();
    1188             :     } else {
    1189    14242648 :       if (interval2->end() > interval1->start()) {
    1190             :         return true;
    1191             :       }
    1192             :       interval2 = interval2->next();
    1193             :     }
    1194             :   }
    1195             :   return false;
    1196             : }
    1197             : 
    1198             : 
    1199           0 : std::ostream& operator<<(std::ostream& os,
    1200             :                          const PrintableLiveRange& printable_range) {
    1201           0 :   const LiveRange* range = printable_range.range_;
    1202           0 :   os << "Range: " << range->TopLevel()->vreg() << ":" << range->relative_id()
    1203           0 :      << " ";
    1204           0 :   if (range->TopLevel()->is_phi()) os << "phi ";
    1205           0 :   if (range->TopLevel()->is_non_loop_phi()) os << "nlphi ";
    1206             : 
    1207           0 :   os << "{" << std::endl;
    1208           0 :   UseInterval* interval = range->first_interval();
    1209           0 :   UsePosition* use_pos = range->first_pos();
    1210             :   PrintableInstructionOperand pio;
    1211           0 :   pio.register_configuration_ = printable_range.register_configuration_;
    1212           0 :   while (use_pos != nullptr) {
    1213           0 :     if (use_pos->HasOperand()) {
    1214           0 :       pio.op_ = *use_pos->operand();
    1215           0 :       os << pio << use_pos->pos() << " ";
    1216             :     }
    1217             :     use_pos = use_pos->next();
    1218             :   }
    1219             :   os << std::endl;
    1220             : 
    1221           0 :   while (interval != nullptr) {
    1222           0 :     os << '[' << interval->start() << ", " << interval->end() << ')'
    1223             :        << std::endl;
    1224             :     interval = interval->next();
    1225             :   }
    1226           0 :   os << "}";
    1227           0 :   return os;
    1228             : }
    1229             : 
// Builds a spill range covering all live positions of |parent| and its
// child ranges, by cloning every use interval into |zone|.
SpillRange::SpillRange(TopLevelLiveRange* parent, Zone* zone)
    : live_ranges_(zone),
      assigned_slot_(kUnassignedSlot),
      byte_width_(GetByteWidth(parent->representation())) {
  // Spill ranges are created for top level, non-splintered ranges. This is so
  // that, when merging decisions are made, we consider the full extent of the
  // virtual register, and avoid clobbering it.
  DCHECK(!parent->IsSplinter());
  UseInterval* result = nullptr;  // Head of the cloned interval chain.
  UseInterval* node = nullptr;    // Tail of the cloned interval chain.
  // Copy the intervals for all ranges.
  for (LiveRange* range = parent; range != nullptr; range = range->next()) {
    UseInterval* src = range->first_interval();
    while (src != nullptr) {
      UseInterval* new_node = new (zone) UseInterval(src->start(), src->end());
      if (result == nullptr) {
        result = new_node;
      } else {
        node->set_next(new_node);
      }
      node = new_node;
      src = src->next();
    }
  }
  use_interval_ = result;
  live_ranges().push_back(parent);
  // NOTE(review): assumes |parent| has at least one use interval; |node| is
  // null otherwise -- TODO confirm callers never pass empty ranges.
  end_position_ = node->end();
  parent->SetSpillRange(this);
}
    1259             : 
    1260    13902349 : bool SpillRange::IsIntersectingWith(SpillRange* other) const {
    1261    41707051 :   if (this->use_interval_ == nullptr || other->use_interval_ == nullptr ||
    1262    27780628 :       this->End() <= other->use_interval_->start() ||
    1263             :       other->End() <= this->use_interval_->start()) {
    1264             :     return false;
    1265             :   }
    1266    12938760 :   return AreUseIntervalsIntersecting(use_interval_, other->use_interval_);
    1267             : }
    1268             : 
    1269             : 
// Attempts to fold |other| into this spill range so both can share a single
// stack slot. Fails if either range already has a slot, the slot widths
// differ, or their live positions intersect. On success, |other| is left
// empty and all of its live ranges are re-pointed at this range.
bool SpillRange::TryMerge(SpillRange* other) {
  if (HasSlot() || other->HasSlot()) return false;
  if (byte_width() != other->byte_width() || IsIntersectingWith(other))
    return false;

  LifetimePosition max = LifetimePosition::MaxPosition();
  // Extend our end to cover |other|, unless |other| is unbounded.
  if (End() < other->End() && other->End() != max) {
    end_position_ = other->End();
  }
  other->end_position_ = max;

  // Take over |other|'s intervals and clear them out of |other|.
  MergeDisjointIntervals(other->use_interval_);
  other->use_interval_ = nullptr;

  for (TopLevelLiveRange* range : other->live_ranges()) {
    DCHECK(range->GetSpillRange() == other);
    range->SetSpillRange(this);
  }

  live_ranges().insert(live_ranges().end(), other->live_ranges().begin(),
                       other->live_ranges().end());
  other->live_ranges().clear();

  return true;
}
    1295             : 
    1296             : 
// Merges the sorted, pairwise-disjoint interval list |other| into this
// range's own sorted interval list, producing one combined sorted chain.
// The swap trick keeps 'current' pointing at whichever list's head starts
// first, so nodes are always appended in position order.
void SpillRange::MergeDisjointIntervals(UseInterval* other) {
  UseInterval* tail = nullptr;           // Last node appended to the result.
  UseInterval* current = use_interval_;  // Head of the list being consumed.
  while (other != nullptr) {
    // Make sure the 'current' list starts first
    if (current == nullptr || current->start() > other->start()) {
      std::swap(current, other);
    }
    // Check disjointness
    DCHECK(other == nullptr || current->end() <= other->start());
    // Append the 'current' node to the result accumulator and move forward
    if (tail == nullptr) {
      use_interval_ = current;
    } else {
      tail->set_next(current);
    }
    tail = current;
    current = current->next();
  }
  // Other list is empty => we are done
}
    1318             : 
    1319             : 
// Debug helper: dumps to stdout the vregs that share this spill range,
// followed by its use intervals.
void SpillRange::Print() const {
  OFStream os(stdout);
  os << "{" << std::endl;
  for (TopLevelLiveRange* range : live_ranges()) {
    os << range->vreg() << " ";
  }
  os << std::endl;

  for (UseInterval* i = interval(); i != nullptr; i = i->next()) {
    os << '[' << i->start() << ", " << i->end() << ')' << std::endl;
  }
  os << "}" << std::endl;
}
    1333             : 
    1334             : 
// Per-phi bookkeeping: remembers the phi instruction, its block, and (once
// recorded) the incoming operands that must be rewritten after a register
// is finally assigned to the phi's live range.
RegisterAllocationData::PhiMapValue::PhiMapValue(PhiInstruction* phi,
                                                 const InstructionBlock* block,
                                                 Zone* zone)
    : phi_(phi),
      block_(block),
      incoming_operands_(zone),
      assigned_register_(kUnassignedRegister) {
  // One slot per phi input; avoids reallocation while operands are added.
  incoming_operands_.reserve(phi->operands().size());
}
    1344             : 
    1345             : 
// Records one incoming operand of the phi for later rewriting by
// CommitAssignment.
void RegisterAllocationData::PhiMapValue::AddOperand(
    InstructionOperand* operand) {
  incoming_operands_.push_back(operand);
}
    1350             : 
    1351             : 
    1352           0 : void RegisterAllocationData::PhiMapValue::CommitAssignment(
    1353             :     const InstructionOperand& assigned) {
    1354     7994909 :   for (InstructionOperand* operand : incoming_operands_) {
    1355             :     InstructionOperand::ReplaceWith(operand, &assigned);
    1356             :   }
    1357           0 : }
    1358             : 
// Central bookkeeping shared by all register-allocation phases. Most
// containers live in the allocation zone; the assigned-register bit vectors
// are placed in the code zone and handed to the frame, since they are read
// after allocation finishes.
RegisterAllocationData::RegisterAllocationData(
    const RegisterConfiguration* config, Zone* zone, Frame* frame,
    InstructionSequence* code, const char* debug_name)
    : allocation_zone_(zone),
      frame_(frame),
      code_(code),
      debug_name_(debug_name),
      config_(config),
      phi_map_(allocation_zone()),
      live_in_sets_(code->InstructionBlockCount(), nullptr, allocation_zone()),
      live_out_sets_(code->InstructionBlockCount(), nullptr, allocation_zone()),
      // Sized to 2x the vreg count -- presumably headroom for ranges created
      // during allocation (e.g. by splintering); TODO confirm.
      live_ranges_(code->VirtualRegisterCount() * 2, nullptr,
                   allocation_zone()),
      fixed_live_ranges_(this->config()->num_general_registers(), nullptr,
                         allocation_zone()),
      fixed_float_live_ranges_(allocation_zone()),
      fixed_double_live_ranges_(this->config()->num_double_registers(), nullptr,
                                allocation_zone()),
      fixed_simd128_live_ranges_(allocation_zone()),
      spill_ranges_(code->VirtualRegisterCount(), nullptr, allocation_zone()),
      delayed_references_(allocation_zone()),
      assigned_registers_(nullptr),
      assigned_double_registers_(nullptr),
      virtual_register_count_(code->VirtualRegisterCount()),
      preassigned_slot_ranges_(zone) {
  if (!kSimpleFPAliasing) {
    // With complex FP aliasing, float32 and simd128 registers need their own
    // fixed-range tables; with simple aliasing these stay empty.
    fixed_float_live_ranges_.resize(this->config()->num_float_registers(),
                                    nullptr);
    fixed_simd128_live_ranges_.resize(this->config()->num_simd128_registers(),
                                      nullptr);
  }

  assigned_registers_ = new (code_zone())
      BitVector(this->config()->num_general_registers(), code_zone());
  assigned_double_registers_ = new (code_zone())
      BitVector(this->config()->num_double_registers(), code_zone());
  this->frame()->SetAllocatedRegisters(assigned_registers_);
  this->frame()->SetAllocatedDoubleRegisters(assigned_double_registers_);
}
    1398             : 
    1399             : 
    1400    25835596 : MoveOperands* RegisterAllocationData::AddGapMove(
    1401             :     int index, Instruction::GapPosition position,
    1402    25835596 :     const InstructionOperand& from, const InstructionOperand& to) {
    1403             :   Instruction* instr = code()->InstructionAt(index);
    1404    25835593 :   ParallelMove* moves = instr->GetOrCreateParallelMove(position, code_zone());
    1405    25835495 :   return moves->AddMove(from, to);
    1406             : }
    1407             : 
    1408             : 
    1409           0 : MachineRepresentation RegisterAllocationData::RepresentationFor(
    1410    42107233 :     int virtual_register) {
    1411             :   DCHECK_LT(virtual_register, code()->VirtualRegisterCount());
    1412    42107233 :   return code()->GetRepresentation(virtual_register);
    1413             : }
    1414             : 
    1415             : 
    1416   203051899 : TopLevelLiveRange* RegisterAllocationData::GetOrCreateLiveRangeFor(int index) {
    1417   406103798 :   if (index >= static_cast<int>(live_ranges().size())) {
    1418           0 :     live_ranges().resize(index + 1, nullptr);
    1419             :   }
    1420   406103798 :   TopLevelLiveRange* result = live_ranges()[index];
    1421   203051899 :   if (result == nullptr) {
    1422    23349746 :     result = NewLiveRange(index, RepresentationFor(index));
    1423    46699526 :     live_ranges()[index] = result;
    1424             :   }
    1425   203051926 :   return result;
    1426             : }
    1427             : 
    1428             : 
// Allocates a fresh top-level live range for vreg |index| with machine
// representation |rep| in the allocation zone.
TopLevelLiveRange* RegisterAllocationData::NewLiveRange(
    int index, MachineRepresentation rep) {
  return new (allocation_zone()) TopLevelLiveRange(index, rep);
}
    1433             : 
    1434             : 
    1435     3284162 : int RegisterAllocationData::GetNextLiveRangeId() {
    1436     3284162 :   int vreg = virtual_register_count_++;
    1437     6568324 :   if (vreg >= static_cast<int>(live_ranges().size())) {
    1438           0 :     live_ranges().resize(vreg + 1, nullptr);
    1439             :   }
    1440     3284162 :   return vreg;
    1441             : }
    1442             : 
    1443             : 
    1444     3284147 : TopLevelLiveRange* RegisterAllocationData::NextLiveRange(
    1445             :     MachineRepresentation rep) {
    1446     3284147 :   int vreg = GetNextLiveRangeId();
    1447     3284190 :   TopLevelLiveRange* ret = NewLiveRange(vreg, rep);
    1448     3284196 :   return ret;
    1449             : }
    1450             : 
    1451             : 
    1452     1335200 : RegisterAllocationData::PhiMapValue* RegisterAllocationData::InitializePhiMap(
    1453     2670395 :     const InstructionBlock* block, PhiInstruction* phi) {
    1454             :   RegisterAllocationData::PhiMapValue* map_value = new (allocation_zone())
    1455             :       RegisterAllocationData::PhiMapValue(phi, block, allocation_zone());
    1456             :   auto res =
    1457     2670401 :       phi_map_.insert(std::make_pair(phi->virtual_register(), map_value));
    1458             :   DCHECK(res.second);
    1459             :   USE(res);
    1460     1335206 :   return map_value;
    1461             : }
    1462             : 
    1463             : 
// Looks up the phi bookkeeping for |virtual_register|. The phi must have
// been registered earlier via InitializePhiMap.
RegisterAllocationData::PhiMapValue* RegisterAllocationData::GetPhiMapValueFor(
    int virtual_register) {
  auto it = phi_map_.find(virtual_register);
  DCHECK(it != phi_map_.end());
  return it->second;
}
    1470             : 
    1471             : 
// Convenience overload: looks up the phi bookkeeping by the range's vreg.
RegisterAllocationData::PhiMapValue* RegisterAllocationData::GetPhiMapValueFor(
    TopLevelLiveRange* top_range) {
  return GetPhiMapValueFor(top_range->vreg());
}
    1476             : 
    1477             : 
    1478          42 : bool RegisterAllocationData::ExistsUseWithoutDefinition() {
    1479             :   bool found = false;
    1480          42 :   BitVector::Iterator iterator(live_in_sets()[0]);
    1481          84 :   while (!iterator.Done()) {
    1482             :     found = true;
    1483           0 :     int operand_index = iterator.Current();
    1484             :     PrintF("Register allocator error: live v%d reached first block.\n",
    1485           0 :            operand_index);
    1486           0 :     LiveRange* range = GetOrCreateLiveRangeFor(operand_index);
    1487           0 :     PrintF("  (first use is at %d)\n", range->first_pos()->pos().value());
    1488           0 :     if (debug_name() == nullptr) {
    1489           0 :       PrintF("\n");
    1490             :     } else {
    1491           0 :       PrintF("  (function: %s)\n", debug_name());
    1492             :     }
    1493           0 :     iterator.Advance();
    1494             :   }
    1495          42 :   return found;
    1496             : }
    1497             : 
    1498             : 
// If a range is defined in a deferred block, we can expect all the range
// to only cover positions in deferred blocks. Otherwise, a block on the
// hot path would be dominated by a deferred block, meaning it is unreachable
// without passing through the deferred block, which is contradictory.
// In particular, when such a range contributes a result back on the hot
// path, it will be as one of the inputs of a phi. In that case, the value
// will be transferred via a move in the Gap::END's of the last instruction
// of a deferred block.
bool RegisterAllocationData::RangesDefinedInDeferredStayInDeferred() {
  for (const TopLevelLiveRange* range : live_ranges()) {
    // Only ranges whose definition point lies in a deferred block need
    // checking; all others are skipped.
    if (range == nullptr || range->IsEmpty() ||
        !code()
             ->GetInstructionBlock(range->Start().ToInstructionIndex())
             ->IsDeferred()) {
      continue;
    }
    // Every gap index covered by the range must fall in a deferred block.
    for (const UseInterval* i = range->first_interval(); i != nullptr;
         i = i->next()) {
      int first = i->FirstGapIndex();
      int last = i->LastGapIndex();
      for (int instr = first; instr <= last;) {
        const InstructionBlock* block = code()->GetInstructionBlock(instr);
        if (!block->IsDeferred()) return false;
        // Jump to the first instruction of the next block.
        instr = block->last_instruction_index() + 1;
      }
    }
  }
  return true;
}
    1528             : 
// Ensures |range| has a spill range (creating one on demand), marks the
// range as spilling to it, and files the spill range in the per-vreg table.
SpillRange* RegisterAllocationData::AssignSpillRangeToLiveRange(
    TopLevelLiveRange* range) {
  DCHECK(!range->HasSpillOperand());

  SpillRange* spill_range = range->GetAllocatedSpillRange();
  if (spill_range == nullptr) {
    DCHECK(!range->IsSplinter());
    spill_range = new (allocation_zone()) SpillRange(range, allocation_zone());
  }
  range->set_spill_type(TopLevelLiveRange::SpillType::kSpillRange);

  // Splinters file under the vreg of the range they were split from, so the
  // whole virtual register shares one table entry.
  int spill_range_index =
      range->IsSplinter() ? range->splintered_from()->vreg() : range->vreg();

  spill_ranges()[spill_range_index] = spill_range;

  return spill_range;
}
    1547             : 
    1548             : 
// Creates a stand-alone spill range covering |range| without registering it
// in the spill-range table or marking |range| as spilled (contrast with
// AssignSpillRangeToLiveRange).
SpillRange* RegisterAllocationData::CreateSpillRangeForLiveRange(
    TopLevelLiveRange* range) {
  DCHECK(!range->HasSpillOperand());
  DCHECK(!range->IsSplinter());
  SpillRange* spill_range =
      new (allocation_zone()) SpillRange(range, allocation_zone());
  return spill_range;
}
    1557             : 
// Records that register |index| of the class implied by |rep| has been
// handed out, updating the assigned-register bit vectors the frame uses.
void RegisterAllocationData::MarkAllocated(MachineRepresentation rep,
                                           int index) {
  switch (rep) {
    case MachineRepresentation::kFloat32:
    case MachineRepresentation::kSimd128:
      if (kSimpleFPAliasing) {
        // Simple aliasing: float32/simd128 registers map 1:1 onto doubles.
        assigned_double_registers_->Add(index);
      } else {
        // Complex aliasing: one float/simd register may overlap several
        // double registers; mark every aliased double.
        int alias_base_index = -1;
        int aliases = config()->GetAliases(
            rep, index, MachineRepresentation::kFloat64, &alias_base_index);
        DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
        while (aliases--) {
          int aliased_reg = alias_base_index + aliases;
          assigned_double_registers_->Add(aliased_reg);
        }
      }
      break;
    case MachineRepresentation::kFloat64:
      assigned_double_registers_->Add(index);
      break;
    default:
      // Everything else (tagged, word, etc.) is a general-purpose register.
      DCHECK(!IsFloatingPoint(rep));
      assigned_registers_->Add(index);
      break;
  }
}
    1585             : 
    1586    24302798 : bool RegisterAllocationData::IsBlockBoundary(LifetimePosition pos) const {
    1587    24302859 :   return pos.IsFullStart() &&
    1588    10790073 :          code()->GetInstructionBlock(pos.ToInstructionIndex())->code_start() ==
    1589    13512786 :              pos.ToInstructionIndex();
    1590             : }
    1591             : 
    1592             : 
// Constraint building operates on the shared register allocation data.
ConstraintBuilder::ConstraintBuilder(RegisterAllocationData* data)
    : data_(data) {}
    1596             : 
// Replaces an unallocated operand that carries a fixed policy with the
// concrete register or stack-slot location the policy prescribes. When
// |is_tagged|, the resulting location is also recorded in the reference map
// of the instruction at |pos| so the GC can find the pointer.
InstructionOperand* ConstraintBuilder::AllocateFixed(
    UnallocatedOperand* operand, int pos, bool is_tagged) {
  TRACE("Allocating fixed reg for op %d\n", operand->virtual_register());
  DCHECK(operand->HasFixedPolicy());
  InstructionOperand allocated;
  // Operands without a vreg fall back to the default representation.
  MachineRepresentation rep = InstructionSequence::DefaultRepresentation();
  int virtual_register = operand->virtual_register();
  if (virtual_register != InstructionOperand::kInvalidVirtualRegister) {
    rep = data()->RepresentationFor(virtual_register);
  }
  if (operand->HasFixedSlotPolicy()) {
    allocated = AllocatedOperand(AllocatedOperand::STACK_SLOT, rep,
                                 operand->fixed_slot_index());
  } else if (operand->HasFixedRegisterPolicy()) {
    DCHECK(!IsFloatingPoint(rep));
    allocated = AllocatedOperand(AllocatedOperand::REGISTER, rep,
                                 operand->fixed_register_index());
  } else if (operand->HasFixedFPRegisterPolicy()) {
    DCHECK(IsFloatingPoint(rep));
    DCHECK_NE(InstructionOperand::kInvalidVirtualRegister, virtual_register);
    allocated = AllocatedOperand(AllocatedOperand::REGISTER, rep,
                                 operand->fixed_register_index());
  } else {
    UNREACHABLE();
  }
  // Rewrite the operand in place to the concrete location.
  InstructionOperand::ReplaceWith(operand, &allocated);
  if (is_tagged) {
    TRACE("Fixed reg is tagged at %d\n", pos);
    Instruction* instr = code()->InstructionAt(pos);
    if (instr->HasReferenceMap()) {
      instr->reference_map()->RecordReference(*AllocatedOperand::cast(operand));
    }
  }
  return operand;
}
    1632             : 
    1633             : 
    1634      915089 : void ConstraintBuilder::MeetRegisterConstraints() {
    1635    13288102 :   for (InstructionBlock* block : code()->instruction_blocks()) {
    1636    11457912 :     MeetRegisterConstraints(block);
    1637             :   }
    1638      915101 : }
    1639             : 
    1640             : 
    1641    11457939 : void ConstraintBuilder::MeetRegisterConstraints(const InstructionBlock* block) {
    1642             :   int start = block->first_instruction_index();
    1643             :   int end = block->last_instruction_index();
    1644             :   DCHECK_NE(-1, start);
    1645    50580352 :   for (int i = start; i <= end; ++i) {
    1646    39122426 :     MeetConstraintsBefore(i);
    1647    39122582 :     if (i != end) MeetConstraintsAfter(i);
    1648             :   }
    1649             :   // Meet register constraints for the instruction in the end.
    1650    11457926 :   MeetRegisterConstraintsForLastInstructionInBlock(block);
    1651    11457917 : }
    1652             : 
    1653             : 
// Handles output constraints of a block's final instruction. Fixed outputs
// are resolved to their concrete location; for each successor (each of
// which, per the DCHECKs, has this block as its only predecessor) a gap
// move and/or spill location is recorded at the successor's first
// instruction.
void ConstraintBuilder::MeetRegisterConstraintsForLastInstructionInBlock(
    const InstructionBlock* block) {
  int end = block->last_instruction_index();
  Instruction* last_instruction = code()->InstructionAt(end);
  for (size_t i = 0; i < last_instruction->OutputCount(); i++) {
    InstructionOperand* output_operand = last_instruction->OutputAt(i);
    DCHECK(!output_operand->IsConstant());
    UnallocatedOperand* output = UnallocatedOperand::cast(output_operand);
    int output_vreg = output->virtual_register();
    TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(output_vreg);
    bool assigned = false;
    if (output->HasFixedPolicy()) {
      AllocateFixed(output, -1, false);
      // This value is produced on the stack, we never need to spill it.
      if (output->IsStackSlot()) {
        DCHECK(LocationOperand::cast(output)->index() <
               data()->frame()->GetSpillSlotCount());
        range->SetSpillOperand(LocationOperand::cast(output));
        range->SetSpillStartIndex(end);
        assigned = true;
      }

      for (const RpoNumber& succ : block->successors()) {
        const InstructionBlock* successor = code()->InstructionBlockAt(succ);
        DCHECK(successor->PredecessorCount() == 1);
        int gap_index = successor->first_instruction_index();
        // Create an unconstrained operand for the same virtual register
        // and insert a gap move from the fixed output to the operand.
        UnallocatedOperand output_copy(UnallocatedOperand::ANY, output_vreg);
        data()->AddGapMove(gap_index, Instruction::START, *output, output_copy);
      }
    }

    if (!assigned) {
      // No fixed stack slot was claimed above: record that the value may be
      // spilled at the start of each successor instead.
      for (const RpoNumber& succ : block->successors()) {
        const InstructionBlock* successor = code()->InstructionBlockAt(succ);
        DCHECK(successor->PredecessorCount() == 1);
        int gap_index = successor->first_instruction_index();
        range->RecordSpillLocation(allocation_zone(), gap_index, output);
        range->SetSpillStartIndex(gap_index);
      }
    }
  }
}
    1698             : 
    1699             : 
// Applies the fixed-register/constant constraints that an instruction imposes
// on its temps and outputs, inserting gap moves and recording spill locations
// as needed. "After" refers to constraints taking effect at the gap following
// |instr_index|.
void ConstraintBuilder::MeetConstraintsAfter(int instr_index) {
  Instruction* first = code()->InstructionAt(instr_index);
  // Handle fixed temporaries.
  for (size_t i = 0; i < first->TempCount(); i++) {
    UnallocatedOperand* temp = UnallocatedOperand::cast(first->TempAt(i));
    // Temps are never tagged references, hence is_tagged == false.
    if (temp->HasFixedPolicy()) AllocateFixed(temp, instr_index, false);
  }
  // Handle constant/fixed output operands.
  for (size_t i = 0; i < first->OutputCount(); i++) {
    InstructionOperand* output = first->OutputAt(i);
    if (output->IsConstant()) {
      // Constants rematerialize from the operand itself; the constant operand
      // doubles as the spill "location", effective after this instruction.
      int output_vreg = ConstantOperand::cast(output)->virtual_register();
      TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(output_vreg);
      range->SetSpillStartIndex(instr_index + 1);
      range->SetSpillOperand(output);
      continue;
    }
    UnallocatedOperand* first_output = UnallocatedOperand::cast(output);
    TopLevelLiveRange* range =
        data()->GetOrCreateLiveRangeFor(first_output->virtual_register());
    // Tracks whether a spill slot has already been assigned below, so we can
    // skip recording a deferred spill location at the end.
    bool assigned = false;
    if (first_output->HasFixedPolicy()) {
      int output_vreg = first_output->virtual_register();
      // Copy of the operand before AllocateFixed rewrites it in place; used
      // as the destination of the connecting gap move below.
      UnallocatedOperand output_copy(UnallocatedOperand::ANY, output_vreg);
      bool is_tagged = code()->IsReference(output_vreg);
      if (first_output->HasSecondaryStorage()) {
        range->MarkHasPreassignedSlot();
        data()->preassigned_slot_ranges().push_back(
            std::make_pair(range, first_output->GetSecondaryStorage()));
      }
      AllocateFixed(first_output, instr_index, is_tagged);

      // This value is produced on the stack, we never need to spill it.
      if (first_output->IsStackSlot()) {
        DCHECK(LocationOperand::cast(first_output)->index() <
               data()->frame()->GetTotalFrameSlotCount());
        range->SetSpillOperand(LocationOperand::cast(first_output));
        range->SetSpillStartIndex(instr_index + 1);
        assigned = true;
      }
      // Move the value from its fixed location into the unconstrained vreg at
      // the start of the next instruction's gap.
      data()->AddGapMove(instr_index + 1, Instruction::START, *first_output,
                         output_copy);
    }
    // Make sure we add a gap move for spilling (if we have not done
    // so already).
    if (!assigned) {
      range->RecordSpillLocation(allocation_zone(), instr_index + 1,
                                 first_output);
      range->SetSpillStartIndex(instr_index + 1);
    }
  }
}
    1752             : 
    1753             : 
// Applies the constraints an instruction imposes on its inputs: fixed-register
// input policies and the "output same as input" policy. "Before" refers to
// constraints taking effect in the gap preceding |instr_index|.
void ConstraintBuilder::MeetConstraintsBefore(int instr_index) {
  Instruction* second = code()->InstructionAt(instr_index);
  // Handle fixed input operands of second instruction.
  for (size_t i = 0; i < second->InputCount(); i++) {
    InstructionOperand* input = second->InputAt(i);
    if (input->IsImmediate() || input->IsExplicit()) {
      continue;  // Ignore immediates and explicitly reserved registers.
    }
    UnallocatedOperand* cur_input = UnallocatedOperand::cast(input);
    if (cur_input->HasFixedPolicy()) {
      int input_vreg = cur_input->virtual_register();
      // Snapshot the unconstrained operand before AllocateFixed rewrites
      // |cur_input| in place; it becomes the source of the gap move.
      UnallocatedOperand input_copy(UnallocatedOperand::ANY, input_vreg);
      bool is_tagged = code()->IsReference(input_vreg);
      AllocateFixed(cur_input, instr_index, is_tagged);
      data()->AddGapMove(instr_index, Instruction::END, input_copy, *cur_input);
    }
  }
  // Handle "output same as input" for second instruction.
  for (size_t i = 0; i < second->OutputCount(); i++) {
    InstructionOperand* output = second->OutputAt(i);
    if (!output->IsUnallocated()) continue;
    UnallocatedOperand* second_output = UnallocatedOperand::cast(output);
    if (!second_output->HasSameAsInputPolicy()) continue;
    DCHECK(i == 0);  // Only valid for first output.
    UnallocatedOperand* cur_input =
        UnallocatedOperand::cast(second->InputAt(0));
    int output_vreg = second_output->virtual_register();
    int input_vreg = cur_input->virtual_register();
    UnallocatedOperand input_copy(UnallocatedOperand::ANY, input_vreg);
    // The constraint is satisfied by renaming the input operand to the output
    // vreg and inserting a gap move that copies the original input value into
    // that vreg before the instruction executes.
    cur_input->set_virtual_register(second_output->virtual_register());
    MoveOperands* gap_move = data()->AddGapMove(instr_index, Instruction::END,
                                                input_copy, *cur_input);
    if (code()->IsReference(input_vreg) && !code()->IsReference(output_vreg)) {
      // Tagged input feeding an untagged output: if a reference map exists, the
      // move's source must be recorded in it later (delayed reference).
      if (second->HasReferenceMap()) {
        RegisterAllocationData::DelayedReference delayed_reference = {
            second->reference_map(), &gap_move->source()};
        data()->delayed_references().push_back(delayed_reference);
      }
    } else if (!code()->IsReference(input_vreg) &&
               code()->IsReference(output_vreg)) {
      // The input is assumed to immediately have a tagged representation,
      // before the pointer map can be used. I.e. the pointer map at the
      // instruction will include the output operand (whose value at the
      // beginning of the instruction is equal to the input operand). If
      // this is not desired, then the pointer map at this instruction needs
      // to be adjusted manually.
    }
  }
}
    1803             : 
    1804             : 
    1805      915111 : void ConstraintBuilder::ResolvePhis() {
    1806             :   // Process the blocks in reverse order.
    1807    24746117 :   for (InstructionBlock* block : base::Reversed(code()->instruction_blocks())) {
    1808    11457960 :     ResolvePhis(block);
    1809             :   }
    1810      915086 : }
    1811             : 
    1812             : 
// Lowers each phi in |block| into gap moves at the end of the predecessor
// blocks, and records a spill location for the phi's live range at the top of
// the block.
void ConstraintBuilder::ResolvePhis(const InstructionBlock* block) {
  for (PhiInstruction* phi : block->phis()) {
    int phi_vreg = phi->virtual_register();
    RegisterAllocationData::PhiMapValue* map_value =
        data()->InitializePhiMap(block, phi);
    InstructionOperand& output = phi->output();
    // Map the destination operands, so the commitment phase can find them.
    for (size_t i = 0; i < phi->operands().size(); ++i) {
      InstructionBlock* cur_block =
          code()->InstructionBlockAt(block->predecessors()[i]);
      // Operand i of the phi corresponds to predecessor i; the move is placed
      // at the end of that predecessor's last instruction.
      UnallocatedOperand input(UnallocatedOperand::ANY, phi->operands()[i]);
      MoveOperands* move = data()->AddGapMove(
          cur_block->last_instruction_index(), Instruction::END, input, output);
      map_value->AddOperand(&move->destination());
      DCHECK(!code()
                  ->InstructionAt(cur_block->last_instruction_index())
                  ->HasReferenceMap());
    }
    TopLevelLiveRange* live_range = data()->GetOrCreateLiveRangeFor(phi_vreg);
    int gap_index = block->first_instruction_index();
    live_range->RecordSpillLocation(allocation_zone(), gap_index, &output);
    live_range->SetSpillStartIndex(gap_index);
    // We use the phi-ness of some nodes in some later heuristics.
    live_range->set_is_phi(true);
    live_range->set_is_non_loop_phi(!block->IsLoopHeader());
  }
}
    1840             : 
    1841             : 
// Constructs a builder over |data|; |local_zone| backs the phi hint map.
LiveRangeBuilder::LiveRangeBuilder(RegisterAllocationData* data,
                                   Zone* local_zone)
    : data_(data), phi_hints_(local_zone) {}
    1845             : 
    1846             : 
// Returns the set of virtual registers live on exit from |block|, computed
// from the live-in sets of forward successors plus the phi inputs flowing
// along this block's outgoing edges. Results are memoized in
// data->live_out_sets(); backward (loop) edges are deliberately ignored here.
BitVector* LiveRangeBuilder::ComputeLiveOut(const InstructionBlock* block,
                                            RegisterAllocationData* data) {
  size_t block_index = block->rpo_number().ToSize();
  BitVector* live_out = data->live_out_sets()[block_index];
  if (live_out == nullptr) {
    // Compute live out for the given block, except not including backward
    // successor edges.
    Zone* zone = data->allocation_zone();
    const InstructionSequence* code = data->code();

    live_out = new (zone) BitVector(code->VirtualRegisterCount(), zone);

    // Process all successor blocks.
    for (const RpoNumber& succ : block->successors()) {
      // Add values live on entry to the successor.
      // Skip backward edges: a successor at or before us in RPO order is a
      // loop back-edge target.
      if (succ <= block->rpo_number()) continue;
      BitVector* live_in = data->live_in_sets()[succ.ToSize()];
      if (live_in != nullptr) live_out->Union(*live_in);

      // All phi input operands corresponding to this successor edge are live
      // out from this block.
      const InstructionBlock* successor = code->InstructionBlockAt(succ);
      size_t index = successor->PredecessorIndexOf(block->rpo_number());
      DCHECK(index < successor->PredecessorCount());
      for (PhiInstruction* phi : successor->phis()) {
        live_out->Add(phi->operands()[index]);
      }
    }
    // Memoize so repeated queries for this block are O(1).
    data->live_out_sets()[block_index] = live_out;
  }
  return live_out;
}
    1879             : 
    1880             : 
    1881    22915782 : void LiveRangeBuilder::AddInitialIntervals(const InstructionBlock* block,
    1882    78246359 :                                            BitVector* live_out) {
    1883             :   // Add an interval that includes the entire block to the live range for
    1884             :   // each live_out value.
    1885             :   LifetimePosition start = LifetimePosition::GapFromInstructionIndex(
    1886    11457891 :       block->first_instruction_index());
    1887             :   LifetimePosition end = LifetimePosition::InstructionFromInstructionIndex(
    1888             :                              block->last_instruction_index())
    1889    11457891 :                              .NextStart();
    1890             :   BitVector::Iterator iterator(live_out);
    1891   179408388 :   while (!iterator.Done()) {
    1892    78246359 :     int operand_index = iterator.Current();
    1893    78246359 :     TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(operand_index);
    1894    78246361 :     range->AddUseInterval(start, end, allocation_zone());
    1895    78246344 :     iterator.Advance();
    1896             :   }
    1897    11457835 : }
    1898             : 
    1899     9304934 : int LiveRangeBuilder::FixedFPLiveRangeID(int index, MachineRepresentation rep) {
    1900     9304934 :   int result = -index - 1;
    1901     9304934 :   switch (rep) {
    1902             :     case MachineRepresentation::kSimd128:
    1903           0 :       result -= config()->num_float_registers();
    1904             :     // Fall through.
    1905             :     case MachineRepresentation::kFloat32:
    1906        8347 :       result -= config()->num_double_registers();
    1907             :     // Fall through.
    1908             :     case MachineRepresentation::kFloat64:
    1909     9304934 :       result -= config()->num_general_registers();
    1910             :       break;
    1911             :     default:
    1912           0 :       UNREACHABLE();
    1913             :       break;
    1914             :   }
    1915     9304934 :   return result;
    1916             : }
    1917             : 
    1918   166977210 : TopLevelLiveRange* LiveRangeBuilder::FixedLiveRangeFor(int index) {
    1919             :   DCHECK(index < config()->num_general_registers());
    1920   226539951 :   TopLevelLiveRange* result = data()->fixed_live_ranges()[index];
    1921    75513317 :   if (result == nullptr) {
    1922             :     MachineRepresentation rep = InstructionSequence::DefaultRepresentation();
    1923     7975789 :     result = data()->NewLiveRange(FixedLiveRangeID(index), rep);
    1924             :     DCHECK(result->IsFixed());
    1925             :     result->set_assigned_register(index);
    1926     7975015 :     data()->MarkAllocated(rep, index);
    1927    15951122 :     data()->fixed_live_ranges()[index] = result;
    1928             :   }
    1929    75513089 :   return result;
    1930             : }
    1931             : 
    1932    51223817 : TopLevelLiveRange* LiveRangeBuilder::FixedFPLiveRangeFor(
    1933    60527389 :     int index, MachineRepresentation rep) {
    1934             :   int num_regs = config()->num_double_registers();
    1935             :   ZoneVector<TopLevelLiveRange*>* live_ranges =
    1936             :       &data()->fixed_double_live_ranges();
    1937             :   if (!kSimpleFPAliasing) {
    1938             :     switch (rep) {
    1939             :       case MachineRepresentation::kFloat32:
    1940             :         num_regs = config()->num_float_registers();
    1941             :         live_ranges = &data()->fixed_float_live_ranges();
    1942             :         break;
    1943             :       case MachineRepresentation::kSimd128:
    1944             :         num_regs = config()->num_simd128_registers();
    1945             :         live_ranges = &data()->fixed_simd128_live_ranges();
    1946             :         break;
    1947             :       default:
    1948             :         break;
    1949             :     }
    1950             :   }
    1951             : 
    1952             :   DCHECK(index < num_regs);
    1953             :   USE(num_regs);
    1954   111751333 :   TopLevelLiveRange* result = (*live_ranges)[index];
    1955    51223817 :   if (result == nullptr) {
    1956     9304924 :     result = data()->NewLiveRange(FixedFPLiveRangeID(index, rep), rep);
    1957             :     DCHECK(result->IsFixed());
    1958             :     result->set_assigned_register(index);
    1959     9303572 :     data()->MarkAllocated(rep, index);
    1960     9303699 :     (*live_ranges)[index] = result;
    1961             :   }
    1962    51222592 :   return result;
    1963             : }
    1964             : 
    1965   187793152 : TopLevelLiveRange* LiveRangeBuilder::LiveRangeFor(InstructionOperand* operand) {
    1966   112139051 :   if (operand->IsUnallocated()) {
    1967             :     return data()->GetOrCreateLiveRangeFor(
    1968    66608289 :         UnallocatedOperand::cast(operand)->virtual_register());
    1969    45530762 :   } else if (operand->IsConstant()) {
    1970             :     return data()->GetOrCreateLiveRangeFor(
    1971     9045812 :         ConstantOperand::cast(operand)->virtual_register());
    1972    36484950 :   } else if (operand->IsRegister()) {
    1973             :     return FixedLiveRangeFor(
    1974    34702032 :         LocationOperand::cast(operand)->GetRegister().code());
    1975     1782918 :   } else if (operand->IsFPRegister()) {
    1976             :     LocationOperand* op = LocationOperand::cast(operand);
    1977      208471 :     return FixedFPLiveRangeFor(op->register_code(), op->representation());
    1978             :   } else {
    1979             :     return nullptr;
    1980             :   }
    1981             : }
    1982             : 
    1983             : 
    1984    67971425 : UsePosition* LiveRangeBuilder::NewUsePosition(LifetimePosition pos,
    1985             :                                               InstructionOperand* operand,
    1986             :                                               void* hint,
    1987             :                                               UsePositionHintType hint_type) {
    1988   135942838 :   return new (allocation_zone()) UsePosition(pos, operand, hint, hint_type);
    1989             : }
    1990             : 
    1991             : 
// Records a definition of |operand| at |position|, truncating the live range
// so it starts at the definition. Returns the created UsePosition for
// unallocated operands (so hints can be resolved), nullptr otherwise.
UsePosition* LiveRangeBuilder::Define(LifetimePosition position,
                                      InstructionOperand* operand, void* hint,
                                      UsePositionHintType hint_type) {
  TopLevelLiveRange* range = LiveRangeFor(operand);
  // Operands without a live range (see LiveRangeFor) contribute nothing.
  if (range == nullptr) return nullptr;

  if (range->IsEmpty() || range->Start() > position) {
    // Can happen if there is a definition without use.
    range->AddUseInterval(position, position.NextStart(), allocation_zone());
    range->AddUsePosition(NewUsePosition(position.NextStart()));
  } else {
    // The range already extends past the definition; clip its start to it.
    range->ShortenTo(position);
  }
  // Only unallocated operands carry a hinted use position back to the caller.
  if (!operand->IsUnallocated()) return nullptr;
  UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand);
  UsePosition* use_pos =
      NewUsePosition(position, unalloc_operand, hint, hint_type);
  range->AddUsePosition(use_pos);
  return use_pos;
}
    2012             : 
    2013             : 
    2014    69424126 : UsePosition* LiveRangeBuilder::Use(LifetimePosition block_start,
    2015             :                                    LifetimePosition position,
    2016             :                                    InstructionOperand* operand, void* hint,
    2017             :                                    UsePositionHintType hint_type) {
    2018    69424126 :   TopLevelLiveRange* range = LiveRangeFor(operand);
    2019    69424329 :   if (range == nullptr) return nullptr;
    2020             :   UsePosition* use_pos = nullptr;
    2021    68637171 :   if (operand->IsUnallocated()) {
    2022             :     UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand);
    2023    51709651 :     use_pos = NewUsePosition(position, unalloc_operand, hint, hint_type);
    2024    51709681 :     range->AddUsePosition(use_pos);
    2025             :   }
    2026    68637461 :   range->AddUseInterval(block_start, position, allocation_zone());
    2027    68637003 :   return use_pos;
    2028             : }
    2029             : 
    2030             : 
// Builds live ranges for one block by scanning its instructions backwards,
// updating the |live| set as it goes: outputs kill values, inputs make them
// live. Also models register clobbers (calls) as uses of every fixed register
// and processes gap moves, eliminating dead ones and wiring up allocation
// hints between move sources and destinations.
void LiveRangeBuilder::ProcessInstructions(const InstructionBlock* block,
                                           BitVector* live) {
  int block_start = block->first_instruction_index();
  LifetimePosition block_start_position =
      LifetimePosition::GapFromInstructionIndex(block_start);
  // Whether this code uses float32/simd128 values at all; only relevant on
  // targets without simple FP aliasing, where those banks must be clobbered
  // separately.
  bool fixed_float_live_ranges = false;
  bool fixed_simd128_live_ranges = false;
  if (!kSimpleFPAliasing) {
    int mask = data()->code()->representation_mask();
    fixed_float_live_ranges = (mask & kFloatRepBit) != 0;
    fixed_simd128_live_ranges = (mask & kSimd128RepBit) != 0;
  }

  // Backward scan: last instruction down to the first.
  for (int index = block->last_instruction_index(); index >= block_start;
       index--) {
    LifetimePosition curr_position =
        LifetimePosition::InstructionFromInstructionIndex(index);
    Instruction* instr = code()->InstructionAt(index);
    DCHECK(instr != nullptr);
    DCHECK(curr_position.IsInstructionPosition());
    // Process output, inputs, and temps of this instruction.
    for (size_t i = 0; i < instr->OutputCount(); i++) {
      InstructionOperand* output = instr->OutputAt(i);
      if (output->IsUnallocated()) {
        // Unsupported.
        DCHECK(!UnallocatedOperand::cast(output)->HasSlotPolicy());
        int out_vreg = UnallocatedOperand::cast(output)->virtual_register();
        live->Remove(out_vreg);
      } else if (output->IsConstant()) {
        int out_vreg = ConstantOperand::cast(output)->virtual_register();
        live->Remove(out_vreg);
      }
      if (block->IsHandler() && index == block_start && output->IsAllocated() &&
          output->IsRegister() &&
          AllocatedOperand::cast(output)->GetRegister().is(
              v8::internal::kReturnRegister0)) {
        // The register defined here is blocked from gap start - it is the
        // exception value.
        // TODO(mtrofin): should we explore an explicit opcode for
        // the first instruction in the handler?
        Define(LifetimePosition::GapFromInstructionIndex(index), output);
      } else {
        Define(curr_position, output);
      }
    }

    if (instr->ClobbersRegisters()) {
      for (int i = 0; i < config()->num_allocatable_general_registers(); ++i) {
        // Create a UseInterval at this instruction for all fixed registers,
        // (including the instruction outputs). Adding another UseInterval here
        // is OK because AddUseInterval will just merge it with the existing
        // one at the end of the range.
        int code = config()->GetAllocatableGeneralCode(i);
        TopLevelLiveRange* range = FixedLiveRangeFor(code);
        range->AddUseInterval(curr_position, curr_position.End(),
                              allocation_zone());
      }
    }

    if (instr->ClobbersDoubleRegisters()) {
      for (int i = 0; i < config()->num_allocatable_double_registers(); ++i) {
        // Add a UseInterval for all DoubleRegisters. See comment above for
        // general registers.
        int code = config()->GetAllocatableDoubleCode(i);
        TopLevelLiveRange* range =
            FixedFPLiveRangeFor(code, MachineRepresentation::kFloat64);
        range->AddUseInterval(curr_position, curr_position.End(),
                              allocation_zone());
      }
      // Clobber fixed float registers on archs with non-simple aliasing.
      if (!kSimpleFPAliasing) {
        if (fixed_float_live_ranges) {
          for (int i = 0; i < config()->num_allocatable_float_registers();
               ++i) {
            // Add a UseInterval for all FloatRegisters. See comment above for
            // general registers.
            int code = config()->GetAllocatableFloatCode(i);
            TopLevelLiveRange* range =
                FixedFPLiveRangeFor(code, MachineRepresentation::kFloat32);
            range->AddUseInterval(curr_position, curr_position.End(),
                                  allocation_zone());
          }
        }
        if (fixed_simd128_live_ranges) {
          for (int i = 0; i < config()->num_allocatable_simd128_registers();
               ++i) {
            int code = config()->GetAllocatableSimd128Code(i);
            TopLevelLiveRange* range =
                FixedFPLiveRangeFor(code, MachineRepresentation::kSimd128);
            range->AddUseInterval(curr_position, curr_position.End(),
                                  allocation_zone());
          }
        }
      }
    }

    for (size_t i = 0; i < instr->InputCount(); i++) {
      InstructionOperand* input = instr->InputAt(i);
      if (input->IsImmediate() || input->IsExplicit()) {
        continue;  // Ignore immediates and explicitly reserved registers.
      }
      // Used-at-start inputs end at the instruction position itself; others
      // must survive until the end of the instruction.
      LifetimePosition use_pos;
      if (input->IsUnallocated() &&
          UnallocatedOperand::cast(input)->IsUsedAtStart()) {
        use_pos = curr_position;
      } else {
        use_pos = curr_position.End();
      }

      if (input->IsUnallocated()) {
        UnallocatedOperand* unalloc = UnallocatedOperand::cast(input);
        int vreg = unalloc->virtual_register();
        live->Add(vreg);
        if (unalloc->HasSlotPolicy()) {
          data()->GetOrCreateLiveRangeFor(vreg)->set_has_slot_use(true);
        }
      }
      Use(block_start_position, use_pos, input);
    }

    for (size_t i = 0; i < instr->TempCount(); i++) {
      InstructionOperand* temp = instr->TempAt(i);
      // Unsupported.
      DCHECK_IMPLIES(temp->IsUnallocated(),
                     !UnallocatedOperand::cast(temp)->HasSlotPolicy());
      if (instr->ClobbersTemps()) {
        // Temps already covered by the clobber set above need no extra
        // use/define here.
        if (temp->IsRegister()) continue;
        if (temp->IsUnallocated()) {
          UnallocatedOperand* temp_unalloc = UnallocatedOperand::cast(temp);
          if (temp_unalloc->HasFixedPolicy()) {
            continue;
          }
        }
      }
      // A temp is both used and defined across the whole instruction.
      Use(block_start_position, curr_position.End(), temp);
      Define(curr_position, temp);
    }

    // Process the moves of the instruction's gaps, making their sources live.
    const Instruction::GapPosition kPositions[] = {Instruction::END,
                                                   Instruction::START};
    curr_position = curr_position.PrevStart();
    DCHECK(curr_position.IsGapPosition());
    for (const Instruction::GapPosition& position : kPositions) {
      ParallelMove* move = instr->GetParallelMove(position);
      if (move == nullptr) continue;
      if (position == Instruction::END) {
        curr_position = curr_position.End();
      } else {
        curr_position = curr_position.Start();
      }
      for (MoveOperands* cur : *move) {
        InstructionOperand& from = cur->source();
        InstructionOperand& to = cur->destination();
        void* hint = &to;
        UsePositionHintType hint_type = UsePosition::HintTypeForOperand(to);
        UsePosition* to_use = nullptr;
        // Set to the phi's vreg when the destination is a phi, so the phi
        // hint can be resolved after the source use is created.
        int phi_vreg = -1;
        if (to.IsUnallocated()) {
          int to_vreg = UnallocatedOperand::cast(to).virtual_register();
          TopLevelLiveRange* to_range =
              data()->GetOrCreateLiveRangeFor(to_vreg);
          if (to_range->is_phi()) {
            phi_vreg = to_vreg;
            if (to_range->is_non_loop_phi()) {
              hint = to_range->current_hint_position();
              hint_type = hint == nullptr ? UsePositionHintType::kNone
                                          : UsePositionHintType::kUsePos;
            } else {
              hint_type = UsePositionHintType::kPhi;
              hint = data()->GetPhiMapValueFor(to_vreg);
            }
          } else {
            if (live->Contains(to_vreg)) {
              to_use = Define(curr_position, &to, &from,
                              UsePosition::HintTypeForOperand(from));
              live->Remove(to_vreg);
            } else {
              // Destination is dead: the move has no effect and is dropped.
              cur->Eliminate();
              continue;
            }
          }
        } else {
          Define(curr_position, &to);
        }
        UsePosition* from_use =
            Use(block_start_position, curr_position, &from, hint, hint_type);
        // Mark range live.
        if (from.IsUnallocated()) {
          live->Add(UnallocatedOperand::cast(from).virtual_register());
        }
        // Resolve use position hints just created.
        if (to_use != nullptr && from_use != nullptr) {
          to_use->ResolveHint(from_use);
          from_use->ResolveHint(to_use);
        }
        DCHECK_IMPLIES(to_use != nullptr, to_use->IsResolved());
        DCHECK_IMPLIES(from_use != nullptr, from_use->IsResolved());
        // Potentially resolve phi hint.
        if (phi_vreg != -1) ResolvePhiHint(&from, from_use);
      }
    }
  }
}
    2229             :         // Potentially resolve phi hint.
    2230    22682737 :         if (phi_vreg != -1) ResolvePhiHint(&from, from_use);
    2231             :       }
    2232             :     }
    2233             :   }
    2234    11458056 : }
    2235             : 
// Kills the output of every phi in |block| (its live range interval already
// ends at the block's first instruction) and records a use-position hint for
// each phi, chosen from the best-scoring predecessor move. The hint is
// registered via MapPhiHint and resolved later by ResolvePhiHint.
void LiveRangeBuilder::ProcessPhis(const InstructionBlock* block,
                                   BitVector* live) {
  for (PhiInstruction* phi : block->phis()) {
    // The live range interval already ends at the first instruction of the
    // block.
    int phi_vreg = phi->virtual_register();
    live->Remove(phi_vreg);
    // Select a hint from a predecessor block that precedes this block in the
    // rpo order. In order of priority:
    // - Avoid hints from deferred blocks.
    // - Prefer hints from allocated (or explicit) operands.
    // - Prefer hints from empty blocks (containing just parallel moves and a
    //   jump). In these cases, if we can elide the moves, the jump threader
    //   is likely to be able to elide the jump.
    // The enforcement of hinting in rpo order is required because hint
    // resolution that happens later in the compiler pipeline visits
    // instructions in reverse rpo order, relying on the fact that phis are
    // encountered before their hints.
    InstructionOperand* hint = nullptr;
    int hint_preference = 0;

    // The cost of hinting increases with the number of predecessors. At the
    // same time, the typical benefit decreases, since this hinting only
    // optimises the execution path through one predecessor. A limit of 2 is
    // sufficient to hit the common if/else pattern.
    int predecessor_limit = 2;

    for (RpoNumber predecessor : block->predecessors()) {
      const InstructionBlock* predecessor_block =
          code()->InstructionBlockAt(predecessor);
      DCHECK_EQ(predecessor_block->rpo_number(), predecessor);

      // Only take hints from earlier rpo numbers.
      if (predecessor >= block->rpo_number()) continue;

      // Look up the predecessor instruction.
      const Instruction* predecessor_instr =
          GetLastInstruction(code(), predecessor_block);
      InstructionOperand* predecessor_hint = nullptr;
      // Phis are assigned in the END position of the last instruction in each
      // predecessor block.
      for (MoveOperands* move :
           *predecessor_instr->GetParallelMove(Instruction::END)) {
        InstructionOperand& to = move->destination();
        if (to.IsUnallocated() &&
            UnallocatedOperand::cast(to).virtual_register() == phi_vreg) {
          predecessor_hint = &move->source();
          break;
        }
      }
      DCHECK_NOT_NULL(predecessor_hint);

      // For each predecessor, generate a score according to the priorities
      // described above, and pick the best one. Flags in higher-order bits have
      // a higher priority than those in lower-order bits.
      int predecessor_hint_preference = 0;
      const int kNotDeferredBlockPreference = (1 << 2);
      const int kMoveIsAllocatedPreference = (1 << 1);
      const int kBlockIsEmptyPreference = (1 << 0);

      // - Avoid hints from deferred blocks.
      if (!predecessor_block->IsDeferred()) {
        predecessor_hint_preference |= kNotDeferredBlockPreference;
      }

      // - Prefer hints from allocated (or explicit) operands.
      //
      // Already-allocated or explicit operands are typically assigned using
      // the parallel moves on the last instruction. For example:
      //
      //      gap (v101 = [x0|R|w32]) (v100 = v101)
      //      ArchJmp
      //    ...
      //    phi: v100 = v101 v102
      //
      // We have already found the END move, so look for a matching START move
      // from an allocated (or explicit) operand.
      //
      // Note that we cannot simply look up data()->live_ranges()[vreg] here
      // because the live ranges are still being built when this function is
      // called.
      // TODO(v8): Find a way to separate hinting from live range analysis in
      // BuildLiveRanges so that we can use the O(1) live-range look-up.
      auto moves = predecessor_instr->GetParallelMove(Instruction::START);
      if (moves != nullptr) {
        for (MoveOperands* move : *moves) {
          InstructionOperand& to = move->destination();
          if (predecessor_hint->Equals(to)) {
            if (move->source().IsAllocated() || move->source().IsExplicit()) {
              predecessor_hint_preference |= kMoveIsAllocatedPreference;
            }
            break;
          }
        }
      }

      // - Prefer hints from empty blocks.
      if (predecessor_block->last_instruction_index() ==
          predecessor_block->first_instruction_index()) {
        predecessor_hint_preference |= kBlockIsEmptyPreference;
      }

      if ((hint == nullptr) ||
          (predecessor_hint_preference > hint_preference)) {
        // Take the hint from this predecessor.
        hint = predecessor_hint;
        hint_preference = predecessor_hint_preference;
      }

      if (--predecessor_limit <= 0) break;
    }
    DCHECK_NOT_NULL(hint);

    // Define the phi output at the block start, hinted at the winning
    // predecessor operand chosen above.
    LifetimePosition block_start = LifetimePosition::GapFromInstructionIndex(
        block->first_instruction_index());
    UsePosition* use_pos = Define(block_start, &phi->output(), hint,
                                  UsePosition::HintTypeForOperand(*hint));
    MapPhiHint(hint, use_pos);
  }
}
    2356             : 
    2357             : 
    2358      206766 : void LiveRangeBuilder::ProcessLoopHeader(const InstructionBlock* block,
    2359     1113075 :                                          BitVector* live) {
    2360             :   DCHECK(block->IsLoopHeader());
    2361             :   // Add a live range stretching from the first loop instruction to the last
    2362             :   // for each value live on entry to the header.
    2363             :   BitVector::Iterator iterator(live);
    2364             :   LifetimePosition start = LifetimePosition::GapFromInstructionIndex(
    2365      103383 :       block->first_instruction_index());
    2366             :   LifetimePosition end = LifetimePosition::GapFromInstructionIndex(
    2367      103383 :                              code()->LastLoopInstructionIndex(block))
    2368      103383 :                              .NextFullStart();
    2369     2536299 :   while (!iterator.Done()) {
    2370     1113075 :     int operand_index = iterator.Current();
    2371     1113075 :     TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(operand_index);
    2372     1113075 :     range->EnsureInterval(start, end, allocation_zone());
    2373     1113075 :     iterator.Advance();
    2374             :   }
    2375             :   // Insert all values into the live in sets of all blocks in the loop.
    2376     2019179 :   for (int i = block->rpo_number().ToInt() + 1; i < block->loop_end().ToInt();
    2377             :        ++i) {
    2378     5747388 :     live_in_sets()[i]->Union(*live);
    2379             :   }
    2380      103383 : }
    2381             : 
    2382             : 
// Main entry point of live-range construction: walks all blocks in reverse
// rpo order, computing per-block liveness and building intervals, then
// post-processes the resulting ranges (spill-slot assignment for slot uses,
// the constant-use hack, and preassigned spill slots).
void LiveRangeBuilder::BuildLiveRanges() {
  // Process the blocks in reverse order.
  for (int block_id = code()->InstructionBlockCount() - 1; block_id >= 0;
       --block_id) {
    InstructionBlock* block =
        code()->InstructionBlockAt(RpoNumber::FromInt(block_id));
    BitVector* live = ComputeLiveOut(block, data());
    // Initially consider all live_out values live for the entire block. We
    // will shorten these intervals if necessary.
    AddInitialIntervals(block, live);
    // Process the instructions in reverse order, generating and killing
    // live values.
    ProcessInstructions(block, live);
    // All phi output operands are killed by this block.
    ProcessPhis(block, live);
    // Now live is live_in for this block except not including values live
    // out on backward successor edges.
    if (block->IsLoopHeader()) ProcessLoopHeader(block, live);
    live_in_sets()[block_id] = live;
  }
  // Postprocess the ranges.
  for (TopLevelLiveRange* range : data()->live_ranges()) {
    if (range == nullptr) continue;
    // Give slots to all ranges with a non fixed slot use.
    if (range->has_slot_use() && range->HasNoSpillType()) {
      data()->AssignSpillRangeToLiveRange(range);
    }
    // TODO(bmeurer): This is a horrible hack to make sure that for constant
    // live ranges, every use requires the constant to be in a register.
    // Without this hack, all uses with "any" policy would get the constant
    // operand assigned.
    if (range->HasSpillOperand() && range->GetSpillOperand()->IsConstant()) {
      for (UsePosition* pos = range->first_pos(); pos != nullptr;
           pos = pos->next()) {
        if (pos->type() == UsePositionType::kRequiresSlot) continue;
        UsePositionType new_type = UsePositionType::kAny;
        // Can't mark phis as needing a register.
        if (!pos->pos().IsGapPosition()) {
          new_type = UsePositionType::kRequiresRegister;
        }
        pos->set_type(new_type, true);
      }
    }
  }
  // Honor spill slots that were preassigned before allocation started.
  for (auto preassigned : data()->preassigned_slot_ranges()) {
    TopLevelLiveRange* range = preassigned.first;
    int slot_id = preassigned.second;
    SpillRange* spill = range->HasSpillRange()
                            ? range->GetSpillRange()
                            : data()->AssignSpillRangeToLiveRange(range);
    spill->set_assigned_slot(slot_id);
  }
#ifdef DEBUG
  Verify();
#endif
}
    2439             : 
    2440             : 
    2441           0 : void LiveRangeBuilder::MapPhiHint(InstructionOperand* operand,
    2442             :                                   UsePosition* use_pos) {
    2443             :   DCHECK(!use_pos->IsResolved());
    2444     2670414 :   auto res = phi_hints_.insert(std::make_pair(operand, use_pos));
    2445             :   DCHECK(res.second);
    2446             :   USE(res);
    2447           0 : }
    2448             : 
    2449             : 
    2450     3329853 : void LiveRangeBuilder::ResolvePhiHint(InstructionOperand* operand,
    2451             :                                       UsePosition* use_pos) {
    2452             :   auto it = phi_hints_.find(operand);
    2453     6659710 :   if (it == phi_hints_.end()) return;
    2454             :   DCHECK(!it->second->IsResolved());
    2455     1335210 :   it->second->ResolveHint(use_pos);
    2456             : }
    2457             : 
    2458             : 
// Debug-only consistency check over the ranges just built: all phi hints must
// have been resolved, and every non-empty top-level range must satisfy the
// structural interval invariants checked below.
void LiveRangeBuilder::Verify() const {
  for (auto& hint : phi_hints_) {
    CHECK(hint.second->IsResolved());
  }
  for (const TopLevelLiveRange* current : data()->live_ranges()) {
    if (current != nullptr && !current->IsEmpty()) {
      // New LiveRanges should not be split.
      CHECK_NULL(current->next());
      // General integrity check.
      current->Verify();
      const UseInterval* first = current->first_interval();
      if (first->next() == nullptr) continue;

      // Consecutive intervals should not end and start in the same block,
      // otherwise the intervals should have been joined, because the
      // variable is live throughout that block.
      CHECK(NextIntervalStartsInDifferentBlocks(first));

      for (const UseInterval* i = first->next(); i != nullptr; i = i->next()) {
        // Except for the first interval, the other intervals must start at
        // a block boundary, otherwise data wouldn't flow to them.
        CHECK(IntervalStartsAtBlockBoundary(i));
        // The last instruction of the predecessors of the block the interval
        // starts must be covered by the range.
        CHECK(IntervalPredecessorsCoveredByRange(i, current));
        if (i->next() != nullptr) {
          // Check the consecutive intervals property, except for the last
          // interval, where it doesn't apply.
          CHECK(NextIntervalStartsInDifferentBlocks(i));
        }
      }
    }
  }
}
    2493             : 
    2494           0 : bool LiveRangeBuilder::IntervalStartsAtBlockBoundary(
    2495           0 :     const UseInterval* interval) const {
    2496             :   LifetimePosition start = interval->start();
    2497           0 :   if (!start.IsFullStart()) return false;
    2498             :   int instruction_index = start.ToInstructionIndex();
    2499           0 :   const InstructionBlock* block =
    2500           0 :       data()->code()->GetInstructionBlock(instruction_index);
    2501           0 :   return block->first_instruction_index() == instruction_index;
    2502             : }
    2503             : 
    2504           0 : bool LiveRangeBuilder::IntervalPredecessorsCoveredByRange(
    2505           0 :     const UseInterval* interval, const TopLevelLiveRange* range) const {
    2506             :   LifetimePosition start = interval->start();
    2507             :   int instruction_index = start.ToInstructionIndex();
    2508             :   const InstructionBlock* block =
    2509           0 :       data()->code()->GetInstructionBlock(instruction_index);
    2510           0 :   for (RpoNumber pred_index : block->predecessors()) {
    2511           0 :     const InstructionBlock* predecessor =
    2512           0 :         data()->code()->InstructionBlockAt(pred_index);
    2513             :     LifetimePosition last_pos = LifetimePosition::GapFromInstructionIndex(
    2514             :         predecessor->last_instruction_index());
    2515             :     last_pos = last_pos.NextStart().End();
    2516           0 :     if (!range->Covers(last_pos)) return false;
    2517             :   }
    2518           0 :   return true;
    2519             : }
    2520             : 
    2521           0 : bool LiveRangeBuilder::NextIntervalStartsInDifferentBlocks(
    2522           0 :     const UseInterval* interval) const {
    2523             :   DCHECK_NOT_NULL(interval->next());
    2524             :   LifetimePosition end = interval->end();
    2525             :   LifetimePosition next_start = interval->next()->start();
    2526             :   // Since end is not covered, but the previous position is, move back a
    2527             :   // position
    2528           0 :   end = end.IsStart() ? end.PrevStart().End() : end.Start();
    2529             :   int last_covered_index = end.ToInstructionIndex();
    2530             :   const InstructionBlock* block =
    2531           0 :       data()->code()->GetInstructionBlock(last_covered_index);
    2532             :   const InstructionBlock* next_block =
    2533           0 :       data()->code()->GetInstructionBlock(next_start.ToInstructionIndex());
    2534           0 :   return block->rpo_number() < next_block->rpo_number();
    2535             : }
    2536             : 
// Caches the register counts/codes for |kind| (general vs. FP) from the
// configuration. check_fp_aliasing_ only becomes true on targets without
// simple FP aliasing, and only when the code actually uses float32 or
// simd128 representations.
RegisterAllocator::RegisterAllocator(RegisterAllocationData* data,
                                     RegisterKind kind)
    : data_(data),
      mode_(kind),
      num_registers_(GetRegisterCount(data->config(), kind)),
      num_allocatable_registers_(
          GetAllocatableRegisterCount(data->config(), kind)),
      allocatable_register_codes_(
          GetAllocatableRegisterCodes(data->config(), kind)),
      check_fp_aliasing_(false) {
  if (!kSimpleFPAliasing && kind == FP_REGISTERS) {
    check_fp_aliasing_ = (data->code()->representation_mask() &
                          (kFloatRepBit | kSimd128RepBit)) != 0;
  }
}
    2552             : 
    2553           0 : LifetimePosition RegisterAllocator::GetSplitPositionForInstruction(
    2554     9160557 :     const LiveRange* range, int instruction_index) {
    2555             :   LifetimePosition ret = LifetimePosition::Invalid();
    2556             : 
    2557             :   ret = LifetimePosition::GapFromInstructionIndex(instruction_index);
    2558    18329383 :   if (range->Start() >= ret || ret >= range->End()) {
    2559             :     return LifetimePosition::Invalid();
    2560             :   }
    2561           0 :   return ret;
    2562             : }
    2563             : 
// For every processable range that already has a spill type (and, for ranges
// with a spill range, also a slot use), spills the portion of the range that
// does not immediately need a register: either the whole range (no register
// use at all) or the head of the range after splitting before the first
// register-beneficial use.
void RegisterAllocator::SplitAndSpillRangesDefinedByMemoryOperand() {
  // Splitting appends new ranges; only iterate the ranges that existed when
  // we started.
  size_t initial_range_count = data()->live_ranges().size();
  for (size_t i = 0; i < initial_range_count; ++i) {
    TopLevelLiveRange* range = data()->live_ranges()[i];
    if (!CanProcessRange(range)) continue;
    if (range->HasNoSpillType() ||
        (range->HasSpillRange() && !range->has_slot_use())) {
      continue;
    }
    LifetimePosition start = range->Start();
    TRACE("Live range %d:%d is defined by a spill operand.\n",
          range->TopLevel()->vreg(), range->relative_id());
    LifetimePosition next_pos = start;
    if (next_pos.IsGapPosition()) {
      next_pos = next_pos.NextStart();
    }

    // With splinters, we can be more strict and skip over positions
    // not strictly needing registers.
    UsePosition* pos =
        range->IsSplinter()
            ? range->NextRegisterPosition(next_pos)
            : range->NextUsePositionRegisterIsBeneficial(next_pos);
    // If the range already has a spill operand and it doesn't need a
    // register immediately, split it and spill the first part of the range.
    if (pos == nullptr) {
      Spill(range);
    } else if (pos->pos() > range->Start().NextStart()) {
      // Do not spill live range eagerly if use position that can benefit from
      // the register is too close to the start of live range.
      LifetimePosition split_pos = GetSplitPositionForInstruction(
          range, pos->pos().ToInstructionIndex());
      // There is no place to split, so we can't split and spill.
      if (!split_pos.IsValid()) continue;

      split_pos =
          FindOptimalSplitPos(range->Start().NextFullStart(), split_pos);

      SplitRangeAt(range, split_pos);
      Spill(range);
    }
  }
}
    2607             : 
    2608             : 
    2609    15168453 : LiveRange* RegisterAllocator::SplitRangeAt(LiveRange* range,
    2610             :                                            LifetimePosition pos) {
    2611             :   DCHECK(!range->TopLevel()->IsFixed());
    2612    15168453 :   TRACE("Splitting live range %d:%d at %d\n", range->TopLevel()->vreg(),
    2613             :         range->relative_id(), pos.value());
    2614             : 
    2615    15168521 :   if (pos <= range->Start()) return range;
    2616             : 
    2617             :   // We can't properly connect liveranges if splitting occurred at the end
    2618             :   // a block.
    2619             :   DCHECK(pos.IsStart() || pos.IsGapPosition() ||
    2620             :          (GetInstructionBlock(code(), pos)->last_instruction_index() !=
    2621             :           pos.ToInstructionIndex()));
    2622             : 
    2623    13801118 :   LiveRange* result = range->SplitAt(pos, allocation_zone());
    2624    13801100 :   return result;
    2625             : }
    2626             : 
    2627             : 
    2628     1404680 : LiveRange* RegisterAllocator::SplitBetween(LiveRange* range,
    2629             :                                            LifetimePosition start,
    2630             :                                            LifetimePosition end) {
    2631             :   DCHECK(!range->TopLevel()->IsFixed());
    2632     1404680 :   TRACE("Splitting live range %d:%d in position between [%d, %d]\n",
    2633             :         range->TopLevel()->vreg(), range->relative_id(), start.value(),
    2634             :         end.value());
    2635             : 
    2636     1404680 :   LifetimePosition split_pos = FindOptimalSplitPos(start, end);
    2637             :   DCHECK(split_pos >= start);
    2638     1404680 :   return SplitRangeAt(range, split_pos);
    2639             : }
    2640             : 
    2641             : 
// Chooses a split position within [start, end]: the latest possible position
// when both ends fall in the same block, otherwise the start of the header of
// the outermost loop contained in the interval — so that split-induced moves
// land outside loops where possible.
LifetimePosition RegisterAllocator::FindOptimalSplitPos(LifetimePosition start,
                                                        LifetimePosition end) {
  int start_instr = start.ToInstructionIndex();
  int end_instr = end.ToInstructionIndex();
  DCHECK(start_instr <= end_instr);

  // We have no choice
  if (start_instr == end_instr) return end;

  const InstructionBlock* start_block = GetInstructionBlock(code(), start);
  const InstructionBlock* end_block = GetInstructionBlock(code(), end);

  if (end_block == start_block) {
    // The interval is split in the same basic block. Split at the latest
    // possible position.
    return end;
  }

  const InstructionBlock* block = end_block;
  // Find header of outermost loop.
  do {
    const InstructionBlock* loop = GetContainingLoop(code(), block);
    if (loop == nullptr ||
        loop->rpo_number().ToInt() <= start_block->rpo_number().ToInt()) {
      // No more loops or loop starts before the lifetime start.
      break;
    }
    block = loop;
  } while (true);

  // We did not find any suitable outer loop. Split at the latest possible
  // position unless end_block is a loop header itself.
  if (block == end_block && !end_block->IsLoopHeader()) return end;

  // Split at the start of the outermost enclosing loop header found above.
  return LifetimePosition::GapFromInstructionIndex(
      block->first_instruction_index());
}
    2679             : 
    2680             : 
// Given a desired spill position |pos| inside |range|, tries to hoist the
// spill backwards to the start of an enclosing loop header (walking outwards
// through nested loops), which reduces memory moves on loop back edges.
// Hoisting only happens past positions with no register-beneficial use.
LifetimePosition RegisterAllocator::FindOptimalSpillingPos(
    LiveRange* range, LifetimePosition pos) {
  const InstructionBlock* block = GetInstructionBlock(code(), pos.Start());
  const InstructionBlock* loop_header =
      block->IsLoopHeader() ? block : GetContainingLoop(code(), block);

  // Not inside any loop: spill right where requested.
  if (loop_header == nullptr) return pos;

  const UsePosition* prev_use =
      range->PreviousUsePositionRegisterIsBeneficial(pos);

  while (loop_header != nullptr) {
    // We are going to spill live range inside the loop.
    // If possible try to move spilling position backwards to loop header.
    // This will reduce number of memory moves on the back edge.
    LifetimePosition loop_start = LifetimePosition::GapFromInstructionIndex(
        loop_header->first_instruction_index());

    if (range->Covers(loop_start)) {
      if (prev_use == nullptr || prev_use->pos() < loop_start) {
        // No register beneficial use inside the loop before the pos.
        pos = loop_start;
      }
    }

    // Try hoisting out to an outer loop.
    loop_header = GetContainingLoop(code(), loop_header);
  }

  return pos;
}
    2712             : 
    2713             : 
    2714    18035121 : void RegisterAllocator::Spill(LiveRange* range) {
    2715             :   DCHECK(!range->spilled());
    2716           0 :   TopLevelLiveRange* first = range->TopLevel();
    2717    16899057 :   TRACE("Spilling live range %d:%d\n", first->vreg(), range->relative_id());
    2718             : 
    2719    16899121 :   if (first->HasNoSpillType()) {
    2720     1136064 :     data()->AssignSpillRangeToLiveRange(first);
    2721             :   }
    2722             :   range->Spill();
    2723    16899141 : }
    2724             : 
    2725           0 : const char* RegisterAllocator::RegisterName(int register_code) const {
    2726           0 :   if (mode() == GENERAL_REGISTERS) {
    2727           0 :     return data()->config()->GetGeneralRegisterName(register_code);
    2728             :   } else {
    2729           0 :     return data()->config()->GetDoubleRegisterName(register_code);
    2730             :   }
    2731             : }
    2732             : 
    2733             : 
// Sets up the three linear-scan worklists (unhandled, active, inactive)
// in |local_zone|.
LinearScanAllocator::LinearScanAllocator(RegisterAllocationData* data,
                                         RegisterKind kind, Zone* local_zone)
    : RegisterAllocator(data, kind),
      unhandled_live_ranges_(local_zone),
      active_live_ranges_(local_zone),
      inactive_live_ranges_(local_zone) {
  // Reserve twice the virtual register count up front: splitting commonly
  // produces extra live-range pieces, and this avoids reallocation while
  // the worklist is being filled.
  unhandled_live_ranges().reserve(
      static_cast<size_t>(code()->VirtualRegisterCount() * 2));
  active_live_ranges().reserve(8);
  inactive_live_ranges().reserve(8);
  // TryAllocateFreeReg and AllocateBlockedReg assume this
  // when allocating local arrays.
  DCHECK(RegisterConfiguration::kMaxFPRegisters >=
         this->data()->config()->num_general_registers());
}
    2749             : 
    2750             : 
// Main linear-scan driver: builds the sorted unhandled worklist, seeds the
// inactive set with fixed (pre-colored) ranges, then processes ranges in
// start-position order, moving them between the active/inactive/handled
// sets as the scan position advances.
void LinearScanAllocator::AllocateRegisters() {
  DCHECK(unhandled_live_ranges().empty());
  DCHECK(active_live_ranges().empty());
  DCHECK(inactive_live_ranges().empty());

  SplitAndSpillRangesDefinedByMemoryOperand();

  // Collect every not-yet-spilled piece of every processable range.
  for (TopLevelLiveRange* range : data()->live_ranges()) {
    if (!CanProcessRange(range)) continue;
    for (LiveRange* to_add = range; to_add != nullptr;
         to_add = to_add->next()) {
      if (!to_add->spilled()) {
        AddToUnhandledUnsorted(to_add);
      }
    }
  }
  SortUnhandled();
  DCHECK(UnhandledIsSorted());

  // Fixed ranges already have their register; they only constrain others,
  // so they start out in the inactive set.
  if (mode() == GENERAL_REGISTERS) {
    for (TopLevelLiveRange* current : data()->fixed_live_ranges()) {
      if (current != nullptr) AddToInactive(current);
    }
  } else {
    for (TopLevelLiveRange* current : data()->fixed_double_live_ranges()) {
      if (current != nullptr) AddToInactive(current);
    }
    if (!kSimpleFPAliasing && check_fp_aliasing()) {
      for (TopLevelLiveRange* current : data()->fixed_float_live_ranges()) {
        if (current != nullptr) AddToInactive(current);
      }
      for (TopLevelLiveRange* current : data()->fixed_simd128_live_ranges()) {
        if (current != nullptr) AddToInactive(current);
      }
    }
  }

  while (!unhandled_live_ranges().empty()) {
    DCHECK(UnhandledIsSorted());
    // The worklist is sorted so the range with the smallest start position
    // sits at the back and can be popped cheaply.
    LiveRange* current = unhandled_live_ranges().back();
    unhandled_live_ranges().pop_back();
    DCHECK(UnhandledIsSorted());
    LifetimePosition position = current->Start();
#ifdef DEBUG
    allocation_finger_ = position;
#endif
    TRACE("Processing interval %d:%d start=%d\n", current->TopLevel()->vreg(),
          current->relative_id(), position.value());

    if (current->IsTopLevel() && TryReuseSpillForPhi(current->TopLevel()))
      continue;

    // Retire or park active ranges that no longer cover |position|.
    // Note: when element 0 is removed, the size_t |i| wraps on --i and the
    // loop's ++i brings it back to 0, so no element is skipped.
    for (size_t i = 0; i < active_live_ranges().size(); ++i) {
      LiveRange* cur_active = active_live_ranges()[i];
      if (cur_active->End() <= position) {
        ActiveToHandled(cur_active);
        --i;  // The live range was removed from the list of active live ranges.
      } else if (!cur_active->Covers(position)) {
        ActiveToInactive(cur_active);
        --i;  // The live range was removed from the list of active live ranges.
      }
    }

    // Symmetrically, wake up or retire inactive ranges at |position|.
    for (size_t i = 0; i < inactive_live_ranges().size(); ++i) {
      LiveRange* cur_inactive = inactive_live_ranges()[i];
      if (cur_inactive->End() <= position) {
        InactiveToHandled(cur_inactive);
        --i;  // Live range was removed from the list of inactive live ranges.
      } else if (cur_inactive->Covers(position)) {
        InactiveToActive(cur_inactive);
        --i;  // Live range was removed from the list of inactive live ranges.
      }
    }

    DCHECK(!current->HasRegisterAssigned() && !current->spilled());

    ProcessCurrentRange(current);
  }
}
    2830             : 
// Attempts the splinter-specific cold-path heuristic: spill the splinter
// (or its head, up to the first position that truly needs a register)
// rather than allocating. Returns true if |range| was fully handled here;
// false means the caller should fall back to the regular allocation path.
bool LinearScanAllocator::TrySplitAndSpillSplinter(LiveRange* range) {
  DCHECK(range->TopLevel()->IsSplinter());
  // If we can spill the whole range, great. Otherwise, split above the
  // first use needing a register and spill the top part.
  const UsePosition* next_reg = range->NextRegisterPosition(range->Start());
  if (next_reg == nullptr) {
    // No use requires a register: spilling the whole splinter is free.
    Spill(range);
    return true;
  } else if (range->FirstHintPosition() == nullptr) {
    // If there was no hint, but we have a use position requiring a
    // register, apply the hot path heuristics.
    return false;
  } else if (next_reg->pos().PrevStart() > range->Start()) {
    // The register-requiring use is strictly after the start: split there,
    // re-queue the tail for allocation, and spill only the head.
    LiveRange* tail = SplitRangeAt(range, next_reg->pos().PrevStart());
    AddToUnhandledSorted(tail);
    Spill(range);
    return true;
  }
  return false;
}
    2851             : 
// Commits register |reg| to |range|: records the allocation, propagates
// the register as a hint to the range's uses, and, for phi-defining
// top-level ranges, mirrors the assignment into the phi map.
void LinearScanAllocator::SetLiveRangeAssignedRegister(LiveRange* range,
                                                       int reg) {
  data()->MarkAllocated(range->representation(), reg);
  range->set_assigned_register(reg);
  range->SetUseHints(reg);
  if (range->IsTopLevel() && range->TopLevel()->is_phi()) {
    data()->GetPhiMapValueFor(range->TopLevel())->set_assigned_register(reg);
  }
}
    2861             : 
    2862             : 
    2863    22484137 : void LinearScanAllocator::AddToActive(LiveRange* range) {
    2864    22484137 :   TRACE("Add live range %d:%d to active\n", range->TopLevel()->vreg(),
    2865             :         range->relative_id());
    2866    22484137 :   active_live_ranges().push_back(range);
    2867    22484131 : }
    2868             : 
    2869             : 
    2870    17280255 : void LinearScanAllocator::AddToInactive(LiveRange* range) {
    2871    17280255 :   TRACE("Add live range %d:%d to inactive\n", range->TopLevel()->vreg(),
    2872             :         range->relative_id());
    2873    17280255 :   inactive_live_ranges().push_back(range);
    2874    17280019 : }
    2875             : 
    2876             : 
// Inserts |range| into the already-sorted unhandled worklist at the
// position that preserves the ordering (ranges to be processed first are
// kept at the back). Used for ranges created after the initial sort,
// e.g. split tails.
void LinearScanAllocator::AddToUnhandledSorted(LiveRange* range) {
  if (range == nullptr || range->IsEmpty()) return;
  DCHECK(!range->HasRegisterAssigned() && !range->spilled());
  // New ranges must not start before the current allocation position.
  DCHECK(allocation_finger_ <= range->Start());
  // Scan from the back (earliest-processed end); split tails start late,
  // so the insertion point is typically found quickly.
  for (int i = static_cast<int>(unhandled_live_ranges().size() - 1); i >= 0;
       --i) {
    LiveRange* cur_range = unhandled_live_ranges().at(i);
    if (!range->ShouldBeAllocatedBefore(cur_range)) continue;
    TRACE("Add live range %d:%d to unhandled at %d\n",
          range->TopLevel()->vreg(), range->relative_id(), i + 1);
    auto it = unhandled_live_ranges().begin() + (i + 1);
    unhandled_live_ranges().insert(it, range);
    DCHECK(UnhandledIsSorted());
    return;
  }
  // |range| is processed after everything currently queued: insert at the
  // front of the vector.
  TRACE("Add live range %d:%d to unhandled at start\n",
        range->TopLevel()->vreg(), range->relative_id());
  unhandled_live_ranges().insert(unhandled_live_ranges().begin(), range);
  DCHECK(UnhandledIsSorted());
}
    2897             : 
    2898             : 
    2899    21772214 : void LinearScanAllocator::AddToUnhandledUnsorted(LiveRange* range) {
    2900    65316628 :   if (range == nullptr || range->IsEmpty()) return;
    2901             :   DCHECK(!range->HasRegisterAssigned() && !range->spilled());
    2902    21772307 :   TRACE("Add live range %d:%d to unhandled unsorted at end\n",
    2903             :         range->TopLevel()->vreg(), range->relative_id());
    2904    21772307 :   unhandled_live_ranges().push_back(range);
    2905             : }
    2906             : 
    2907             : 
    2908   168875328 : static bool UnhandledSortHelper(LiveRange* a, LiveRange* b) {
    2909             :   DCHECK(!a->ShouldBeAllocatedBefore(b) || !b->ShouldBeAllocatedBefore(a));
    2910   161351462 :   if (a->ShouldBeAllocatedBefore(b)) return false;
    2911   113870316 :   if (b->ShouldBeAllocatedBefore(a)) return true;
    2912     7523866 :   return a->TopLevel()->vreg() < b->TopLevel()->vreg();
    2913             : }
    2914             : 
    2915             : 
    2916             : // Sort the unhandled live ranges so that the ranges to be processed first are
    2917             : // at the end of the array list.  This is convenient for the register allocation
    2918             : // algorithm because it is efficient to remove elements from the end.
    2919     1830128 : void LinearScanAllocator::SortUnhandled() {
    2920     1830128 :   TRACE("Sort unhandled\n");
    2921             :   std::sort(unhandled_live_ranges().begin(), unhandled_live_ranges().end(),
    2922     1830128 :             &UnhandledSortHelper);
    2923     1830170 : }
    2924             : 
    2925             : 
    2926           0 : bool LinearScanAllocator::UnhandledIsSorted() {
    2927           0 :   size_t len = unhandled_live_ranges().size();
    2928           0 :   for (size_t i = 1; i < len; i++) {
    2929           0 :     LiveRange* a = unhandled_live_ranges().at(i - 1);
    2930           0 :     LiveRange* b = unhandled_live_ranges().at(i);
    2931           0 :     if (a->Start() < b->Start()) return false;
    2932             :   }
    2933             :   return true;
    2934             : }
    2935             : 
    2936             : 
    2937    21438038 : void LinearScanAllocator::ActiveToHandled(LiveRange* range) {
    2938    21438038 :   RemoveElement(&active_live_ranges(), range);
    2939    21438041 :   TRACE("Moving live range %d:%d from active to handled\n",
    2940             :         range->TopLevel()->vreg(), range->relative_id());
    2941    21438041 : }
    2942             : 
    2943             : 
    2944    18366916 : void LinearScanAllocator::ActiveToInactive(LiveRange* range) {
    2945    18366916 :   RemoveElement(&active_live_ranges(), range);
    2946    18366928 :   inactive_live_ranges().push_back(range);
    2947    18366963 :   TRACE("Moving live range %d:%d from active to inactive\n",
    2948             :         range->TopLevel()->vreg(), range->relative_id());
    2949    18366963 : }
    2950             : 
    2951             : 
    2952     6273869 : void LinearScanAllocator::InactiveToHandled(LiveRange* range) {
    2953     6273869 :   RemoveElement(&inactive_live_ranges(), range);
    2954     6273785 :   TRACE("Moving live range %d:%d from inactive to handled\n",
    2955             :         range->TopLevel()->vreg(), range->relative_id());
    2956     6273785 : }
    2957             : 
    2958             : 
    2959    19515242 : void LinearScanAllocator::InactiveToActive(LiveRange* range) {
    2960    19515242 :   RemoveElement(&inactive_live_ranges(), range);
    2961    19515245 :   active_live_ranges().push_back(range);
    2962    19515240 :   TRACE("Moving live range %d:%d from inactive to active\n",
    2963             :         range->TopLevel()->vreg(), range->relative_id());
    2964    19515240 : }
    2965             : 
// Fetches the register count, allocatable count, and allocatable codes
// for |rep| on targets with non-simple FP aliasing. Only kFloat32 and
// kSimd128 are valid here; any other representation hits UNREACHABLE.
void LinearScanAllocator::GetFPRegisterSet(MachineRepresentation rep,
                                           int* num_regs, int* num_codes,
                                           const int** codes) const {
  DCHECK(!kSimpleFPAliasing);
  if (rep == MachineRepresentation::kFloat32) {
    *num_regs = data()->config()->num_float_registers();
    *num_codes = data()->config()->num_allocatable_float_registers();
    *codes = data()->config()->allocatable_float_codes();
  } else if (rep == MachineRepresentation::kSimd128) {
    *num_regs = data()->config()->num_simd128_registers();
    *num_codes = data()->config()->num_allocatable_simd128_registers();
    *codes = data()->config()->allocatable_simd128_codes();
  } else {
    UNREACHABLE();
  }
}
    2982             : 
// Fills |positions| with, for each register, the position up to which it
// is free with respect to |range|: MaxPosition when nothing interferes,
// position 0 when an active range occupies it, or the first intersection
// with an inactive range that holds it.
void LinearScanAllocator::FindFreeRegistersForRange(
    LiveRange* range, Vector<LifetimePosition> positions) {
  int num_regs = num_registers();
  int num_codes = num_allocatable_registers();
  const int* codes = allocatable_register_codes();
  MachineRepresentation rep = range->representation();
  // With non-simple FP aliasing, float32/simd128 use their own register
  // sets instead of the allocator's default (double) set.
  if (!kSimpleFPAliasing && (rep == MachineRepresentation::kFloat32 ||
                             rep == MachineRepresentation::kSimd128))
    GetFPRegisterSet(rep, &num_regs, &num_codes, &codes);
  DCHECK_GE(positions.length(), num_regs);

  // Start from "free forever" and tighten below.
  for (int i = 0; i < num_regs; ++i) {
    positions[i] = LifetimePosition::MaxPosition();
  }

  // Registers held by active ranges are not free at all (position 0).
  for (LiveRange* cur_active : active_live_ranges()) {
    int cur_reg = cur_active->assigned_register();
    if (kSimpleFPAliasing || !check_fp_aliasing()) {
      positions[cur_reg] = LifetimePosition::GapFromInstructionIndex(0);
      TRACE("Register %s is free until pos %d (1)\n", RegisterName(cur_reg),
            LifetimePosition::GapFromInstructionIndex(0).value());
    } else {
      // With aliasing, block every register of |rep| that overlaps the
      // active range's register.
      int alias_base_index = -1;
      int aliases = data()->config()->GetAliases(
          cur_active->representation(), cur_reg, rep, &alias_base_index);
      DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
      while (aliases--) {
        int aliased_reg = alias_base_index + aliases;
        positions[aliased_reg] = LifetimePosition::GapFromInstructionIndex(0);
      }
    }
  }

  // Registers held by inactive ranges are free until the first point the
  // inactive range and |range| actually overlap.
  for (LiveRange* cur_inactive : inactive_live_ranges()) {
    DCHECK(cur_inactive->End() > range->Start());
    int cur_reg = cur_inactive->assigned_register();
    // No need to carry out intersections, when this register won't be
    // interesting to this range anyway.
    // TODO(mtrofin): extend to aliased ranges, too.
    if ((kSimpleFPAliasing || !check_fp_aliasing()) &&
        positions[cur_reg] < range->Start()) {
      continue;
    }

    LifetimePosition next_intersection = cur_inactive->FirstIntersection(range);
    if (!next_intersection.IsValid()) continue;
    if (kSimpleFPAliasing || !check_fp_aliasing()) {
      positions[cur_reg] = Min(positions[cur_reg], next_intersection);
      TRACE("Register %s is free until pos %d (2)\n", RegisterName(cur_reg),
            Min(positions[cur_reg], next_intersection).value());
    } else {
      int alias_base_index = -1;
      int aliases = data()->config()->GetAliases(
          cur_inactive->representation(), cur_reg, rep, &alias_base_index);
      DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
      while (aliases--) {
        int aliased_reg = alias_base_index + aliases;
        positions[aliased_reg] = Min(positions[aliased_reg], next_intersection);
      }
    }
  }
}
    3045             : 
    3046             : // High-level register allocation summary:
    3047             : //
// For regular, or hot (i.e. not splinter) ranges, we first attempt to
// allocate the preferred (hint) register. If that is not possible,
    3050             : // we find a register that's free, and allocate that. If that's not possible,
    3051             : // we search for a register to steal from a range that was allocated. The
    3052             : // goal is to optimize for throughput by avoiding register-to-memory
    3053             : // moves, which are expensive.
    3054             : //
    3055             : // For splinters, the goal is to minimize the number of moves. First we try
    3056             : // to allocate the preferred register (more discussion follows). Failing that,
// we bail out and spill as far as we can, unless the first use is at start,
// in which case we apply the same behavior as we do for regular ranges.
    3059             : // If there is no hint, we apply the hot-path behavior.
    3060             : //
    3061             : // For the splinter, the hint register may come from:
    3062             : //
    3063             : // - the hot path (we set it at splintering time with SetHint). In this case, if
    3064             : // we cannot offer the hint register, spilling is better because it's at most
    3065             : // 1 move, while trying to find and offer another register is at least 1 move.
    3066             : //
// - a constraint. If we cannot offer that register, it's because there is
// some interference. So offering the hint register up to the interference
// would result in a move at the interference, plus a move to satisfy the
// constraint. This is
    3071             : // also the number of moves if we spill, with the potential of the range being
    3072             : // already spilled and thus saving a move (the spill).
    3073             : // Note that this can only be an input constraint, if it were an output one,
// the range wouldn't be a splinter because it means it'd be defined in a
// deferred block, and we don't mark those as splinters (they live in
// deferred blocks
    3077             : // only).
    3078             : //
    3079             : // - a phi. The same analysis as in the case of the input constraint applies.
    3080             : //
// Allocates a register for |current| (or spills it), following the policy
// documented above: preferred/hint register first, then the splinter
// spill heuristic, then any free register, and finally stealing a
// register from another range.
void LinearScanAllocator::ProcessCurrentRange(LiveRange* current) {
  // Stack-allocated scratch array; the constructor DCHECKs that
  // kMaxFPRegisters bounds the general register count too.
  LifetimePosition free_until_pos_buff[RegisterConfiguration::kMaxFPRegisters];
  Vector<LifetimePosition> free_until_pos(
      free_until_pos_buff, RegisterConfiguration::kMaxFPRegisters);
  FindFreeRegistersForRange(current, free_until_pos);
  if (!TryAllocatePreferredReg(current, free_until_pos)) {
    if (current->TopLevel()->IsSplinter()) {
      // Cold-path ranges prefer spilling over shuffling registers.
      if (TrySplitAndSpillSplinter(current)) return;
    }
    if (!TryAllocateFreeReg(current, free_until_pos)) {
      AllocateBlockedReg(current);
    }
  }
  // AllocateBlockedReg may have spilled |current| instead of assigning a
  // register; only register-holding ranges become active.
  if (current->HasRegisterAssigned()) {
    AddToActive(current);
  }
}
    3098             : 
// Tries to assign |current|'s hint register. Succeeds (returns true) only
// when a hint exists and that register stays free for the whole range.
bool LinearScanAllocator::TryAllocatePreferredReg(
    LiveRange* current, const Vector<LifetimePosition>& free_until_pos) {
  int hint_register;
  if (current->FirstHintPosition(&hint_register) != nullptr) {
    TRACE(
        "Found reg hint %s (free until [%d) for live range %d:%d (end %d[).\n",
        RegisterName(hint_register), free_until_pos[hint_register].value(),
        current->TopLevel()->vreg(), current->relative_id(),
        current->End().value());

    // The desired register is free until the end of the current live range.
    if (free_until_pos[hint_register] >= current->End()) {
      TRACE("Assigning preferred reg %s to live range %d:%d\n",
            RegisterName(hint_register), current->TopLevel()->vreg(),
            current->relative_id());
      SetLiveRangeAssignedRegister(current, hint_register);
      return true;
    }
  }
  return false;
}
    3120             : 
// Tries to place |current| in the register that stays free the longest.
// If that register is free only for a prefix of the range, the range is
// split at the blocking position and the tail is re-queued. Returns false
// when every register is already blocked at the range's start.
bool LinearScanAllocator::TryAllocateFreeReg(
    LiveRange* current, const Vector<LifetimePosition>& free_until_pos) {
  int num_regs = 0;  // used only for the call to GetFPRegisterSet.
  int num_codes = num_allocatable_registers();
  const int* codes = allocatable_register_codes();
  MachineRepresentation rep = current->representation();
  if (!kSimpleFPAliasing && (rep == MachineRepresentation::kFloat32 ||
                             rep == MachineRepresentation::kSimd128)) {
    GetFPRegisterSet(rep, &num_regs, &num_codes, &codes);
  }

  DCHECK_GE(free_until_pos.length(), num_codes);

  // Find the register which stays free for the longest time.
  int reg = codes[0];
  for (int i = 1; i < num_codes; ++i) {
    int code = codes[i];
    if (free_until_pos[code] > free_until_pos[reg]) {
      reg = code;
    }
  }

  LifetimePosition pos = free_until_pos[reg];

  if (pos <= current->Start()) {
    // All registers are blocked.
    return false;
  }

  if (pos < current->End()) {
    // Register reg is available at the range start but becomes blocked before
    // the range end. Split current at position where it becomes blocked.
    LiveRange* tail = SplitRangeAt(current, pos);
    AddToUnhandledSorted(tail);

    // Try to allocate preferred register once more.
    // (The shortened |current| may now fit entirely inside the hint
    // register's free window.)
    if (TryAllocatePreferredReg(current, free_until_pos)) return true;
  }

  // Register reg is available at the range start and is free until the range
  // end.
  DCHECK(pos >= current->End());
  TRACE("Assigning free reg %s to live range %d:%d\n", RegisterName(reg),
        current->TopLevel()->vreg(), current->relative_id());
  SetLiveRangeAssignedRegister(current, reg);

  return true;
}
    3169             : 
    3170     3286065 : void LinearScanAllocator::AllocateBlockedReg(LiveRange* current) {
    3171     3087072 :   UsePosition* register_use = current->NextRegisterPosition(current->Start());
    3172     3087085 :   if (register_use == nullptr) {
    3173             :     // There is no use in the current live range that requires a register.
    3174             :     // We can just spill it.
    3175     1526992 :     Spill(current);
    3176     1526992 :     return;
    3177             :   }
    3178             : 
    3179             :   int num_regs = num_registers();
    3180             :   int num_codes = num_allocatable_registers();
    3181             :   const int* codes = allocatable_register_codes();
    3182             :   MachineRepresentation rep = current->representation();
    3183             :   if (!kSimpleFPAliasing && (rep == MachineRepresentation::kFloat32 ||
    3184             :                              rep == MachineRepresentation::kSimd128))
    3185             :     GetFPRegisterSet(rep, &num_regs, &num_codes, &codes);
    3186             : 
    3187             :   // use_pos keeps track of positions a register/alias is used at.
    3188             :   // block_pos keeps track of positions where a register/alias is blocked
    3189             :   // from.
    3190    51482589 :   LifetimePosition use_pos[RegisterConfiguration::kMaxFPRegisters];
    3191    49922528 :   LifetimePosition block_pos[RegisterConfiguration::kMaxFPRegisters];
    3192    24961094 :   for (int i = 0; i < num_regs; i++) {
    3193    24961094 :     use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
    3194             :   }
    3195             : 
    3196    40903170 :   for (LiveRange* range : active_live_ranges()) {
    3197             :     int cur_reg = range->assigned_register();
    3198             :     bool is_fixed_or_cant_spill =
    3199    21482419 :         range->TopLevel()->IsFixed() || !range->CanBeSpilled(current->Start());
    3200             :     if (kSimpleFPAliasing || !check_fp_aliasing()) {
    3201    18891488 :       if (is_fixed_or_cant_spill) {
    3202             :         block_pos[cur_reg] = use_pos[cur_reg] =
    3203    16488311 :             LifetimePosition::GapFromInstructionIndex(0);
    3204             :       } else {
    3205             :         DCHECK_NE(LifetimePosition::GapFromInstructionIndex(0),
    3206             :                   block_pos[cur_reg]);
    3207             :         use_pos[cur_reg] =
    3208     2403177 :             range->NextLifetimePositionRegisterIsBeneficial(current->Start());
    3209             :       }
    3210             :     } else {
    3211             :       int alias_base_index = -1;
    3212             :       int aliases = data()->config()->GetAliases(
    3213             :           range->representation(), cur_reg, rep, &alias_base_index);
    3214             :       DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
    3215             :       while (aliases--) {
    3216             :         int aliased_reg = alias_base_index + aliases;
    3217             :         if (is_fixed_or_cant_spill) {
    3218             :           block_pos[aliased_reg] = use_pos[aliased_reg] =
    3219             :               LifetimePosition::GapFromInstructionIndex(0);
    3220             :         } else {
    3221             :           use_pos[aliased_reg] =
    3222             :               Min(block_pos[aliased_reg],
    3223             :                   range->NextLifetimePositionRegisterIsBeneficial(
    3224             :                       current->Start()));
    3225             :         }
    3226             :       }
    3227             :     }
    3228             :   }
    3229             : 
    3230    10656982 :   for (LiveRange* range : inactive_live_ranges()) {
    3231             :     DCHECK(range->End() > current->Start());
    3232             :     int cur_reg = range->assigned_register();
    3233     3768412 :     bool is_fixed = range->TopLevel()->IsFixed();
    3234             : 
    3235             :     // Don't perform costly intersections if they are guaranteed to not update
    3236             :     // block_pos or use_pos.
    3237             :     // TODO(mtrofin): extend to aliased ranges, too.
    3238             :     if ((kSimpleFPAliasing || !check_fp_aliasing())) {
    3239     3768412 :       if (is_fixed) {
    3240     8299528 :         if (block_pos[cur_reg] < range->Start()) continue;
    3241             :       } else {
    3242     2655266 :         if (use_pos[cur_reg] < range->Start()) continue;
    3243             :       }
    3244             :     }
    3245             : 
    3246     2391882 :     LifetimePosition next_intersection = range->FirstIntersection(current);
    3247     2391882 :     if (!next_intersection.IsValid()) continue;
    3248             : 
    3249             :     if (kSimpleFPAliasing || !check_fp_aliasing()) {
    3250      350442 :       if (is_fixed) {
    3251      341074 :         block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
    3252      341074 :         use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
    3253             :       } else {
    3254        9368 :         use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
    3255             :       }
    3256             :     } else {
    3257             :       int alias_base_index = -1;
    3258             :       int aliases = data()->config()->GetAliases(
    3259             :           range->representation(), cur_reg, rep, &alias_base_index);
    3260             :       DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
    3261             :       while (aliases--) {
    3262             :         int aliased_reg = alias_base_index + aliases;
    3263             :         if (is_fixed) {
    3264             :           block_pos[aliased_reg] =
    3265             :               Min(block_pos[aliased_reg], next_intersection);
    3266             :           use_pos[aliased_reg] =
    3267             :               Min(block_pos[aliased_reg], use_pos[aliased_reg]);
    3268             :         } else {
    3269             :           use_pos[aliased_reg] = Min(use_pos[aliased_reg], next_intersection);
    3270             :         }
    3271             :       }
    3272             :     }
    3273             :   }
    3274             : 
    3275     1560079 :   int reg = codes[0];
    3276    18891511 :   for (int i = 1; i < num_codes; ++i) {
    3277    17331432 :     int code = codes[i];
    3278    34662864 :     if (use_pos[code] > use_pos[reg]) {
    3279             :       reg = code;
    3280             :     }
    3281             :   }
    3282             : 
    3283     3120158 :   if (use_pos[reg] < register_use->pos()) {
    3284             :     // If there is a gap position before the next register use, we can
    3285             :     // spill until there. The gap position will then fit the fill move.
    3286     1361085 :     if (LifetimePosition::ExistsGapPositionBetween(current->Start(),
    3287             :                                                    register_use->pos())) {
    3288             :       SpillBetween(current, current->Start(), register_use->pos());
    3289             :       return;
    3290             :     }
    3291             :   }
    3292             : 
    3293             :   // We couldn't spill until the next register use. Split before the register
    3294             :   // is blocked, if applicable.
    3295      397986 :   if (block_pos[reg] < current->End()) {
    3296             :     // Register becomes blocked before the current range end. Split before that
    3297             :     // position.
    3298             :     LiveRange* tail =
    3299       14023 :         SplitBetween(current, current->Start(), block_pos[reg].Start());
    3300       14023 :     AddToUnhandledSorted(tail);
    3301             :   }
    3302             : 
    3303             :   // Register reg is not blocked for the whole range.
    3304             :   DCHECK(block_pos[reg] >= current->End());
    3305      198991 :   TRACE("Assigning blocked reg %s to live range %d:%d\n", RegisterName(reg),
    3306             :         current->TopLevel()->vreg(), current->relative_id());
    3307      198991 :   SetLiveRangeAssignedRegister(current, reg);
    3308             : 
    3309             :   // This register was not free. Thus we need to find and spill
    3310             :   // parts of active and inactive live regions that use the same register
    3311             :   // at the same lifetime positions as current.
    3312      198991 :   SplitAndSpillIntersecting(current);
    3313             : }
    3314             : 
    3315             : 
// After |current| has been force-assigned a blocked register (see the caller),
// split and spill the parts of active and inactive live ranges that occupy
// the same register (or, with FP aliasing, an aliasing one) so that |current|
// can actually hold it from its start onwards.
void LinearScanAllocator::SplitAndSpillIntersecting(LiveRange* current) {
  DCHECK(current->HasRegisterAssigned());
  int reg = current->assigned_register();
  LifetimePosition split_pos = current->Start();
  for (size_t i = 0; i < active_live_ranges().size(); ++i) {
    LiveRange* range = active_live_ranges()[i];
    if (kSimpleFPAliasing || !check_fp_aliasing()) {
      // Without FP aliasing, only an exact register match can conflict.
      if (range->assigned_register() != reg) continue;
    } else {
      if (!data()->config()->AreAliases(current->representation(), reg,
                                        range->representation(),
                                        range->assigned_register())) {
        continue;
      }
    }

    UsePosition* next_pos = range->NextRegisterPosition(current->Start());
    LifetimePosition spill_pos = FindOptimalSpillingPos(range, split_pos);
    if (next_pos == nullptr) {
      // The conflicting range never needs a register again: spill its tail.
      SpillAfter(range, spill_pos);
    } else {
      // When spilling between spill_pos and next_pos ensure that the range
      // remains spilled at least until the start of the current live range.
      // This guarantees that we will not introduce new unhandled ranges that
      // start before the current range as this violates allocation invariants
      // and will lead to an inconsistent state of active and inactive
      // live-ranges: ranges are allocated in order of their start positions,
      // ranges are retired from active/inactive when the start of the
      // current live-range is larger than their end.
      DCHECK(LifetimePosition::ExistsGapPositionBetween(current->Start(),
                                                        next_pos->pos()));
      SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos());
    }
    // ActiveToHandled removes the element at index i; step the index back so
    // the element shifted into this slot is not skipped.
    ActiveToHandled(range);
    --i;
  }

  for (size_t i = 0; i < inactive_live_ranges().size(); ++i) {
    LiveRange* range = inactive_live_ranges()[i];
    DCHECK(range->End() > current->Start());
    // Fixed ranges are never split or spilled here; skip them.
    if (range->TopLevel()->IsFixed()) continue;
    if (kSimpleFPAliasing || !check_fp_aliasing()) {
      if (range->assigned_register() != reg) continue;
    } else {
      if (!data()->config()->AreAliases(current->representation(), reg,
                                        range->representation(),
                                        range->assigned_register()))
        continue;
    }

    // Only inactive ranges that actually intersect |current| conflict.
    LifetimePosition next_intersection = range->FirstIntersection(current);
    if (next_intersection.IsValid()) {
      UsePosition* next_pos = range->NextRegisterPosition(current->Start());
      if (next_pos == nullptr) {
        SpillAfter(range, split_pos);
      } else {
        next_intersection = Min(next_intersection, next_pos->pos());
        SpillBetween(range, split_pos, next_intersection);
      }
      // Same in-loop removal index fix-up as for the active list above.
      InactiveToHandled(range);
      --i;
    }
  }
}
    3380             : 
    3381             : 
// Tries to spill a phi range into the spill slot already shared by a majority
// of its spilled operands, so the phi needs no slot of its own and no extra
// moves at block entry. Returns true iff |range| was spilled this way.
bool LinearScanAllocator::TryReuseSpillForPhi(TopLevelLiveRange* range) {
  if (!range->is_phi()) return false;

  DCHECK(!range->HasSpillOperand());
  RegisterAllocationData::PhiMapValue* phi_map_value =
      data()->GetPhiMapValueFor(range);
  const PhiInstruction* phi = phi_map_value->phi();
  const InstructionBlock* block = phi_map_value->block();
  // Count the number of spilled operands.
  size_t spilled_count = 0;
  LiveRange* first_op = nullptr;
  for (size_t i = 0; i < phi->operands().size(); i++) {
    int op = phi->operands()[i];
    LiveRange* op_range = data()->GetOrCreateLiveRangeFor(op);
    if (!op_range->TopLevel()->HasSpillRange()) continue;
    const InstructionBlock* pred =
        code()->InstructionBlockAt(block->predecessors()[i]);
    LifetimePosition pred_end =
        LifetimePosition::InstructionFromInstructionIndex(
            pred->last_instruction_index());
    // Walk to the child range that is live at the end of the predecessor:
    // that is the value which flows into the phi along this edge.
    while (op_range != nullptr && !op_range->CanCover(pred_end)) {
      op_range = op_range->next();
    }
    if (op_range != nullptr && op_range->spilled()) {
      spilled_count++;
      if (first_op == nullptr) {
        first_op = op_range->TopLevel();
      }
    }
  }

  // Only continue if more than half of the operands are spilled.
  if (spilled_count * 2 <= phi->operands().size()) {
    return false;
  }

  // Try to merge the spilled operands and count the number of merged spilled
  // operands.
  DCHECK(first_op != nullptr);
  SpillRange* first_op_spill = first_op->TopLevel()->GetSpillRange();
  size_t num_merged = 1;
  for (size_t i = 1; i < phi->operands().size(); i++) {
    int op = phi->operands()[i];
    TopLevelLiveRange* op_range = data()->live_ranges()[op];
    if (!op_range->HasSpillRange()) continue;
    SpillRange* op_spill = op_range->GetSpillRange();
    if (op_spill == first_op_spill || first_op_spill->TryMerge(op_spill)) {
      num_merged++;
    }
  }

  // Only continue if enough operands could be merged to the
  // same spill slot.
  if (num_merged * 2 <= phi->operands().size() ||
      AreUseIntervalsIntersecting(first_op_spill->interval(),
                                  range->first_interval())) {
    return false;
  }

  // If the range does not need register soon, spill it to the merged
  // spill range.
  LifetimePosition next_pos = range->Start();
  if (next_pos.IsGapPosition()) next_pos = next_pos.NextStart();
  UsePosition* pos = range->NextUsePositionRegisterIsBeneficial(next_pos);
  if (pos == nullptr) {
    // No beneficial register use at all: spill the whole range into the
    // merged slot.
    SpillRange* spill_range =
        range->TopLevel()->HasSpillRange()
            ? range->TopLevel()->GetSpillRange()
            : data()->AssignSpillRangeToLiveRange(range->TopLevel());
    bool merged = first_op_spill->TryMerge(spill_range);
    if (!merged) return false;
    Spill(range);
    return true;
  } else if (pos->pos() > range->Start().NextStart()) {
    // The first beneficial register use is not immediate: spill up to that
    // use, keeping the remainder for normal allocation.
    SpillRange* spill_range =
        range->TopLevel()->HasSpillRange()
            ? range->TopLevel()->GetSpillRange()
            : data()->AssignSpillRangeToLiveRange(range->TopLevel());
    bool merged = first_op_spill->TryMerge(spill_range);
    if (!merged) return false;
    SpillBetween(range, range->Start(), pos->pos());
    DCHECK(UnhandledIsSorted());
    return true;
  }
  return false;
}
    3468             : 
    3469             : 
    3470      172557 : void LinearScanAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
    3471      172557 :   LiveRange* second_part = SplitRangeAt(range, pos);
    3472      172557 :   Spill(second_part);
    3473      172557 : }
    3474             : 
    3475             : 
    3476           0 : void LinearScanAllocator::SpillBetween(LiveRange* range, LifetimePosition start,
    3477             :                                        LifetimePosition end) {
    3478     1364174 :   SpillBetweenUntil(range, start, start, end);
    3479           0 : }
    3480             : 
    3481             : 
// Splits |range| at |start|, spills the middle piece, and re-queues the part
// after |end| as unhandled. The spilled piece extends at least to |until|,
// which callers use to keep allocation invariants (no new unhandled range may
// start before the range currently being allocated).
void LinearScanAllocator::SpillBetweenUntil(LiveRange* range,
                                            LifetimePosition start,
                                            LifetimePosition until,
                                            LifetimePosition end) {
  CHECK(start < end);
  LiveRange* second_part = SplitRangeAt(range, start);

  if (second_part->Start() < end) {
    // The split result intersects with [start, end[.
    // Split it at position between ]start+1, end[, spill the middle part
    // and put the rest to unhandled.
    LifetimePosition third_part_end = end.PrevStart().End();
    if (data()->IsBlockBoundary(end.Start())) {
      third_part_end = end.Start();
    }
    LiveRange* third_part = SplitBetween(
        second_part, Max(second_part->Start().End(), until), third_part_end);

    DCHECK(third_part != second_part);

    Spill(second_part);
    AddToUnhandledSorted(third_part);
  } else {
    // The split result does not intersect with [start, end[.
    // Nothing to spill. Just put it to unhandled as whole.
    AddToUnhandledSorted(second_part);
  }
}
    3510             : 
    3511             : 
// Phase object that marks which instruction blocks need a stack frame
// because spill moves are inserted into them (see LocateSpillSlots).
SpillSlotLocator::SpillSlotLocator(RegisterAllocationData* data)
    : data_(data) {}
    3514             : 
    3515             : 
    3516      915057 : void SpillSlotLocator::LocateSpillSlots() {
    3517      915057 :   const InstructionSequence* code = data()->code();
    3518    53653535 :   for (TopLevelLiveRange* range : data()->live_ranges()) {
    3519    70522359 :     if (range == nullptr || range->IsEmpty()) continue;
    3520             :     // We care only about ranges which spill in the frame.
    3521    24935720 :     if (!range->HasSpillRange() || range->IsSpilledOnlyInDeferredBlocks()) {
    3522             :       continue;
    3523             :     }
    3524             :     TopLevelLiveRange::SpillMoveInsertionList* spills =
    3525             :         range->GetSpillMoveInsertionLocations();
    3526             :     DCHECK_NOT_NULL(spills);
    3527     4020295 :     for (; spills != nullptr; spills = spills->next) {
    3528     2010124 :       code->GetInstructionBlock(spills->gap_index)->mark_needs_frame();
    3529             :     }
    3530             :   }
    3531      915104 : }
    3532             : 
    3533             : 
    3534     1830179 : OperandAssigner::OperandAssigner(RegisterAllocationData* data) : data_(data) {}
    3535             : 
    3536             : 
    3537      915067 : void OperandAssigner::AssignSpillSlots() {
    3538             :   ZoneVector<SpillRange*>& spill_ranges = data()->spill_ranges();
    3539             :   // Merge disjoint spill ranges
    3540    49002808 :   for (size_t i = 0; i < spill_ranges.size(); ++i) {
    3541   165111432 :     SpillRange* range = spill_ranges[i];
    3542    23586325 :     if (range == nullptr) continue;
    3543     2640825 :     if (range->IsEmpty()) continue;
    3544   234047406 :     for (size_t j = i + 1; j < spill_ranges.size(); ++j) {
    3545   115525146 :       SpillRange* other = spill_ranges[j];
    3546   134643584 :       if (other != nullptr && !other->IsEmpty()) {
    3547    15165045 :         range->TryMerge(other);
    3548             :       }
    3549             :     }
    3550             :   }
    3551             :   // Allocate slots for the merged spill ranges.
    3552    50646606 :   for (SpillRange* range : spill_ranges) {
    3553    26226894 :     if (range == nullptr || range->IsEmpty()) continue;
    3554             :     // Allocate a new operand referring to the spill slot.
    3555     1498503 :     if (!range->HasSlot()) {
    3556     1060792 :       int index = data()->frame()->AllocateSpillSlot(range->byte_width());
    3557             :       range->set_assigned_slot(index);
    3558             :     }
    3559             :   }
    3560      915079 : }
    3561             : 
    3562             : 
// Writes allocation results back into the instruction stream: commits phi
// assignments, rewrites every use position to its assigned operand, and
// inserts spill moves at definitions where required.
void OperandAssigner::CommitAssignment() {
  for (TopLevelLiveRange* top_range : data()->live_ranges()) {
    if (top_range == nullptr || top_range->IsEmpty()) continue;
    InstructionOperand spill_operand;
    // Determine the canonical spill location for this range, if it has one.
    if (top_range->HasSpillOperand()) {
      spill_operand = *top_range->TopLevel()->GetSpillOperand();
    } else if (top_range->TopLevel()->HasSpillRange()) {
      spill_operand = top_range->TopLevel()->GetSpillRangeOperand();
    }
    if (top_range->is_phi()) {
      data()->GetPhiMapValueFor(top_range)->CommitAssignment(
          top_range->GetAssignedOperand());
    }
    // Rewrite the uses of every child range to its assigned operand.
    for (LiveRange* range = top_range; range != nullptr;
         range = range->next()) {
      InstructionOperand assigned = range->GetAssignedOperand();
      range->ConvertUsesToOperand(assigned, spill_operand);
    }

    if (!spill_operand.IsInvalid()) {
      // If this top level range has a child spilled in a deferred block, we use
      // the range and control flow connection mechanism instead of spilling at
      // definition. Refer to the ConnectLiveRanges and ResolveControlFlow
      // phases. Normally, when we spill at definition, we do not insert a
      // connecting move when a successor child range is spilled - because the
      // spilled range picks up its value from the slot which was assigned at
      // definition. For ranges that are determined to spill only in deferred
      // blocks, we let ConnectLiveRanges and ResolveControlFlow find the blocks
      // where a spill operand is expected, and then finalize by inserting the
      // spills in the deferred blocks dominators.
      if (!top_range->IsSpilledOnlyInDeferredBlocks()) {
        // Spill at definition if the range isn't spilled only in deferred
        // blocks.
        top_range->CommitSpillMoves(
            data()->code(), spill_operand,
            top_range->has_slot_use() || top_range->spilled());
      }
    }
  }
}
    3603             : 
    3604             : 
// Phase object that records pointers in reference (safe-point) maps once
// allocation is final (see PopulateReferenceMaps).
ReferenceMapPopulator::ReferenceMapPopulator(RegisterAllocationData* data)
    : data_(data) {}
    3607             : 
    3608             : 
    3609           0 : bool ReferenceMapPopulator::SafePointsAreInOrder() const {
    3610             :   int safe_point = 0;
    3611           0 :   for (ReferenceMap* map : *data()->code()->reference_maps()) {
    3612           0 :     if (safe_point > map->instruction_position()) return false;
    3613             :     safe_point = map->instruction_position();
    3614             :   }
    3615           0 :   return true;
    3616             : }
    3617             : 
    3618             : 
// For every safe point, records which tagged (reference) values are live
// there and where they reside (stack slot and/or register), so the GC can
// find and update them.
void ReferenceMapPopulator::PopulateReferenceMaps() {
  DCHECK(SafePointsAreInOrder());
  // Map all delayed references.
  for (RegisterAllocationData::DelayedReference& delayed_reference :
       data()->delayed_references()) {
    delayed_reference.map->RecordReference(
        AllocatedOperand::cast(*delayed_reference.operand));
  }
  // Iterate over all safe point positions and record a pointer
  // for all spilled live ranges at this point.
  int last_range_start = 0;
  const ReferenceMapDeque* reference_maps = data()->code()->reference_maps();
  ReferenceMapDeque::const_iterator first_it = reference_maps->begin();
  for (TopLevelLiveRange* range : data()->live_ranges()) {
    if (range == nullptr) continue;
    // Skip non-reference values.
    if (!data()->IsReference(range)) continue;
    // Skip empty live ranges.
    if (range->IsEmpty()) continue;
    if (range->has_preassigned_slot()) continue;

    // Find the extent of the range and its children.
    int start = range->Start().ToInstructionIndex();
    int end = 0;
    for (LiveRange* cur = range; cur != nullptr; cur = cur->next()) {
      LifetimePosition this_end = cur->End();
      if (this_end.ToInstructionIndex() > end)
        end = this_end.ToInstructionIndex();
      DCHECK(cur->Start().ToInstructionIndex() >= start);
    }

    // Most of the ranges are in order, but not all.  Keep an eye on when they
    // step backwards and reset the first_it so we don't miss any safe points.
    if (start < last_range_start) first_it = reference_maps->begin();
    last_range_start = start;

    // Step across all the safe points that are before the start of this range,
    // recording how far we step in order to save doing this for the next range.
    for (; first_it != reference_maps->end(); ++first_it) {
      ReferenceMap* map = *first_it;
      if (map->instruction_position() >= start) break;
    }

    // Determine the spill location (if any) used while parts of the range are
    // spilled; constant spill operands carry no heap pointer to record.
    InstructionOperand spill_operand;
    if (((range->HasSpillOperand() &&
          !range->GetSpillOperand()->IsConstant()) ||
         range->HasSpillRange())) {
      if (range->HasSpillOperand()) {
        spill_operand = *range->GetSpillOperand();
      } else {
        spill_operand = range->GetSpillRangeOperand();
      }
      DCHECK(spill_operand.IsStackSlot());
      DCHECK(CanBeTaggedPointer(
          AllocatedOperand::cast(spill_operand).representation()));
    }

    LiveRange* cur = range;
    // Step through the safe points to see whether they are in the range.
    for (auto it = first_it; it != reference_maps->end(); ++it) {
      ReferenceMap* map = *it;
      int safe_point = map->instruction_position();

      // The safe points are sorted so we can stop searching here.
      if (safe_point - 1 > end) break;

      // Advance to the next active range that covers the current
      // safe point position.
      LifetimePosition safe_point_pos =
          LifetimePosition::InstructionFromInstructionIndex(safe_point);

      // Search for the child range (cur) that covers safe_point_pos. If we
      // don't find it before the children pass safe_point_pos, keep cur at
      // the last child, because the next safe_point_pos may be covered by cur.
      // This may happen if cur has more than one interval, and the current
      // safe_point_pos is in between intervals.
      // For that reason, cur may be at most the last child.
      DCHECK_NOT_NULL(cur);
      DCHECK(safe_point_pos >= cur->Start() || range == cur);
      bool found = false;
      while (!found) {
        if (cur->Covers(safe_point_pos)) {
          found = true;
        } else {
          LiveRange* next = cur->next();
          if (next == nullptr || next->Start() > safe_point_pos) {
            break;
          }
          cur = next;
        }
      }

      // No child covers this safe point: the value is not live here.
      if (!found) {
        continue;
      }

      // Check if the live range is spilled and the safe point is after
      // the spill position.
      int spill_index = range->IsSpilledOnlyInDeferredBlocks()
                            ? cur->Start().ToInstructionIndex()
                            : range->spill_start_index();

      if (!spill_operand.IsInvalid() && safe_point >= spill_index) {
        TRACE("Pointer for range %d (spilled at %d) at safe point %d\n",
              range->vreg(), spill_index, safe_point);
        map->RecordReference(AllocatedOperand::cast(spill_operand));
      }

      // If the covering child is in a register, record that location too.
      if (!cur->spilled()) {
        TRACE(
            "Pointer in register for range %d:%d (start at %d) "
            "at safe point %d\n",
            range->vreg(), cur->relative_id(), cur->Start().value(),
            safe_point);
        InstructionOperand operand = cur->GetAssignedOperand();
        DCHECK(!operand.IsStackSlot());
        DCHECK(CanBeTaggedPointer(
            AllocatedOperand::cast(operand).representation()));
        map->RecordReference(AllocatedOperand::cast(operand));
      }
    }
  }
}
    3742             : 
    3743             : 
// Phase object that inserts moves connecting live-range pieces across block
// boundaries and split points (see ResolveControlFlow / ConnectRanges).
LiveRangeConnector::LiveRangeConnector(RegisterAllocationData* data)
    : data_(data) {}
    3746             : 
    3747             : 
    3748           0 : bool LiveRangeConnector::CanEagerlyResolveControlFlow(
    3749             :     const InstructionBlock* block) const {
    3750    12176020 :   if (block->PredecessorCount() != 1) return false;
    3751           0 :   return block->predecessors()[0].IsNext(block->rpo_number());
    3752             : }
    3753             : 
    3754             : 
// For every block that cannot be eagerly resolved, inserts moves on the
// incoming control-flow edges so the location each live-in value occupies at
// the end of a predecessor matches the location expected at the start of the
// block. Along the way it records deferred blocks that will need an explicit
// spill operand, and finally commits those spills via
// CommitSpillsInDeferredBlocks.
void LiveRangeConnector::ResolveControlFlow(Zone* local_zone) {
  // Lazily linearize live ranges in memory for fast lookup.
  LiveRangeFinder finder(data(), local_zone);
  ZoneVector<BitVector*>& live_in_sets = data()->live_in_sets();
  for (const InstructionBlock* block : code()->instruction_blocks()) {
    // Single fall-through predecessor: nothing can differ across the edge.
    if (CanEagerlyResolveControlFlow(block)) continue;
    BitVector* live = live_in_sets[block->rpo_number().ToInt()];
    BitVector::Iterator iterator(live);
    while (!iterator.Done()) {
      int vreg = iterator.Current();
      LiveRangeBoundArray* array = finder.ArrayFor(vreg);
      for (const RpoNumber& pred : block->predecessors()) {
        FindResult result;
        const InstructionBlock* pred_block = code()->InstructionBlockAt(pred);
        // Locate the child ranges covering the predecessor end and this
        // block's start; if there is no connectable pair, skip the edge.
        if (!array->FindConnectableSubranges(block, pred_block, &result)) {
          continue;
        }
        InstructionOperand pred_op = result.pred_cover_->GetAssignedOperand();
        InstructionOperand cur_op = result.cur_cover_->GetAssignedOperand();
        // Same assigned location on both sides: no move needed.
        if (pred_op.Equals(cur_op)) continue;
        if (!pred_op.IsAnyRegister() && cur_op.IsAnyRegister()) {
          // We're doing a reload.
          // We don't need to, if:
          // 1) there's no register use in this block, and
          // 2) the range ends before the block does, and
          // 3) we don't have a successor, or the successor is spilled.
          LifetimePosition block_start =
              LifetimePosition::GapFromInstructionIndex(block->code_start());
          LifetimePosition block_end =
              LifetimePosition::GapFromInstructionIndex(block->code_end());
          const LiveRange* current = result.cur_cover_;
          const LiveRange* successor = current->next();
          if (current->End() < block_end &&
              (successor == nullptr || successor->spilled())) {
            // verify point 1: no register use. We can go to the end of the
            // range, since it's all within the block.

            bool uses_reg = false;
            for (const UsePosition* use = current->NextUsePosition(block_start);
                 use != nullptr; use = use->next()) {
              if (use->operand()->IsAnyRegister()) {
                uses_reg = true;
                break;
              }
            }
            // All three conditions hold: the reload is dead, elide it.
            if (!uses_reg) continue;
          }
          if (current->TopLevel()->IsSpilledOnlyInDeferredBlocks() &&
              pred_block->IsDeferred()) {
            // The spill location should be defined in pred_block, so add
            // pred_block to the list of blocks requiring a spill operand.
            current->TopLevel()->GetListOfBlocksRequiringSpillOperands()->Add(
                pred_block->rpo_number().ToInt());
          }
        }
        int move_loc = ResolveControlFlow(block, cur_op, pred_block, pred_op);
        USE(move_loc);
        // Reloads/spills of deferred-only-spilled ranges must themselves land
        // in deferred code.
        DCHECK_IMPLIES(
            result.cur_cover_->TopLevel()->IsSpilledOnlyInDeferredBlocks() &&
                !(pred_op.IsAnyRegister() && cur_op.IsAnyRegister()),
            code()->GetInstructionBlock(move_loc)->IsDeferred());
      }
      iterator.Advance();
    }
  }

  // At this stage, we collected blocks needing a spill operand from
  // ConnectRanges and from ResolveControlFlow. Time to commit the spills for
  // deferred blocks.
  for (TopLevelLiveRange* top : data()->live_ranges()) {
    if (top == nullptr || top->IsEmpty() ||
        !top->IsSpilledOnlyInDeferredBlocks())
      continue;
    CommitSpillsInDeferredBlocks(top, finder.ArrayFor(top->vreg()), local_zone);
  }
}
    3831             : 
    3832             : 
    3833     1658690 : int LiveRangeConnector::ResolveControlFlow(const InstructionBlock* block,
    3834             :                                            const InstructionOperand& cur_op,
    3835      653392 :                                            const InstructionBlock* pred,
    3836             :                                            const InstructionOperand& pred_op) {
    3837             :   DCHECK(!pred_op.Equals(cur_op));
    3838             :   int gap_index;
    3839             :   Instruction::GapPosition position;
    3840     1156041 :   if (block->PredecessorCount() == 1) {
    3841             :     gap_index = block->first_instruction_index();
    3842             :     position = Instruction::START;
    3843             :   } else {
    3844             :     DCHECK(pred->SuccessorCount() == 1);
    3845             :     DCHECK(!code()
    3846             :                 ->InstructionAt(pred->last_instruction_index())
    3847             :                 ->HasReferenceMap());
    3848             :     gap_index = pred->last_instruction_index();
    3849             :     position = Instruction::END;
    3850             :   }
    3851     1156041 :   data()->AddGapMove(gap_index, position, pred_op, cur_op);
    3852     1156041 :   return gap_index;
    3853             : }
    3854             : 
// Inserts gap moves between consecutive child ranges of the same top-level
// live range whose assigned locations differ at a split point that is not a
// resolvable block boundary (block boundaries are handled by
// ResolveControlFlow). Moves that must be placed *after* existing moves in
// the same gap are parked in |delayed_insertion_map| and committed at the
// end.
void LiveRangeConnector::ConnectRanges(Zone* local_zone) {
  DelayedInsertionMap delayed_insertion_map(local_zone);
  for (TopLevelLiveRange* top_range : data()->live_ranges()) {
    if (top_range == nullptr) continue;
    bool connect_spilled = top_range->IsSpilledOnlyInDeferredBlocks();
    LiveRange* first_range = top_range;
    // Walk adjacent (first_range, second_range) pairs along the child chain.
    for (LiveRange *second_range = first_range->next(); second_range != nullptr;
         first_range = second_range, second_range = second_range->next()) {
      LifetimePosition pos = second_range->Start();
      // Add gap move if the two live ranges touch and there is no block
      // boundary.
      if (second_range->spilled()) continue;
      if (first_range->End() != pos) continue;
      if (data()->IsBlockBoundary(pos) &&
          !CanEagerlyResolveControlFlow(GetInstructionBlock(code(), pos))) {
        continue;
      }
      InstructionOperand prev_operand = first_range->GetAssignedOperand();
      InstructionOperand cur_operand = second_range->GetAssignedOperand();
      if (prev_operand.Equals(cur_operand)) continue;
      bool delay_insertion = false;
      Instruction::GapPosition gap_pos;
      int gap_index = pos.ToInstructionIndex();
      // Slot -> register transition of a deferred-only-spilled range: this is
      // a reload, so the block must define the spill operand.
      if (connect_spilled && !prev_operand.IsAnyRegister() &&
          cur_operand.IsAnyRegister()) {
        const InstructionBlock* block = code()->GetInstructionBlock(gap_index);
        DCHECK(block->IsDeferred());
        // Performing a reload in this block, meaning the spill operand must
        // be defined here.
        top_range->GetListOfBlocksRequiringSpillOperands()->Add(
            block->rpo_number().ToInt());
      }

      if (pos.IsGapPosition()) {
        gap_pos = pos.IsStart() ? Instruction::START : Instruction::END;
      } else {
        // Split at an instruction (non-gap) position: a START split is
        // delayed (placed after existing END moves of this instruction),
        // an END split goes into the START gap of the next instruction.
        if (pos.IsStart()) {
          delay_insertion = true;
        } else {
          gap_index++;
        }
        gap_pos = delay_insertion ? Instruction::END : Instruction::START;
      }
      // Reloads or spills for spilled in deferred blocks ranges must happen
      // only in deferred blocks.
      DCHECK_IMPLIES(
          connect_spilled &&
              !(prev_operand.IsAnyRegister() && cur_operand.IsAnyRegister()),
          code()->GetInstructionBlock(gap_index)->IsDeferred());

      ParallelMove* move =
          code()->InstructionAt(gap_index)->GetOrCreateParallelMove(
              gap_pos, code_zone());
      if (!delay_insertion) {
        move->AddMove(prev_operand, cur_operand);
      } else {
        delayed_insertion_map.insert(
            std::make_pair(std::make_pair(move, prev_operand), cur_operand));
      }
    }
  }
  if (delayed_insertion_map.empty()) return;
  // Insert all the moves which should occur after the stored move.
  ZoneVector<MoveOperands*> to_insert(local_zone);
  ZoneVector<MoveOperands*> to_eliminate(local_zone);
  to_insert.reserve(4);
  to_eliminate.reserve(4);
  // Process the delayed entries one ParallelMove at a time; a group ends
  // whenever the key's ParallelMove pointer changes (or the map is
  // exhausted), at which point the collected eliminations and insertions are
  // committed together.
  ParallelMove* moves = delayed_insertion_map.begin()->first.first;
  for (auto it = delayed_insertion_map.begin();; ++it) {
    bool done = it == delayed_insertion_map.end();
    if (done || it->first.first != moves) {
      // Commit the MoveOperands for current ParallelMove.
      for (MoveOperands* move : to_eliminate) {
        move->Eliminate();
      }
      for (MoveOperands* move : to_insert) {
        moves->push_back(move);
      }
      if (done) break;
      // Reset state.
      to_eliminate.clear();
      to_insert.clear();
      moves = it->first.first;
    }
    // Gather all MoveOperands for a single ParallelMove.
    MoveOperands* move =
        new (code_zone()) MoveOperands(it->first.second, it->second);
    moves->PrepareInsertAfter(move, &to_eliminate);
    to_insert.push_back(move);
  }
}
    3946             : 
    3947             : 
// Emits the spill moves for a range that is spilled only in deferred code.
// Starting from every block recorded as needing the spill operand, walks
// backwards through deferred predecessors and, at each edge entering from
// non-deferred code, inserts a gap move from the value's assigned location
// at the predecessor's end into the range's spill slot.
void LiveRangeConnector::CommitSpillsInDeferredBlocks(
    TopLevelLiveRange* range, LiveRangeBoundArray* array, Zone* temp_zone) {
  DCHECK(range->IsSpilledOnlyInDeferredBlocks());
  DCHECK(!range->spilled());

  InstructionSequence* code = data()->code();
  InstructionOperand spill_operand = range->GetSpillRangeOperand();

  TRACE("Live Range %d will be spilled only in deferred blocks.\n",
        range->vreg());
  // If we have ranges that aren't spilled but require the operand on the stack,
  // make sure we insert the spill.
  for (const LiveRange* child = range; child != nullptr;
       child = child->next()) {
    for (const UsePosition* pos = child->first_pos(); pos != nullptr;
         pos = pos->next()) {
      if (pos->type() != UsePositionType::kRequiresSlot && !child->spilled())
        continue;
      range->AddBlockRequiringSpillOperand(
          code->GetInstructionBlock(pos->pos().ToInstructionIndex())
              ->rpo_number());
    }
  }

  // Seed the worklist with every block known to need the spill operand.
  ZoneQueue<int> worklist(temp_zone);

  for (BitVector::Iterator iterator(
           range->GetListOfBlocksRequiringSpillOperands());
       !iterator.Done(); iterator.Advance()) {
    worklist.push(iterator.Current());
  }

  // |done_moves| de-duplicates (block, vreg) pairs so at most one spill move
  // is emitted per block for this range.
  ZoneSet<std::pair<RpoNumber, int>> done_moves(temp_zone);
  // Seek the deferred blocks that dominate locations requiring spill operands,
  // and spill there. We only need to spill at the start of such blocks.
  BitVector done_blocks(
      range->GetListOfBlocksRequiringSpillOperands()->length(), temp_zone);
  while (!worklist.empty()) {
    int block_id = worklist.front();
    worklist.pop();
    // |done_blocks| prevents re-visiting blocks (the deferred region may
    // contain cycles through the predecessor walk).
    if (done_blocks.Contains(block_id)) continue;
    done_blocks.Add(block_id);
    InstructionBlock* spill_block =
        code->InstructionBlockAt(RpoNumber::FromInt(block_id));

    for (const RpoNumber& pred : spill_block->predecessors()) {
      const InstructionBlock* pred_block = code->InstructionBlockAt(pred);

      if (pred_block->IsDeferred()) {
        // Still inside deferred code: keep walking towards the boundary with
        // non-deferred code.
        worklist.push(pred_block->rpo_number().ToInt());
      } else {
        // Edge entering from non-deferred code: spill here, at the start of
        // |spill_block|.
        LifetimePosition pred_end =
            LifetimePosition::InstructionFromInstructionIndex(
                pred_block->last_instruction_index());

        LiveRangeBound* bound = array->Find(pred_end);

        InstructionOperand pred_op = bound->range_->GetAssignedOperand();

        RpoNumber spill_block_number = spill_block->rpo_number();
        if (done_moves.find(std::make_pair(
                spill_block_number, range->vreg())) == done_moves.end()) {
          data()->AddGapMove(spill_block->first_instruction_index(),
                             Instruction::GapPosition::START, pred_op,
                             spill_operand);
          done_moves.insert(std::make_pair(spill_block_number, range->vreg()));
          // The inserted stack move requires a frame in this block.
          spill_block->mark_needs_frame();
        }
      }
    }
  }
}
    4020             : 
    4021             : 
    4022             : }  // namespace compiler
    4023             : }  // namespace internal
    4024             : }  // namespace v8

Generated by: LCOV version 1.10