LCOV - code coverage report
Current view: top level - src/compiler - register-allocator.cc (source / functions) Hit Total Coverage
Test: app.info Lines: 1269 1479 85.8 %
Date: 2017-10-20 Functions: 119 167 71.3 %

          Line data    Source code
       1             : // Copyright 2014 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #include "src/compiler/register-allocator.h"
       6             : 
       7             : #include "src/assembler-inl.h"
       8             : #include "src/base/adapters.h"
       9             : #include "src/compiler/linkage.h"
      10             : #include "src/string-stream.h"
      11             : 
      12             : namespace v8 {
      13             : namespace internal {
      14             : namespace compiler {
      15             : 
      16             : #define TRACE(...)                             \
      17             :   do {                                         \
      18             :     if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \
      19             :   } while (false)
      20             : 
      21             : 
      22             : namespace {
      23             : 
      24             : static const int kFloatRepBit =
      25             :     1 << static_cast<int>(MachineRepresentation::kFloat32);
      26             : static const int kSimd128RepBit =
      27             :     1 << static_cast<int>(MachineRepresentation::kSimd128);
      28             : 
      29    85646546 : void RemoveElement(ZoneVector<LiveRange*>* v, LiveRange* range) {
      30    85646636 :   auto it = std::find(v->begin(), v->end(), range);
      31             :   DCHECK(it != v->end());
      32    85646636 :   v->erase(it);
      33    85646591 : }
      34             : 
      35     1301647 : int GetRegisterCount(const RegisterConfiguration* cfg, RegisterKind kind) {
      36             :   return kind == FP_REGISTERS ? cfg->num_double_registers()
      37     2603089 :                               : cfg->num_general_registers();
      38             : }
      39             : 
      40             : 
      41     1301620 : int GetAllocatableRegisterCount(const RegisterConfiguration* cfg,
      42             :                                 RegisterKind kind) {
      43             :   return kind == FP_REGISTERS ? cfg->num_allocatable_double_registers()
      44     2603089 :                               : cfg->num_allocatable_general_registers();
      45             : }
      46             : 
      47             : 
      48     1301588 : const int* GetAllocatableRegisterCodes(const RegisterConfiguration* cfg,
      49             :                                        RegisterKind kind) {
      50             :   return kind == FP_REGISTERS ? cfg->allocatable_double_codes()
      51     2603089 :                               : cfg->allocatable_general_codes();
      52             : }
      53             : 
      54             : 
      55      307937 : const InstructionBlock* GetContainingLoop(const InstructionSequence* sequence,
      56             :                                           const InstructionBlock* block) {
      57             :   RpoNumber index = block->loop_header();
      58     2126373 :   if (!index.IsValid()) return nullptr;
      59      307937 :   return sequence->InstructionBlockAt(index);
      60             : }
      61             : 
      62             : 
      63             : const InstructionBlock* GetInstructionBlock(const InstructionSequence* code,
      64             :                                             LifetimePosition pos) {
      65    15971866 :   return code->GetInstructionBlock(pos.ToInstructionIndex());
      66             : }
      67             : 
      68             : 
      69     2610551 : Instruction* GetLastInstruction(InstructionSequence* code,
      70     2610551 :                                 const InstructionBlock* block) {
      71     2610559 :   return code->InstructionAt(block->last_instruction_index());
      72             : }
      73             : 
      74             : // TODO(dcarney): fix frame to allow frame accesses to half size location.
      75     2842151 : int GetByteWidth(MachineRepresentation rep) {
      76     2842151 :   switch (rep) {
      77             :     case MachineRepresentation::kBit:
      78             :     case MachineRepresentation::kWord8:
      79             :     case MachineRepresentation::kWord16:
      80             :     case MachineRepresentation::kWord32:
      81             :     case MachineRepresentation::kTaggedSigned:
      82             :     case MachineRepresentation::kTaggedPointer:
      83             :     case MachineRepresentation::kTagged:
      84             :     case MachineRepresentation::kFloat32:
      85             :       return kPointerSize;
      86             :     case MachineRepresentation::kWord64:
      87             :     case MachineRepresentation::kFloat64:
      88             :       return kDoubleSize;
      89             :     case MachineRepresentation::kSimd128:
      90           0 :       return kSimd128Size;
      91             :     case MachineRepresentation::kNone:
      92             :       break;
      93             :   }
      94           0 :   UNREACHABLE();
      95             : }
      96             : 
      97             : }  // namespace
      98             : 
      99             : class LiveRangeBound {
     100             :  public:
     101    28688560 :   explicit LiveRangeBound(LiveRange* range, bool skip)
     102    86065680 :       : range_(range), start_(range->Start()), end_(range->End()), skip_(skip) {
     103             :     DCHECK(!range->IsEmpty());
     104             :   }
     105             : 
     106             :   bool CanCover(LifetimePosition position) {
     107   155228879 :     return start_ <= position && position < end_;
     108             :   }
     109             : 
     110             :   LiveRange* const range_;
     111             :   const LifetimePosition start_;
     112             :   const LifetimePosition end_;
     113             :   const bool skip_;
     114             : 
     115             :  private:
     116             :   DISALLOW_COPY_AND_ASSIGN(LiveRangeBound);
     117             : };
     118             : 
     119             : 
     120             : struct FindResult {
     121             :   LiveRange* cur_cover_;
     122             :   LiveRange* pred_cover_;
     123             : };
     124             : 
     125             : 
     126             : class LiveRangeBoundArray {
     127             :  public:
     128    54922644 :   LiveRangeBoundArray() : length_(0), start_(nullptr) {}
     129             : 
     130             :   bool ShouldInitialize() { return start_ == nullptr; }
     131             : 
     132     4469343 :   void Initialize(Zone* zone, TopLevelLiveRange* range) {
     133     4469343 :     length_ = range->GetChildCount();
     134             : 
     135     4469340 :     start_ = zone->NewArray<LiveRangeBound>(length_);
     136             :     LiveRangeBound* curr = start_;
     137             :     // Normally, spilled ranges do not need connecting moves, because the spill
     138             :     // location has been assigned at definition. For ranges spilled in deferred
     139             :     // blocks, that is not the case, so we need to connect the spilled children.
     140    33157927 :     for (LiveRange *i = range; i != nullptr; i = i->next(), ++curr) {
     141    28688587 :       new (curr) LiveRangeBound(i, i->spilled());
     142             :     }
     143     4469340 :   }
     144             : 
     145             :   LiveRangeBound* Find(const LifetimePosition position) const {
     146             :     size_t left_index = 0;
     147   107754962 :     size_t right_index = length_;
     148             :     while (true) {
     149   389227389 :       size_t current_index = left_index + (right_index - left_index) / 2;
     150             :       DCHECK(right_index > current_index);
     151   389227389 :       LiveRangeBound* bound = &start_[current_index];
     152   389227389 :       if (bound->start_ <= position) {
     153   266925920 :         if (position < bound->end_) return bound;
     154             :         DCHECK(left_index < current_index);
     155             :         left_index = current_index;
     156             :       } else {
     157             :         right_index = current_index;
     158             :       }
     159             :     }
     160             :   }
     161             : 
     162             :   LiveRangeBound* FindPred(const InstructionBlock* pred) {
     163             :     LifetimePosition pred_end =
     164             :         LifetimePosition::InstructionFromInstructionIndex(
     165             :             pred->last_instruction_index());
     166             :     return Find(pred_end);
     167             :   }
     168             : 
     169             :   LiveRangeBound* FindSucc(const InstructionBlock* succ) {
     170             :     LifetimePosition succ_start = LifetimePosition::GapFromInstructionIndex(
     171             :         succ->first_instruction_index());
     172             :     return Find(succ_start);
     173             :   }
     174             : 
     175   156022512 :   bool FindConnectableSubranges(const InstructionBlock* block,
     176    78011256 :                                 const InstructionBlock* pred,
     177             :                                 FindResult* result) const {
     178             :     LifetimePosition pred_end =
     179             :         LifetimePosition::InstructionFromInstructionIndex(
     180             :             pred->last_instruction_index());
     181             :     LiveRangeBound* bound = Find(pred_end);
     182    78011256 :     result->pred_cover_ = bound->range_;
     183             :     LifetimePosition cur_start = LifetimePosition::GapFromInstructionIndex(
     184             :         block->first_instruction_index());
     185             : 
     186    78011256 :     if (bound->CanCover(cur_start)) {
     187             :       // Both blocks are covered by the same range, so there is nothing to
     188             :       // connect.
     189             :       return false;
     190             :     }
     191             :     bound = Find(cur_start);
     192    28636088 :     if (bound->skip_) {
     193             :       return false;
     194             :     }
     195     4905471 :     result->cur_cover_ = bound->range_;
     196             :     DCHECK(result->pred_cover_ != nullptr && result->cur_cover_ != nullptr);
     197     4905471 :     return (result->cur_cover_ != result->pred_cover_);
     198             :   }
     199             : 
     200             :  private:
     201             :   size_t length_;
     202             :   LiveRangeBound* start_;
     203             : 
     204             :   DISALLOW_COPY_AND_ASSIGN(LiveRangeBoundArray);
     205             : };
     206             : 
     207             : 
     208             : class LiveRangeFinder {
     209             :  public:
     210     1301414 :   explicit LiveRangeFinder(const RegisterAllocationData* data, Zone* zone)
     211             :       : data_(data),
     212     1301414 :         bounds_length_(static_cast<int>(data_->live_ranges().size())),
     213     1301414 :         bounds_(zone->NewArray<LiveRangeBoundArray>(bounds_length_)),
     214     3904428 :         zone_(zone) {
     215    56224251 :     for (int i = 0; i < bounds_length_; ++i) {
     216    54922651 :       new (&bounds_[i]) LiveRangeBoundArray();
     217             :     }
     218     1301600 :   }
     219             : 
     220    52068989 :   LiveRangeBoundArray* ArrayFor(int operand_index) {
     221             :     DCHECK(operand_index < bounds_length_);
     222   104137978 :     TopLevelLiveRange* range = data_->live_ranges()[operand_index];
     223             :     DCHECK(range != nullptr && !range->IsEmpty());
     224    52068989 :     LiveRangeBoundArray* array = &bounds_[operand_index];
     225    52068989 :     if (array->ShouldInitialize()) {
     226     4469342 :       array->Initialize(zone_, range);
     227             :     }
     228    52068985 :     return array;
     229             :   }
     230             : 
     231             :  private:
     232             :   const RegisterAllocationData* const data_;
     233             :   const int bounds_length_;
     234             :   LiveRangeBoundArray* const bounds_;
     235             :   Zone* const zone_;
     236             : 
     237             :   DISALLOW_COPY_AND_ASSIGN(LiveRangeFinder);
     238             : };
     239             : 
     240             : 
     241             : typedef std::pair<ParallelMove*, InstructionOperand> DelayedInsertionMapKey;
     242             : 
     243             : 
     244             : struct DelayedInsertionMapCompare {
     245             :   bool operator()(const DelayedInsertionMapKey& a,
     246             :                   const DelayedInsertionMapKey& b) const {
     247         216 :     if (a.first == b.first) {
     248             :       return a.second.Compare(b.second);
     249             :     }
     250         216 :     return a.first < b.first;
     251             :   }
     252             : };
     253             : 
     254             : 
     255             : typedef ZoneMap<DelayedInsertionMapKey, InstructionOperand,
     256             :                 DelayedInsertionMapCompare> DelayedInsertionMap;
     257             : 
     258             : 
     259    77546297 : UsePosition::UsePosition(LifetimePosition pos, InstructionOperand* operand,
     260             :                          void* hint, UsePositionHintType hint_type)
     261    77546297 :     : operand_(operand), hint_(hint), next_(nullptr), pos_(pos), flags_(0) {
     262             :   DCHECK_IMPLIES(hint == nullptr, hint_type == UsePositionHintType::kNone);
     263             :   bool register_beneficial = true;
     264             :   UsePositionType type = UsePositionType::kAny;
     265   153164345 :   if (operand_ != nullptr && operand_->IsUnallocated()) {
     266             :     const UnallocatedOperand* unalloc = UnallocatedOperand::cast(operand_);
     267    75618122 :     if (unalloc->HasRegisterPolicy()) {
     268             :       type = UsePositionType::kRequiresRegister;
     269    53127969 :     } else if (unalloc->HasSlotPolicy()) {
     270             :       type = UsePositionType::kRequiresSlot;
     271             :       register_beneficial = false;
     272             :     } else {
     273    39791929 :       register_beneficial = !unalloc->HasAnyPolicy();
     274             :     }
     275             :   }
     276   155092594 :   flags_ = TypeField::encode(type) | HintTypeField::encode(hint_type) |
     277    77546297 :            RegisterBeneficialField::encode(register_beneficial) |
     278    77546297 :            AssignedRegisterField::encode(kUnassignedRegister);
     279             :   DCHECK(pos_.IsValid());
     280    77546297 : }
     281             : 
     282             : 
     283           0 : bool UsePosition::HasHint() const {
     284             :   int hint_register;
     285    77673729 :   return HintRegister(&hint_register);
     286             : }
     287             : 
     288             : 
     289   207390490 : bool UsePosition::HintRegister(int* register_code) const {
     290   207390490 :   if (hint_ == nullptr) return false;
     291   105554890 :   switch (HintTypeField::decode(flags_)) {
     292             :     case UsePositionHintType::kNone:
     293             :     case UsePositionHintType::kUnresolved:
     294             :       return false;
     295             :     case UsePositionHintType::kUsePos: {
     296             :       UsePosition* use_pos = reinterpret_cast<UsePosition*>(hint_);
     297     9448460 :       int assigned_register = AssignedRegisterField::decode(use_pos->flags_);
     298     9448460 :       if (assigned_register == kUnassignedRegister) return false;
     299     5039709 :       *register_code = assigned_register;
     300     5039709 :       return true;
     301             :     }
     302             :     case UsePositionHintType::kOperand: {
     303             :       InstructionOperand* operand =
     304             :           reinterpret_cast<InstructionOperand*>(hint_);
     305    36287569 :       *register_code = LocationOperand::cast(operand)->register_code();
     306    36287569 :       return true;
     307             :     }
     308             :     case UsePositionHintType::kPhi: {
     309      936907 :       RegisterAllocationData::PhiMapValue* phi =
     310             :           reinterpret_cast<RegisterAllocationData::PhiMapValue*>(hint_);
     311             :       int assigned_register = phi->assigned_register();
     312      936907 :       if (assigned_register == kUnassignedRegister) return false;
     313      187372 :       *register_code = assigned_register;
     314      187372 :       return true;
     315             :     }
     316             :   }
     317           0 :   UNREACHABLE();
     318             : }
     319             : 
     320             : 
     321    37494345 : UsePositionHintType UsePosition::HintTypeForOperand(
     322             :     const InstructionOperand& op) {
     323    37494345 :   switch (op.kind()) {
     324             :     case InstructionOperand::CONSTANT:
     325             :     case InstructionOperand::IMMEDIATE:
     326             :     case InstructionOperand::EXPLICIT:
     327             :       return UsePositionHintType::kNone;
     328             :     case InstructionOperand::UNALLOCATED:
     329    15419131 :       return UsePositionHintType::kUnresolved;
     330             :     case InstructionOperand::ALLOCATED:
     331    24213682 :       if (op.IsRegister() || op.IsFPRegister()) {
     332             :         return UsePositionHintType::kOperand;
     333             :       } else {
     334             :         DCHECK(op.IsStackSlot() || op.IsFPStackSlot());
     335     1194501 :         return UsePositionHintType::kNone;
     336             :       }
     337             :     case InstructionOperand::INVALID:
     338             :       break;
     339             :   }
     340           0 :   UNREACHABLE();
     341             : }
     342             : 
     343           0 : void UsePosition::SetHint(UsePosition* use_pos) {
     344             :   DCHECK_NOT_NULL(use_pos);
     345     8928522 :   hint_ = use_pos;
     346    17857044 :   flags_ = HintTypeField::update(flags_, UsePositionHintType::kUsePos);
     347           0 : }
     348             : 
     349           0 : void UsePosition::ResolveHint(UsePosition* use_pos) {
     350             :   DCHECK_NOT_NULL(use_pos);
     351     9691804 :   if (HintTypeField::decode(flags_) != UsePositionHintType::kUnresolved) return;
     352     4845914 :   hint_ = use_pos;
     353     4845914 :   flags_ = HintTypeField::update(flags_, UsePositionHintType::kUsePos);
     354             : }
     355             : 
     356             : 
     357           0 : void UsePosition::set_type(UsePositionType type, bool register_beneficial) {
     358             :   DCHECK_IMPLIES(type == UsePositionType::kRequiresSlot, !register_beneficial);
     359             :   DCHECK_EQ(kUnassignedRegister, AssignedRegisterField::decode(flags_));
     360    14781877 :   flags_ = TypeField::encode(type) |
     361    14781877 :            RegisterBeneficialField::encode(register_beneficial) |
     362    14781877 :            HintTypeField::encode(HintTypeField::decode(flags_)) |
     363    14781877 :            AssignedRegisterField::encode(kUnassignedRegister);
     364           0 : }
     365             : 
     366             : 
     367    34056080 : UseInterval* UseInterval::SplitAt(LifetimePosition pos, Zone* zone) {
     368             :   DCHECK(Contains(pos) && pos != start());
     369    34056080 :   UseInterval* after = new (zone) UseInterval(pos, end_);
     370    34056062 :   after->next_ = next_;
     371    34056062 :   next_ = nullptr;
     372    34056062 :   end_ = pos;
     373    34056062 :   return after;
     374             : }
     375             : 
     376             : 
     377           0 : void LifetimePosition::Print() const {
     378           0 :   OFStream os(stdout);
     379           0 :   os << *this << std::endl;
     380           0 : }
     381             : 
     382             : 
     383           0 : std::ostream& operator<<(std::ostream& os, const LifetimePosition pos) {
     384           0 :   os << '@' << pos.ToInstructionIndex();
     385           0 :   if (pos.IsGapPosition()) {
     386             :     os << 'g';
     387             :   } else {
     388             :     os << 'i';
     389             :   }
     390           0 :   if (pos.IsStart()) {
     391             :     os << 's';
     392             :   } else {
     393             :     os << 'e';
     394             :   }
     395           0 :   return os;
     396             : }
     397             : 
     398           0 : LiveRange::LiveRange(int relative_id, MachineRepresentation rep,
     399             :                      TopLevelLiveRange* top_level)
     400             :     : relative_id_(relative_id),
     401             :       bits_(0),
     402             :       last_interval_(nullptr),
     403             :       first_interval_(nullptr),
     404             :       first_pos_(nullptr),
     405             :       top_level_(top_level),
     406             :       next_(nullptr),
     407             :       current_interval_(nullptr),
     408             :       last_processed_use_(nullptr),
     409             :       current_hint_position_(nullptr),
     410   105326807 :       splitting_pointer_(nullptr) {
     411             :   DCHECK(AllocatedOperand::IsSupportedRepresentation(rep));
     412   105326807 :   bits_ = AssignedRegisterField::encode(kUnassignedRegister) |
     413   105326807 :           RepresentationField::encode(rep);
     414           0 : }
     415             : 
     416             : 
     417           0 : void LiveRange::VerifyPositions() const {
     418             :   // Walk the positions, verifying that each is in an interval.
     419           0 :   UseInterval* interval = first_interval_;
     420           0 :   for (UsePosition* pos = first_pos_; pos != nullptr; pos = pos->next()) {
     421           0 :     CHECK(Start() <= pos->pos());
     422           0 :     CHECK(pos->pos() <= End());
     423           0 :     CHECK_NOT_NULL(interval);
     424           0 :     while (!interval->Contains(pos->pos()) && interval->end() != pos->pos()) {
     425             :       interval = interval->next();
     426           0 :       CHECK_NOT_NULL(interval);
     427             :     }
     428             :   }
     429           0 : }
     430             : 
     431             : 
     432           0 : void LiveRange::VerifyIntervals() const {
     433             :   DCHECK(first_interval()->start() == Start());
     434             :   LifetimePosition last_end = first_interval()->end();
     435           0 :   for (UseInterval* interval = first_interval()->next(); interval != nullptr;
     436             :        interval = interval->next()) {
     437             :     DCHECK(last_end <= interval->start());
     438             :     last_end = interval->end();
     439             :   }
     440             :   DCHECK(last_end == End());
     441           0 : }
     442             : 
     443             : 
     444           0 : void LiveRange::set_assigned_register(int reg) {
     445             :   DCHECK(!HasRegisterAssigned() && !spilled());
     446   102172820 :   bits_ = AssignedRegisterField::update(bits_, reg);
     447           0 : }
     448             : 
     449             : 
     450           0 : void LiveRange::UnsetAssignedRegister() {
     451             :   DCHECK(HasRegisterAssigned() && !spilled());
     452           0 :   bits_ = AssignedRegisterField::update(bits_, kUnassignedRegister);
     453           0 : }
     454             : 
     455             : 
     456           0 : void LiveRange::Spill() {
     457             :   DCHECK(!spilled());
     458             :   DCHECK(!TopLevel()->HasNoSpillType());
     459             :   set_spilled(true);
     460    18969394 :   bits_ = AssignedRegisterField::update(bits_, kUnassignedRegister);
     461           0 : }
     462             : 
     463             : 
     464   116691697 : RegisterKind LiveRange::kind() const {
     465   116691697 :   return IsFloatingPoint(representation()) ? FP_REGISTERS : GENERAL_REGISTERS;
     466             : }
     467             : 
     468             : 
     469    34573778 : UsePosition* LiveRange::FirstHintPosition(int* register_index) const {
     470   143662503 :   for (UsePosition* pos = first_pos_; pos != nullptr; pos = pos->next()) {
     471   129721952 :     if (pos->HintRegister(register_index)) return pos;
     472             :   }
     473             :   return nullptr;
     474             : }
     475             : 
     476             : 
     477    53951502 : UsePosition* LiveRange::NextUsePosition(LifetimePosition start) const {
     478    51421416 :   UsePosition* use_pos = last_processed_use_;
     479    27235518 :   if (use_pos == nullptr || use_pos->pos() > start) {
     480             :     use_pos = first_pos();
     481             :   }
     482    51421416 :   while (use_pos != nullptr && use_pos->pos() < start) {
     483             :     use_pos = use_pos->next();
     484             :   }
     485    27235518 :   last_processed_use_ = use_pos;
     486    27235518 :   return use_pos;
     487             : }
     488             : 
     489             : 
     490    14795055 : UsePosition* LiveRange::NextUsePositionRegisterIsBeneficial(
     491             :     LifetimePosition start) const {
     492    47057587 :   UsePosition* pos = NextUsePosition(start);
     493    61852638 :   while (pos != nullptr && !pos->RegisterIsBeneficial()) {
     494             :     pos = pos->next();
     495             :   }
     496    14795053 :   return pos;
     497             : }
     498             : 
     499     2758537 : LifetimePosition LiveRange::NextLifetimePositionRegisterIsBeneficial(
     500      656816 :     const LifetimePosition& start) const {
     501     2758537 :   UsePosition* next_use = NextUsePositionRegisterIsBeneficial(start);
     502     2758550 :   if (next_use == nullptr) return End();
     503             :   return next_use->pos();
     504             : }
     505             : 
     506           0 : UsePosition* LiveRange::PreviousUsePositionRegisterIsBeneficial(
     507       40601 :     LifetimePosition start) const {
     508      263246 :   UsePosition* pos = first_pos();
     509             :   UsePosition* prev = nullptr;
     510      172224 :   while (pos != nullptr && pos->pos() < start) {
     511      131623 :     if (pos->RegisterIsBeneficial()) prev = pos;
     512             :     pos = pos->next();
     513             :   }
     514           0 :   return prev;
     515             : }
     516             : 
     517             : 
     518    10675978 : UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) const {
     519    44509585 :   UsePosition* pos = NextUsePosition(start);
     520    55185623 :   while (pos != nullptr && pos->type() != UsePositionType::kRequiresRegister) {
     521             :     pos = pos->next();
     522             :   }
     523    10676008 :   return pos;
     524             : }
     525             : 
     526             : 
     527     1204652 : UsePosition* LiveRange::NextSlotPosition(LifetimePosition start) const {
     528     4974481 :   for (UsePosition* pos = NextUsePosition(start); pos != nullptr;
     529             :        pos = pos->next()) {
     530     3770608 :     if (pos->type() != UsePositionType::kRequiresSlot) continue;
     531             :     return pos;
     532             :   }
     533             :   return nullptr;
     534             : }
     535             : 
     536             : 
     537     2952299 : bool LiveRange::CanBeSpilled(LifetimePosition pos) const {
     538             :   // We cannot spill a live range that has a use requiring a register
     539             :   // at the current or the immediate next position.
     540     2952299 :   UsePosition* use_pos = NextRegisterPosition(pos);
     541     2952306 :   if (use_pos == nullptr) return true;
     542             :   return use_pos->pos() > pos.NextStart().End();
     543             : }
     544             : 
     545             : 
     546    56975367 : bool LiveRange::IsTopLevel() const { return top_level_ == this; }
     547             : 
     548             : 
     549   147206813 : InstructionOperand LiveRange::GetAssignedOperand() const {
     550    99739679 :   if (HasRegisterAssigned()) {
     551             :     DCHECK(!spilled());
     552             :     return AllocatedOperand(LocationOperand::REGISTER, representation(),
     553    52272545 :                             assigned_register());
     554             :   }
     555             :   DCHECK(spilled());
     556             :   DCHECK(!HasRegisterAssigned());
     557    47467134 :   if (TopLevel()->HasSpillOperand()) {
     558    34158837 :     InstructionOperand* op = TopLevel()->GetSpillOperand();
     559             :     DCHECK(!op->IsUnallocated());
     560    34158837 :     return *op;
     561             :   }
     562    13308297 :   return TopLevel()->GetSpillRangeOperand();
     563             : }
     564             : 
     565             : 
     566           0 : UseInterval* LiveRange::FirstSearchIntervalForPosition(
     567             :     LifetimePosition position) const {
     568   742038395 :   if (current_interval_ == nullptr) return first_interval_;
     569   641700437 :   if (current_interval_->start() > position) {
     570     1926566 :     current_interval_ = nullptr;
     571     1652780 :     return first_interval_;
     572             :   }
     573             :   return current_interval_;
     574             : }
     575             : 
     576             : 
     577           0 : void LiveRange::AdvanceLastProcessedMarker(
     578             :     UseInterval* to_start_of, LifetimePosition but_not_past) const {
     579   900844992 :   if (to_start_of == nullptr) return;
     580   900844004 :   if (to_start_of->start() > but_not_past) return;
     581   486023131 :   LifetimePosition start = current_interval_ == nullptr
     582             :                                ? LifetimePosition::Invalid()
     583   486023131 :                                : current_interval_->start();
     584   486023131 :   if (to_start_of->start() > start) {
     585    85824393 :     current_interval_ = to_start_of;
     586             :   }
     587             : }
     588             : 
     589             : 
             : // Splits this range at `position` into this (the head) and a newly allocated
             : // child range (the tail). The child is linked after this range in the
             : // top-level's child chain and inherits the same top level.
      590   125609208 : LiveRange* LiveRange::SplitAt(LifetimePosition position, Zone* zone) {
      591             :   int new_id = TopLevel()->GetNextChildId();
      592             :   LiveRange* child = new (zone) LiveRange(new_id, representation(), TopLevel());
      593             :   // If we split, we do so because we're about to switch registers or move
      594             :   // to/from a slot, so there's no value in connecting hints.
      595    31402260 :   DetachAt(position, child, zone, DoNotConnectHints);
      596             : 
      597    31402317 :   child->top_level_ = TopLevel();
      598    31402317 :   child->next_ = next_;
      599    31402317 :   next_ = child;
      600    31402317 :   return child;
      601             : }
     602             : 
             : // Moves everything at or after `position` — use intervals and use
             : // positions — out of this range and into `result` (which must be empty).
             : // If requested, connects a register hint from the last use kept here to
             : // the first use moved to `result`. Returns the last use position that
             : // remains in this range, or nullptr if none do.
      603    50582440 : UsePosition* LiveRange::DetachAt(LifetimePosition position, LiveRange* result,
      604             :                                  Zone* zone,
      605    42574325 :                                  HintConnectionOption connect_hints) {
      606             :   DCHECK(Start() < position);
      607             :   DCHECK(End() > position);
      608             :   DCHECK(result->IsEmpty());
      609             :   // Find the last interval that ends before the position. If the
      610             :   // position is contained in one of the intervals in the chain, we
      611             :   // split that interval and use the first part.
      612    29688705 :   UseInterval* current = FirstSearchIntervalForPosition(position);
      613             : 
      614             :   // If the split position coincides with the beginning of a use interval
      615             :   // we need to split use positions in a special way.
      616             :   bool split_at_start = false;
      617             : 
      618    50582440 :   if (current->start() == position) {
      619             :     // When splitting at start we need to locate the previous use interval.
      620         682 :     current = first_interval_;
      621             :   }
      622             : 
      623             :   UseInterval* after = nullptr;
      624    63742711 :   while (current != nullptr) {
      625    63742954 :     if (current->Contains(position)) {
      626    34054249 :       after = current->SplitAt(position, zone);
      627    34055969 :       break;
      628             :     }
      629             :     UseInterval* next = current->next();
      630    29688705 :     if (next->start() >= position) {
      631             :       split_at_start = (next->start() == position);
      632             :       after = next;
      633             :       current->set_next(nullptr);
      634             :       break;
      635             :     }
      636             :     current = next;
      637             :   }
      638             :   DCHECK_NOT_NULL(after);
      639             : 
      640             :   // Partition original use intervals to the two live ranges.
      641             :   UseInterval* before = current;
      642             :   result->last_interval_ =
      643    50584160 :       (last_interval_ == before)
      644             :           ? after            // Only interval in the range after split.
      645    50584160 :           : last_interval_;  // Last interval of the original range.
      646    50584160 :   result->first_interval_ = after;
      647    50584160 :   last_interval_ = before;
      648             : 
      649             :   // Find the last use position before the split and the first use
      650             :   // position after it. splitting_pointer_ (when valid) lets us resume the
      651             :   // scan mid-list instead of from first_pos().
      652    44090663 :   UsePosition* use_after =
      653    59687809 :       splitting_pointer_ == nullptr || splitting_pointer_->pos() > position
      654             :           ? first_pos()
      655    93158485 :           : splitting_pointer_;
      656             :   UsePosition* use_before = nullptr;
      657    50584160 :   if (split_at_start) {
      658             :     // The split position coincides with the beginning of a use interval (the
      659             :     // end of a lifetime hole). Use at this position should be attributed to
      660             :     // the split child because split child owns use interval covering it.
      661     2900283 :     while (use_after != nullptr && use_after->pos() < position) {
      662             :       use_before = use_after;
      663             :       use_after = use_after->next();
      664             :     }
      665             :   } else {
      666    91774540 :     while (use_after != nullptr && use_after->pos() <= position) {
      667             :       use_before = use_after;
      668             :       use_after = use_after->next();
      669             :     }
      670             :   }
      671             : 
      672             :   // Partition original use positions to the two live ranges.
      673    50584160 :   if (use_before != nullptr) {
      674             :     use_before->set_next(nullptr);
      675             :   } else {
      676    29626476 :     first_pos_ = nullptr;
      677             :   }
      678    50584160 :   result->first_pos_ = use_after;
      679             : 
      680             :   // Discard cached iteration state. It might be pointing
      681             :   // to the use that no longer belongs to this live range.
      682    50584160 :   last_processed_use_ = nullptr;
      683    50584160 :   current_interval_ = nullptr;
      684             : 
      685    60748583 :   if (connect_hints == ConnectHints && use_before != nullptr &&
      686    10164423 :       use_after != nullptr) {
      687             :     use_after->SetHint(use_before);
      688             :   }
      689             : #ifdef DEBUG
      690             :   VerifyChildStructure();
      691             :   result->VerifyChildStructure();
      692             : #endif
      693    50584160 :   return use_before;
      694             : }
     694             : 
     695             : 
             : // Points this range and every child after it in the chain at
             : // `new_top_level`. Used after merging a splinter back into its parent.
      696           0 : void LiveRange::UpdateParentForAllChildren(TopLevelLiveRange* new_top_level) {
      697    27871700 :   LiveRange* child = this;
      698    31637240 :   for (; child != nullptr; child = child->next()) {
      699    27871700 :     child->top_level_ = new_top_level;
      700             :   }
      701           0 : }
     702             : 
     703             : 
             : // Rewrites every use position's operand to the final allocation decision:
             : // slot-requiring uses get `spill_op`, register-requiring and unconstrained
             : // uses get `op`. Uses without an operand (pure liveness markers) are skipped.
      704    60575186 : void LiveRange::ConvertUsesToOperand(const InstructionOperand& op,
      705    60575186 :                                      const InstructionOperand& spill_op) {
      706   212098497 :   for (UsePosition* pos = first_pos(); pos != nullptr; pos = pos->next()) {
      707             :     DCHECK(Start() <= pos->pos() && pos->pos() <= End());
      708    75904447 :     if (!pos->HasOperand()) continue;
      709    75618864 :     switch (pos->type()) {
      710             :       case UsePositionType::kRequiresSlot:
      711             :         DCHECK(spill_op.IsStackSlot() || spill_op.IsFPStackSlot());
      712             :         InstructionOperand::ReplaceWith(pos->operand(), &spill_op);
      713             :         break;
      714             :       case UsePositionType::kRequiresRegister:
      715             :         DCHECK(op.IsRegister() || op.IsFPRegister());
      716             :       // Fall through.
      717             :       case UsePositionType::kAny:
      718             :         InstructionOperand::ReplaceWith(pos->operand(), &op);
      719             :         break;
      720             :     }
      721             :   }
      722             : }
     723             : 
     724             : 
     725             : // This implements an ordering on live ranges so that they are ordered by their
     726             : // start positions.  This is needed for the correctness of the register
     727             : // allocation algorithm.  If two live ranges start at the same offset then there
     728             : // is a tie breaker based on where the value is first used.  This part of the
     729             : // ordering is merely a heuristic.
             : // Primary key: earlier Start() wins. On equal starts the tie-break is the
             : // first use position: a range with no uses sorts after one with uses.
      730    25093574 : bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
      731             :   LifetimePosition start = Start();
      732             :   LifetimePosition other_start = other->Start();
      733   484550530 :   if (start == other_start) {
      734             :     UsePosition* pos = first_pos();
      735    13573999 :     if (pos == nullptr) return false;
      736             :     UsePosition* other_pos = other->first_pos();
      737    11519575 :     if (other_pos == nullptr) return true;
      738             :     return pos->pos() < other_pos->pos();
      739             :   }
      740           0 :   return start < other_start;
      741             : }
     742             : 
     743             : 
             : // Records `register_index` as the assigned register on every use position
             : // that can live in a register (kRequiresRegister / kAny). Slot-only uses
             : // are left untouched.
      744    26225069 : void LiveRange::SetUseHints(int register_index) {
      745   129834335 :   for (UsePosition* pos = first_pos(); pos != nullptr; pos = pos->next()) {
      746    51947311 :     if (!pos->HasOperand()) continue;
      747    51661955 :     switch (pos->type()) {
      748             :       case UsePositionType::kRequiresSlot:
      749             :         break;
      750             :       case UsePositionType::kRequiresRegister:
      751             :       case UsePositionType::kAny:
      752             :         pos->set_assigned_register(register_index);
      753             :         break;
      754             :     }
      755             :   }
      756             : }
     757             : 
     758             : 
             : // True iff `position` lies in the half-open span [Start(), End()). This is
             : // a cheap bounds check; it ignores lifetime holes (see Covers()).
      759   450129101 : bool LiveRange::CanCover(LifetimePosition position) const {
      760   484939690 :   if (IsEmpty()) return false;
      761   935069784 :   return Start() <= position && position < End();
      762             : }
     763             : 
     764             : 
             : // True iff `position` is inside one of this range's use intervals, i.e.
             : // also accounts for lifetime holes. Advances the cached search marker as
             : // it scans so repeated monotone queries stay cheap.
      765   484300356 : bool LiveRange::Covers(LifetimePosition position) const {
      766   484300356 :   if (!CanCover(position)) return false;
      767             :   UseInterval* start_search = FirstSearchIntervalForPosition(position);
      768   793201368 :   for (UseInterval* interval = start_search; interval != nullptr;
      769             :        interval = interval->next()) {
      770             :     DCHECK(interval->next() == nullptr ||
      771             :            interval->next()->start() >= interval->start());
      772             :     AdvanceLastProcessedMarker(interval, position);
      773   793187156 :     if (interval->Contains(position)) return true;
      774   670863433 :     if (interval->start() > position) return false;
      775             :   }
      776             :   return false;
      777             : }
     778             : 
     779             : 
             : // Returns the first position at which this range's intervals overlap
             : // `other`'s, or an invalid position if the two ranges never intersect.
             : // Walks both sorted interval lists in a merge-like fashion.
      780  1356756424 : LifetimePosition LiveRange::FirstIntersection(LiveRange* other) const {
      781    74239303 :   UseInterval* b = other->first_interval();
      782   261968314 :   if (b == nullptr) return LifetimePosition::Invalid();
      783             :   LifetimePosition advance_last_processed_up_to = b->start();
      784   275981841 :   UseInterval* a = FirstSearchIntervalForPosition(b->start());
      785   705833767 :   while (a != nullptr && b != nullptr) {
      786   418760914 :     if (a->start() > other->End()) break;
      787   400110314 :     if (b->start() > End()) break;
      788   396998458 :     LifetimePosition cur_intersection = a->Intersect(b);
      789   397009847 :     if (cur_intersection.IsValid()) {
      790    46788703 :       return cur_intersection;
      791             :     }
      792             :     // No overlap here: advance whichever list is behind.
      793   350221144 :     if (a->start() < b->start()) {
      794             :       a = a->next();
      795   551898723 :       if (a == nullptr || a->start() > other->End()) break;
      796             :       AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
      797             :     } else {
      798             :       b = b->next();
      799             :     }
      800             :   }
      801             :   return LifetimePosition::Invalid();
      802             : }
     802             : 
             : // Debug helper: prints this range (and, optionally, all following child
             : // ranges) to stdout, one per line.
      803           0 : void LiveRange::Print(const RegisterConfiguration* config,
      804             :                       bool with_children) const {
      805           0 :   OFStream os(stdout);
      806             :   PrintableLiveRange wrapper;
      807           0 :   wrapper.register_configuration_ = config;
      808           0 :   for (const LiveRange* i = this; i != nullptr; i = i->next()) {
      809           0 :     wrapper.range_ = i;
      810           0 :     os << wrapper << std::endl;
      811           0 :     if (!with_children) break;
      812           0 :   }
      813           0 : }
     814             : 
     815             : 
             : // Convenience overload using the default register configuration.
      816           0 : void LiveRange::Print(bool with_children) const {
      817           0 :   Print(RegisterConfiguration::Default(), with_children);
      818           0 : }
     819             : 
     820             : 
             : // One pending spill move: at the gap before instruction `gap_index`, move
             : // `operand` into the spill location. Nodes form a singly linked list,
             : // newest-first (RecordSpillLocation prepends).
      821             : struct TopLevelLiveRange::SpillMoveInsertionList : ZoneObject {
      822             :   SpillMoveInsertionList(int gap_index, InstructionOperand* operand,
      823             :                          SpillMoveInsertionList* next)
      824    15243700 :       : gap_index(gap_index), operand(operand), next(next) {}
      825             :   const int gap_index;
      826             :   InstructionOperand* const operand;
      827             :   SpillMoveInsertionList* const next;
      828             : };
     829             : 
     830             : 
             : // A top-level range is its own top level (relative id 0) and starts with
             : // no spill decision (kNoSpillType) and no splinter relationship.
      831          71 : TopLevelLiveRange::TopLevelLiveRange(int vreg, MachineRepresentation rep)
      832             :     : LiveRange(0, rep, this),
      833             :       vreg_(vreg),
      834             :       last_child_id_(0),
      835             :       splintered_from_(nullptr),
      836             :       spill_operand_(nullptr),
      837             :       spill_move_insertion_locations_(nullptr),
      838             :       spilled_in_deferred_blocks_(false),
      839             :       spill_start_index_(kMaxInt),
      840             :       last_pos_(nullptr),
      841             :       splinter_(nullptr),
      842    64906768 :       has_preassigned_slot_(false) {
      843             :   bits_ |= SpillTypeField::encode(SpillType::kNoSpillType);
      844          71 : }
     845             : 
     846             : 
      847             : #if DEBUG
      848             : // For splinters, reports the originating (parent) virtual register so debug
      849             : // output refers to the user-visible vreg rather than the splinter's own.
      850             : int TopLevelLiveRange::debug_virt_reg() const {
      851             :   return IsSplinter() ? splintered_from()->vreg() : vreg();
      852             : }
      853             : #endif
     852             : 
     853             : 
             : // Records that a spill move from `operand` must be inserted at the gap
             : // before instruction `gap_index`. Prepends to the pending-moves list; the
             : // moves are materialized later by CommitSpillMoves.
      854           0 : void TopLevelLiveRange::RecordSpillLocation(Zone* zone, int gap_index,
      855             :                                             InstructionOperand* operand) {
      856             :   DCHECK(HasNoSpillType());
      857             :   spill_move_insertion_locations_ = new (zone) SpillMoveInsertionList(
      858    30487400 :       gap_index, operand, spill_move_insertion_locations_);
      859           0 : }
     860             : 
             : // Emits the recorded spill moves into the instruction stream: for each
             : // pending location, adds a move from the recorded operand into `op` in the
             : // START parallel move of that gap. Skips moves that already exist as fixed
             : // output-to-slot constraint moves, and suppresses (or eliminates) moves
             : // when the range has a preassigned spill slot.
      861    12924997 : void TopLevelLiveRange::CommitSpillMoves(InstructionSequence* sequence,
      862             :                                          const InstructionOperand& op,
      863    15553414 :                                          bool might_be_duplicated) {
      864             :   DCHECK_IMPLIES(op.IsConstant(), GetSpillMoveInsertionLocations() == nullptr);
      865             :   Zone* zone = sequence->zone();
      866             : 
      867    14697704 :   for (SpillMoveInsertionList* to_spill = GetSpillMoveInsertionLocations();
      868             :        to_spill != nullptr; to_spill = to_spill->next) {
      869     1772708 :     Instruction* instr = sequence->InstructionAt(to_spill->gap_index);
      870             :     ParallelMove* move =
      871     1772704 :         instr->GetOrCreateParallelMove(Instruction::START, zone);
      872             :     // Skip insertion if it's possible that the move exists already as a
      873             :     // constraint move from a fixed output register to a slot.
      874     2628417 :     if (might_be_duplicated || has_preassigned_slot()) {
      875             :       bool found = false;
      876     2146390 :       for (MoveOperands* move_op : *move) {
      877      873127 :         if (move_op->IsEliminated()) continue;
      878     2410846 :         if (move_op->source().Equals(*to_spill->operand) &&
      879             :             move_op->destination().Equals(op)) {
      880             :           found = true;
      881             :           // With a preassigned slot the duplicate is redundant; drop it.
      882      621783 :           if (has_preassigned_slot()) move_op->Eliminate();
      883             :           break;
      884             :         }
      885             :       }
      886      947523 :       if (found) continue;
      887             :     }
      888     1150921 :     if (!has_preassigned_slot()) {
      889     1119285 :       move->AddMove(*to_spill->operand, op);
      890             :     }
      891             :   }
      892    12924996 : }
     892             : 
     893             : 
             : // Commits this range to spilling to a fixed, already-allocated operand
             : // (must not still be unallocated or an immediate).
      894           0 : void TopLevelLiveRange::SetSpillOperand(InstructionOperand* operand) {
      895             :   DCHECK(HasNoSpillType());
      896             :   DCHECK(!operand->IsUnallocated() && !operand->IsImmediate());
      897             :   set_spill_type(SpillType::kSpillOperand);
      898    11152343 :   spill_operand_ = operand;
      899           0 : }
     900             : 
     901             : 
             : // Associates this range with a SpillRange; mutually exclusive with a fixed
             : // spill operand (checked by the DCHECKs).
      902           0 : void TopLevelLiveRange::SetSpillRange(SpillRange* spill_range) {
      903             :   DCHECK(!HasSpillOperand());
      904             :   DCHECK(spill_range);
      905     5682610 :   spill_range_ = spill_range;
      906           0 : }
     907             : 
     908             : 
             : // Builds a stack-slot operand from the slot index assigned to this range's
             : // SpillRange, using this range's machine representation.
      909    17835263 : AllocatedOperand TopLevelLiveRange::GetSpillRangeOperand() const {
      910    17835263 :   SpillRange* spill_range = GetSpillRange();
      911             :   int index = spill_range->assigned_slot();
      912      824409 :   return AllocatedOperand(LocationOperand::STACK_SLOT, representation(), index);
      913             : }
     914             : 
     915             : 
             : // Carves the sub-range [start, end) out of this range and appends it to
             : // this range's splinter. Used to pull deferred-block portions out of a
             : // range so the hot path can be allocated independently. Two cases:
             : // the splintered part reaches the end of the range (single detach), or it
             : // is interior (detach the tail, then detach the part past `end` and stitch
             : // it back onto this range).
      916    10164432 : void TopLevelLiveRange::Splinter(LifetimePosition start, LifetimePosition end,
      917    43082542 :                                  Zone* zone) {
      918             :   DCHECK(start != Start() || end != End());
      919             :   DCHECK(start < end);
      920             : 
      921    29346643 :   TopLevelLiveRange splinter_temp(-1, representation());
      922             :   UsePosition* last_in_splinter = nullptr;
      923             :   // Live ranges defined in deferred blocks stay in deferred blocks, so we
      924             :   // don't need to splinter them. That means that start should always be
      925             :   // after the beginning of the range.
      926             :   DCHECK(start > Start());
      927             : 
      928    10164432 :   if (end >= End()) {
      929             :     DCHECK(start > Start());
      930             :     // Splinter reaches the end: just detach the tail into splinter_temp.
      931     1146637 :     DetachAt(start, &splinter_temp, zone, ConnectHints);
      932     1146717 :     next_ = nullptr;
      933             :   } else {
      934             :     DCHECK(start < End() && Start() < end);
      935             : 
      936             :     const int kInvalidId = std::numeric_limits<int>::max();
      937             : 
      938     9017795 :     UsePosition* last = DetachAt(start, &splinter_temp, zone, ConnectHints);
      939             : 
      940             :     LiveRange end_part(kInvalidId, this->representation(), nullptr);
      941             :     // The last chunk exits the deferred region, and we don't want to connect
      942             :     // hints here, because the non-deferred region shouldn't be affected
      943             :     // by allocation decisions on the deferred path.
      944             :     last_in_splinter =
      945     9017779 :         splinter_temp.DetachAt(end, &end_part, zone, DoNotConnectHints);
      946             : 
      947             :     // Reattach the part past `end` to this range.
      948     9017775 :     next_ = end_part.next_;
      949     9017775 :     last_interval_->set_next(end_part.first_interval_);
      950             :     // The next splinter will happen either at or after the current interval.
      951             :     // We can optimize DetachAt by setting current_interval_ accordingly,
      952             :     // which will then be picked up by FirstSearchIntervalForPosition.
      953     9017775 :     current_interval_ = last_interval_;
      954     9017775 :     last_interval_ = end_part.last_interval_;
      955             : 
      956     9017775 :     if (first_pos_ == nullptr) {
      957      581040 :       first_pos_ = end_part.first_pos_;
      958             :     } else {
      959     8436735 :       splitting_pointer_ = last;
      960     8436735 :       if (last != nullptr) last->set_next(end_part.first_pos_);
      961             :     }
      962             :   }
      963             : 
      964             :   // Append splinter_temp's intervals and use positions to the splinter.
      965    10164492 :   if (splinter()->IsEmpty()) {
      966     3765548 :     splinter()->first_interval_ = splinter_temp.first_interval_;
      967     3765548 :     splinter()->last_interval_ = splinter_temp.last_interval_;
      968             :   } else {
      969     6398944 :     splinter()->last_interval_->set_next(splinter_temp.first_interval_);
      970     6398944 :     splinter()->last_interval_ = splinter_temp.last_interval_;
      971             :   }
      972    10164492 :   if (splinter()->first_pos() == nullptr) {
      973     7996839 :     splinter()->first_pos_ = splinter_temp.first_pos_;
      974             :   } else {
      975     2167653 :     splinter()->last_pos_->set_next(splinter_temp.first_pos_);
      976             :   }
      977             :   // Keep the splinter's last_pos_ cache in sync with the appended uses.
      978    10164492 :   if (last_in_splinter != nullptr) {
      979     1975190 :     splinter()->last_pos_ = last_in_splinter;
      980             :   } else {
      981    10836855 :     if (splinter()->first_pos() != nullptr &&
      982     2647553 :         splinter()->last_pos_ == nullptr) {
      983     1053802 :       splinter()->last_pos_ = splinter()->first_pos();
      984     2424574 :       for (UsePosition* pos = splinter()->first_pos(); pos != nullptr;
      985             :            pos = pos->next()) {
      986     1370772 :         splinter()->last_pos_ = pos;
      987             :       }
      988             :     }
      989             :   }
      990             : #if DEBUG
      991             :   Verify();
      992             :   splinter()->Verify();
      993             : #endif
      994    10164492 : }
     991             : 
     992             : 
             : // Records the parent this range was splintered from, and inherits the
             : // parent's spill range when this range has no spill operand of its own.
      993     3765467 : void TopLevelLiveRange::SetSplinteredFrom(TopLevelLiveRange* splinter_parent) {
      994     3765467 :   splintered_from_ = splinter_parent;
      995     3765467 :   if (!HasSpillOperand() && splinter_parent->spill_range_ != nullptr) {
      996     1881252 :     SetSpillRange(splinter_parent->spill_range_);
      997             :   }
      998     3765467 : }
     999             : 
    1000             : 
             : // After merging a splinter (`merged`) back into this range: if this range
             : // has no spill decision but the splinter acquired a spill range, transfer
             : // that spill decision here and clear it on the splinter.
     1001     4327266 : void TopLevelLiveRange::UpdateSpillRangePostMerge(TopLevelLiveRange* merged) {
     1002             :   DCHECK(merged->TopLevel() == this);
     1003             : 
     1004     4572356 :   if (HasNoSpillType() && merged->HasSpillRange()) {
     1005             :     set_spill_type(merged->spill_type());
     1006             :     DCHECK_LT(0, GetSpillRange()->live_ranges().size());
     1007      561727 :     merged->spill_range_ = nullptr;
     1008             :     merged->bits_ =
     1009     1123454 :         SpillTypeField::update(merged->bits_, SpillType::kNoSpillType);
     1010             :   }
     1011     3765539 : }
    1012             : 
    1013             : 
             : // Merges the splinter `other` back into this (its parent): interleaves the
             : // two child chains by start position, splitting this range's children
             : // where they overlap a splinter child so the result is a single ordered,
             : // non-overlapping chain. Then re-parents all children, migrates the spill
             : // range decision, and propagates the slot-use flag.
     1014     6634925 : void TopLevelLiveRange::Merge(TopLevelLiveRange* other, Zone* zone) {
     1015             :   DCHECK(Start() < other->Start());
     1016             :   DCHECK(other->splintered_from() == this);
     1017             : 
     1018    49634638 :   LiveRange* first = this;
     1019    15383540 :   LiveRange* second = other;
     1020             :   DCHECK(first->Start() < second->Start());
     1021    44957323 :   while (first != nullptr && second != nullptr) {
     1022             :     DCHECK(first != second);
     1023             :     // Make sure the ranges are in order each time we iterate.
     1024    37426417 :     if (second->Start() < first->Start()) {
     1025             :       LiveRange* tmp = second;
     1026             :       second = first;
     1027             :       first = tmp;
     1028             :       continue;
     1029             :     }
     1030             : 
     1031    22010211 :     if (first->End() <= second->Start()) {
     1032     9487820 :       if (first->next() == nullptr ||
     1033             :           first->next()->Start() > second->Start()) {
     1034             :         // First is in order before second.
     1035             :         LiveRange* temp = first->next();
     1036     3798510 :         first->next_ = second;
     1037             :         first = temp;
     1038             :       } else {
     1039             :         // First is in order before its successor (or second), so advance first.
     1040             :         first = first->next();
     1041             :       }
     1042             :       continue;
     1043             :     }
     1044             : 
     1045             :     DCHECK(first->Start() < second->Start());
     1046             :     // If first and second intersect, split first.
     1047    15383540 :     if (first->Start() < second->End() && second->Start() < first->End()) {
     1048    15383432 :       LiveRange* temp = first->SplitAt(second->Start(), zone);
     1049    15383606 :       CHECK(temp != first);
     1050             :       // The new tail keeps first's allocation decision (spill / register).
     1051             :       temp->set_spilled(first->spilled());
     1052    15383606 :       if (!temp->spilled())
     1053             :         temp->set_assigned_register(first->assigned_register());
     1054             : 
     1055    15383606 :       first->next_ = second;
     1056             :       first = temp;
     1057    15383606 :       continue;
     1058             :     }
     1059             :     DCHECK(first->End() <= second->Start());
     1060             :   }
     1061             : 
     1062    11296613 :   TopLevel()->UpdateParentForAllChildren(TopLevel());
     1063     3765540 :   TopLevel()->UpdateSpillRangePostMerge(other);
     1064    10400625 :   TopLevel()->set_has_slot_use(TopLevel()->has_slot_use() ||
     1065             :                                other->has_slot_use());
     1066             : 
     1067             : #if DEBUG
     1068             :   Verify();
     1069             : #endif
     1070     3765533 : }
    1070             : 
    1071             : 
             : // Debug check: child ranges must be sorted and non-overlapping
             : // (each child starts at or after the previous one ends).
     1072           0 : void TopLevelLiveRange::VerifyChildrenInOrder() const {
     1073           0 :   LifetimePosition last_end = End();
     1074           0 :   for (const LiveRange* child = this->next(); child != nullptr;
     1075             :        child = child->next()) {
     1076             :     DCHECK(last_end <= child->Start());
     1077             :     last_end = child->End();
     1078             :   }
     1079           0 : }
    1080             : 
    1081             : 
             : // Debug check: child ordering plus per-child structural invariants.
     1082           0 : void TopLevelLiveRange::Verify() const {
     1083             :   VerifyChildrenInOrder();
     1084           0 :   for (const LiveRange* child = this; child != nullptr; child = child->next()) {
     1085             :     VerifyChildStructure();
     1086             :   }
     1087           0 : }
    1088             : 
    1089             : 
             : // Moves the start of the first use interval forward to `start` (which must
             : // already lie inside that interval), shortening the range's live span.
     1090    47717861 : void TopLevelLiveRange::ShortenTo(LifetimePosition start) {
     1091    47717861 :   TRACE("Shorten live range %d to [%d\n", vreg(), start.value());
     1092             :   DCHECK_NOT_NULL(first_interval_);
     1093             :   DCHECK(first_interval_->start() <= start);
     1094             :   DCHECK(start < first_interval_->end());
     1095    47717861 :   first_interval_->set_start(start);
     1096    47717861 : }
    1097             : 
    1098             : 
             : // Ensures the range is live over [start, end): drops leading intervals
             : // swallowed by the new span (extending `end` to cover any partial
             : // overlap), then prepends a single interval covering the result.
     1099     1269239 : void TopLevelLiveRange::EnsureInterval(LifetimePosition start,
     1100           0 :                                        LifetimePosition end, Zone* zone) {
     1101     1269239 :   TRACE("Ensure live range %d in interval [%d %d[\n", vreg(), start.value(),
     1102             :         end.value());
     1103             :   LifetimePosition new_end = end;
     1104     4816752 :   while (first_interval_ != nullptr && first_interval_->start() <= end) {
     1105     2278274 :     if (first_interval_->end() > end) {
     1106             :       new_end = first_interval_->end();
     1107             :     }
     1108     2278274 :     first_interval_ = first_interval_->next();
     1109             :   }
     1110             : 
     1111     2538479 :   UseInterval* new_interval = new (zone) UseInterval(start, new_end);
     1112     1269240 :   new_interval->set_next(first_interval_);
     1113     1269240 :   first_interval_ = new_interval;
     1114     1269240 :   if (new_interval->next() == nullptr) {
     1115      650893 :     last_interval_ = new_interval;
     1116             :   }
     1117     1269240 : }
    1118             : 
    1119             : 
             : // Adds the interval [start, end) to the front of the interval list.
             : // Intervals arrive in reverse program order, so the new interval either
             : // precedes, touches, or overlaps the current first interval — touching and
             : // overlapping intervals are coalesced in place instead of being linked.
     1120   284303331 : void TopLevelLiveRange::AddUseInterval(LifetimePosition start,
     1121           0 :                                        LifetimePosition end, Zone* zone) {
     1122   284303331 :   TRACE("Add to live range %d interval [%d %d[\n", vreg(), start.value(),
     1123             :         end.value());
     1124   284317828 :   if (first_interval_ == nullptr) {
     1125    49344612 :     UseInterval* interval = new (zone) UseInterval(start, end);
     1126    49344996 :     first_interval_ = interval;
     1127    49344996 :     last_interval_ = interval;
     1128             :   } else {
     1129   234973216 :     if (end == first_interval_->start()) {
     1130             :       first_interval_->set_start(start);
     1131   161272911 :     } else if (end < first_interval_->start()) {
     1132   109589741 :       UseInterval* interval = new (zone) UseInterval(start, end);
     1133   109589682 :       interval->set_next(first_interval_);
     1134   109589682 :       first_interval_ = interval;
     1135             :     } else {
     1136             :       // Order of instruction's processing (see ProcessInstructions) guarantees
     1137             :       // that each new use interval either precedes, intersects with or touches
     1138             :       // the last added interval.
     1139             :       DCHECK(start <= first_interval_->end());
     1140             :       first_interval_->set_start(Min(start, first_interval_->start()));
     1141    51683170 :       first_interval_->set_end(Max(end, first_interval_->end()));
     1142             :     }
     1143             :   }
     1144   284318153 : }
    1145             : 
    1146             : 
             : // Inserts `use_pos` into the use-position list, keeping it sorted by
             : // position (uses mostly arrive in reverse order, so the scan is usually
             : // empty). If no earlier use carries a hint and this one does, it becomes
             : // the range's current hint position.
     1147    77546315 : void TopLevelLiveRange::AddUsePosition(UsePosition* use_pos) {
     1148             :   LifetimePosition pos = use_pos->pos();
     1149    77546315 :   TRACE("Add to live range %d use position %d\n", vreg(), pos.value());
     1150             :   UsePosition* prev_hint = nullptr;
     1151      126769 :   UsePosition* prev = nullptr;
     1152    77673760 :   UsePosition* current = first_pos_;
     1153   155220751 :   while (current != nullptr && current->pos() < pos) {
     1154      126769 :     prev_hint = current->HasHint() ? current : prev_hint;
     1155             :     prev = current;
     1156             :     current = current->next();
     1157             :   }
     1158             : 
     1159    77546991 :   if (prev == nullptr) {
     1160    77420222 :     use_pos->set_next(first_pos_);
     1161    77420222 :     first_pos_ = use_pos;
     1162             :   } else {
     1163             :     use_pos->set_next(prev->next());
     1164             :     prev->set_next(use_pos);
     1165             :   }
     1166             : 
     1167   155094045 :   if (prev_hint == nullptr && use_pos->HasHint()) {
     1168    20881048 :     current_hint_position_ = use_pos;
     1169             :   }
     1170    77547085 : }
    1171             : 
    1172             : 
    1173    28831812 : static bool AreUseIntervalsIntersecting(UseInterval* interval1,
    1174    11181787 :                                         UseInterval* interval2) {
    1175    59398068 :   while (interval1 != nullptr && interval2 != nullptr) {
    1176    39837816 :     if (interval1->start() < interval2->start()) {
    1177    15994437 :       if (interval1->end() > interval2->start()) {
    1178             :         return true;
    1179             :       }
    1180             :       interval1 = interval1->next();
    1181             :     } else {
    1182    23843379 :       if (interval2->end() > interval1->start()) {
    1183             :         return true;
    1184             :       }
    1185             :       interval2 = interval2->next();
    1186             :     }
    1187             :   }
    1188             :   return false;
    1189             : }
    1190             : 
    1191             : 
    1192           0 : std::ostream& operator<<(std::ostream& os,
    1193             :                          const PrintableLiveRange& printable_range) {
    1194           0 :   const LiveRange* range = printable_range.range_;
    1195           0 :   os << "Range: " << range->TopLevel()->vreg() << ":" << range->relative_id()
    1196           0 :      << " ";
    1197           0 :   if (range->TopLevel()->is_phi()) os << "phi ";
    1198           0 :   if (range->TopLevel()->is_non_loop_phi()) os << "nlphi ";
    1199             : 
    1200           0 :   os << "{" << std::endl;
    1201           0 :   UseInterval* interval = range->first_interval();
    1202           0 :   UsePosition* use_pos = range->first_pos();
    1203             :   PrintableInstructionOperand pio;
    1204           0 :   pio.register_configuration_ = printable_range.register_configuration_;
    1205           0 :   while (use_pos != nullptr) {
    1206           0 :     if (use_pos->HasOperand()) {
    1207           0 :       pio.op_ = *use_pos->operand();
    1208           0 :       os << pio << use_pos->pos() << " ";
    1209             :     }
    1210             :     use_pos = use_pos->next();
    1211             :   }
    1212             :   os << std::endl;
    1213             : 
    1214           0 :   while (interval != nullptr) {
    1215           0 :     os << '[' << interval->start() << ", " << interval->end() << ')'
    1216             :        << std::endl;
    1217             :     interval = interval->next();
    1218             :   }
    1219           0 :   os << "}";
    1220           0 :   return os;
    1221             : }
    1222             : 
    1223     2842168 : SpillRange::SpillRange(TopLevelLiveRange* parent, Zone* zone)
    1224             :     : live_ranges_(zone),
    1225             :       assigned_slot_(kUnassignedSlot),
    1226     5684336 :       byte_width_(GetByteWidth(parent->representation())) {
    1227             :   // Spill ranges are created for top level, non-splintered ranges. This is so
    1228             :   // that, when merging decisions are made, we consider the full extent of the
    1229             :   // virtual register, and avoid clobbering it.
    1230             :   DCHECK(!parent->IsSplinter());
    1231             :   UseInterval* result = nullptr;
    1232             :   UseInterval* node = nullptr;
    1233             :   // Copy the intervals for all ranges.
    1234     6317961 :   for (LiveRange* range = parent; range != nullptr; range = range->next()) {
    1235     5727467 :     UseInterval* src = range->first_interval();
    1236    12679041 :     while (src != nullptr) {
    1237             :       UseInterval* new_node = new (zone) UseInterval(src->start(), src->end());
    1238     5727467 :       if (result == nullptr) {
    1239             :         result = new_node;
    1240             :       } else {
    1241             :         node->set_next(new_node);
    1242             :       }
    1243             :       node = new_node;
    1244             :       src = src->next();
    1245             :     }
    1246             :   }
    1247     2842174 :   use_interval_ = result;
    1248     2842174 :   live_ranges().push_back(parent);
    1249     2842189 :   end_position_ = node->end();
    1250     2842189 :   parent->SetSpillRange(this);
    1251     2842189 : }
    1252             : 
    1253    20148537 : bool SpillRange::IsIntersectingWith(SpillRange* other) const {
    1254    60445622 :   if (this->use_interval_ == nullptr || other->use_interval_ == nullptr ||
    1255    40279315 :       this->End() <= other->use_interval_->start() ||
    1256             :       other->End() <= this->use_interval_->start()) {
    1257             :     return false;
    1258             :   }
    1259    19364810 :   return AreUseIntervalsIntersecting(use_interval_, other->use_interval_);
    1260             : }
    1261             : 
    1262             : 
    1263    61697406 : bool SpillRange::TryMerge(SpillRange* other) {
    1264    41548855 :   if (HasSlot() || other->HasSlot()) return false;
    1265    20148551 :   if (byte_width() != other->byte_width() || IsIntersectingWith(other))
    1266             :     return false;
    1267             : 
    1268             :   LifetimePosition max = LifetimePosition::MaxPosition();
    1269      938240 :   if (End() < other->End() && other->End() != max) {
    1270       21141 :     end_position_ = other->End();
    1271             :   }
    1272      938240 :   other->end_position_ = max;
    1273             : 
    1274      938240 :   MergeDisjointIntervals(other->use_interval_);
    1275      938240 :   other->use_interval_ = nullptr;
    1276             : 
    1277     2835649 :   for (TopLevelLiveRange* range : other->live_ranges()) {
    1278             :     DCHECK(range->GetSpillRange() == other);
    1279             :     range->SetSpillRange(this);
    1280             :   }
    1281             : 
    1282             :   live_ranges().insert(live_ranges().end(), other->live_ranges().begin(),
    1283      938240 :                        other->live_ranges().end());
    1284             :   other->live_ranges().clear();
    1285             : 
    1286      938240 :   return true;
    1287             : }
    1288             : 
    1289             : 
// Merges the sorted list of use intervals |other| into this range's
// use_interval_ list, producing a single list sorted by start position.
// Precondition (DCHECKed below): the two lists are pairwise disjoint, so a
// simple two-way merge without interval coalescing suffices.
void SpillRange::MergeDisjointIntervals(UseInterval* other) {
  UseInterval* tail = nullptr;  // last node already appended to the result
  UseInterval* current = use_interval_;
  while (other != nullptr) {
    // Make sure the 'current' list starts first
    if (current == nullptr || current->start() > other->start()) {
      std::swap(current, other);
    }
    // Check disjointness
    DCHECK(other == nullptr || current->end() <= other->start());
    // Append the 'current' node to the result accumulator and move forward
    if (tail == nullptr) {
      use_interval_ = current;  // first merged node becomes the new head
    } else {
      tail->set_next(current);
    }
    tail = current;
    current = current->next();
  }
  // Other list is empty => we are done. Whatever remains of 'current' is
  // already linked after 'tail', so no fix-up is needed.
}
    1311             : 
    1312             : 
    1313           0 : void SpillRange::Print() const {
    1314           0 :   OFStream os(stdout);
    1315           0 :   os << "{" << std::endl;
    1316           0 :   for (TopLevelLiveRange* range : live_ranges()) {
    1317           0 :     os << range->vreg() << " ";
    1318             :   }
    1319             :   os << std::endl;
    1320             : 
    1321           0 :   for (UseInterval* i = interval(); i != nullptr; i = i->next()) {
    1322           0 :     os << '[' << i->start() << ", " << i->end() << ')' << std::endl;
    1323             :   }
    1324           0 :   os << "}" << std::endl;
    1325           0 : }
    1326             : 
    1327             : 
// Records bookkeeping for a single phi instruction: the phi itself, its
// block, and (initially empty) the incoming operands that will later be
// rewritten once a register/slot is assigned.
RegisterAllocationData::PhiMapValue::PhiMapValue(PhiInstruction* phi,
                                                 const InstructionBlock* block,
                                                 Zone* zone)
    : phi_(phi),
      block_(block),
      incoming_operands_(zone),
      assigned_register_(kUnassignedRegister) {
  // One incoming operand per phi input; reserve up front to avoid regrowth.
  incoming_operands_.reserve(phi->operands().size());
}
    1337             : 
    1338             : 
// Registers one incoming operand of the phi; CommitAssignment will later
// rewrite all recorded operands with the final assignment.
void RegisterAllocationData::PhiMapValue::AddOperand(
    InstructionOperand* operand) {
  incoming_operands_.push_back(operand);
}
    1343             : 
    1344             : 
    1345           0 : void RegisterAllocationData::PhiMapValue::CommitAssignment(
    1346             :     const InstructionOperand& assigned) {
    1347     8508723 :   for (InstructionOperand* operand : incoming_operands_) {
    1348             :     InstructionOperand::ReplaceWith(operand, &assigned);
    1349             :   }
    1350           0 : }
    1351             : 
// Sets up all per-compilation register allocation state: live-in/out sets
// sized by block count, live range and spill range tables sized by vreg
// count, fixed-range tables sized by the register configuration, and the
// assigned-register bit vectors shared with the frame.
RegisterAllocationData::RegisterAllocationData(
    const RegisterConfiguration* config, Zone* zone, Frame* frame,
    InstructionSequence* code, const char* debug_name)
    : allocation_zone_(zone),
      frame_(frame),
      code_(code),
      debug_name_(debug_name),
      config_(config),
      phi_map_(allocation_zone()),
      live_in_sets_(code->InstructionBlockCount(), nullptr, allocation_zone()),
      live_out_sets_(code->InstructionBlockCount(), nullptr, allocation_zone()),
      // 2x vreg count leaves headroom for ranges created during allocation
      // (see GetNextLiveRangeId).
      live_ranges_(code->VirtualRegisterCount() * 2, nullptr,
                   allocation_zone()),
      fixed_live_ranges_(this->config()->num_general_registers(), nullptr,
                         allocation_zone()),
      fixed_float_live_ranges_(allocation_zone()),
      fixed_double_live_ranges_(this->config()->num_double_registers(), nullptr,
                                allocation_zone()),
      fixed_simd128_live_ranges_(allocation_zone()),
      spill_ranges_(code->VirtualRegisterCount(), nullptr, allocation_zone()),
      delayed_references_(allocation_zone()),
      assigned_registers_(nullptr),
      assigned_double_registers_(nullptr),
      virtual_register_count_(code->VirtualRegisterCount()),
      preassigned_slot_ranges_(zone) {
  // Dedicated float32/simd128 fixed-range tables are only needed on targets
  // with non-trivial FP register aliasing; otherwise they stay empty.
  if (!kSimpleFPAliasing) {
    fixed_float_live_ranges_.resize(this->config()->num_float_registers(),
                                    nullptr);
    fixed_simd128_live_ranges_.resize(this->config()->num_simd128_registers(),
                                      nullptr);
  }

  assigned_registers_ = new (code_zone())
      BitVector(this->config()->num_general_registers(), code_zone());
  assigned_double_registers_ = new (code_zone())
      BitVector(this->config()->num_double_registers(), code_zone());
  // The frame keeps pointers to these bit vectors so later phases can observe
  // which registers were actually used.
  this->frame()->SetAllocatedRegisters(assigned_registers_);
  this->frame()->SetAllocatedDoubleRegisters(assigned_double_registers_);
}
    1391             : 
    1392             : 
    1393    31694518 : MoveOperands* RegisterAllocationData::AddGapMove(
    1394             :     int index, Instruction::GapPosition position,
    1395    31694518 :     const InstructionOperand& from, const InstructionOperand& to) {
    1396             :   Instruction* instr = code()->InstructionAt(index);
    1397    31694560 :   ParallelMove* moves = instr->GetOrCreateParallelMove(position, code_zone());
    1398    31694390 :   return moves->AddMove(from, to);
    1399             : }
    1400             : 
    1401             : 
// Returns the machine representation the instruction sequence recorded for
// |virtual_register|.
MachineRepresentation RegisterAllocationData::RepresentationFor(
    int virtual_register) {
  DCHECK_LT(virtual_register, code()->VirtualRegisterCount());
  return code()->GetRepresentation(virtual_register);
}
    1407             : 
    1408             : 
    1409   221309188 : TopLevelLiveRange* RegisterAllocationData::GetOrCreateLiveRangeFor(int index) {
    1410   442618376 :   if (index >= static_cast<int>(live_ranges().size())) {
    1411           0 :     live_ranges().resize(index + 1, nullptr);
    1412             :   }
    1413   442618376 :   TopLevelLiveRange* result = live_ranges()[index];
    1414   221309188 :   if (result == nullptr) {
    1415    27040027 :     result = NewLiveRange(index, RepresentationFor(index));
    1416    54079476 :     live_ranges()[index] = result;
    1417             :   }
    1418   221308894 :   return result;
    1419             : }
    1420             : 
    1421             : 
// Allocates a fresh TopLevelLiveRange for vreg |index| with representation
// |rep| in the allocation zone. Does not register it in live_ranges().
TopLevelLiveRange* RegisterAllocationData::NewLiveRange(
    int index, MachineRepresentation rep) {
  return new (allocation_zone()) TopLevelLiveRange(index, rep);
}
    1426             : 
    1427             : 
    1428     3765506 : int RegisterAllocationData::GetNextLiveRangeId() {
    1429     3765506 :   int vreg = virtual_register_count_++;
    1430     7531012 :   if (vreg >= static_cast<int>(live_ranges().size())) {
    1431           0 :     live_ranges().resize(vreg + 1, nullptr);
    1432             :   }
    1433     3765506 :   return vreg;
    1434             : }
    1435             : 
    1436             : 
    1437     3765494 : TopLevelLiveRange* RegisterAllocationData::NextLiveRange(
    1438             :     MachineRepresentation rep) {
    1439     3765494 :   int vreg = GetNextLiveRangeId();
    1440     3765515 :   TopLevelLiveRange* ret = NewLiveRange(vreg, rep);
    1441     3765459 :   return ret;
    1442             : }
    1443             : 
    1444             : 
    1445     1426736 : RegisterAllocationData::PhiMapValue* RegisterAllocationData::InitializePhiMap(
    1446     2853473 :     const InstructionBlock* block, PhiInstruction* phi) {
    1447             :   RegisterAllocationData::PhiMapValue* map_value = new (allocation_zone())
    1448             :       RegisterAllocationData::PhiMapValue(phi, block, allocation_zone());
    1449             :   auto res =
    1450     2853476 :       phi_map_.insert(std::make_pair(phi->virtual_register(), map_value));
    1451             :   DCHECK(res.second);
    1452             :   USE(res);
    1453     1426739 :   return map_value;
    1454             : }
    1455             : 
    1456             : 
    1457           0 : RegisterAllocationData::PhiMapValue* RegisterAllocationData::GetPhiMapValueFor(
    1458             :     int virtual_register) {
    1459             :   auto it = phi_map_.find(virtual_register);
    1460             :   DCHECK(it != phi_map_.end());
    1461     4598941 :   return it->second;
    1462             : }
    1463             : 
    1464             : 
// Convenience overload: looks up the phi map entry by the live range's vreg.
RegisterAllocationData::PhiMapValue* RegisterAllocationData::GetPhiMapValueFor(
    TopLevelLiveRange* top_range) {
  return GetPhiMapValueFor(top_range->vreg());
}
    1469             : 
    1470             : 
    1471          42 : bool RegisterAllocationData::ExistsUseWithoutDefinition() {
    1472             :   bool found = false;
    1473          42 :   BitVector::Iterator iterator(live_in_sets()[0]);
    1474         126 :   while (!iterator.Done()) {
    1475             :     found = true;
    1476           0 :     int operand_index = iterator.Current();
    1477             :     PrintF("Register allocator error: live v%d reached first block.\n",
    1478           0 :            operand_index);
    1479           0 :     LiveRange* range = GetOrCreateLiveRangeFor(operand_index);
    1480           0 :     PrintF("  (first use is at %d)\n", range->first_pos()->pos().value());
    1481           0 :     if (debug_name() == nullptr) {
    1482           0 :       PrintF("\n");
    1483             :     } else {
    1484           0 :       PrintF("  (function: %s)\n", debug_name());
    1485             :     }
    1486           0 :     iterator.Advance();
    1487             :   }
    1488          42 :   return found;
    1489             : }
    1490             : 
    1491             : 
    1492             : // If a range is defined in a deferred block, we can expect all the range
    1493             : // to only cover positions in deferred blocks. Otherwise, a block on the
    1494             : // hot path would be dominated by a deferred block, meaning it is unreachable
    1495             : // without passing through the deferred block, which is contradictory.
    1496             : // In particular, when such a range contributes a result back on the hot
    1497             : // path, it will be as one of the inputs of a phi. In that case, the value
    1498             : // will be transferred via a move in the Gap::END's of the last instruction
    1499             : // of a deferred block.
    1500         250 : bool RegisterAllocationData::RangesDefinedInDeferredStayInDeferred() {
    1501         546 :   for (const TopLevelLiveRange* range : live_ranges()) {
    1502         901 :     if (range == nullptr || range->IsEmpty() ||
    1503             :         !code()
    1504             :              ->GetInstructionBlock(range->Start().ToInstructionIndex())
    1505         208 :              ->IsDeferred()) {
    1506             :       continue;
    1507             :     }
    1508           0 :     for (const UseInterval* i = range->first_interval(); i != nullptr;
    1509             :          i = i->next()) {
    1510             :       int first = i->FirstGapIndex();
    1511             :       int last = i->LastGapIndex();
    1512           0 :       for (int instr = first; instr <= last;) {
    1513           0 :         const InstructionBlock* block = code()->GetInstructionBlock(instr);
    1514           0 :         if (!block->IsDeferred()) return false;
    1515             :         instr = block->last_instruction_index() + 1;
    1516             :       }
    1517             :     }
    1518             :   }
    1519             :   return true;
    1520             : }
    1521             : 
    1522     2899113 : SpillRange* RegisterAllocationData::AssignSpillRangeToLiveRange(
    1523     9538346 :     TopLevelLiveRange* range) {
    1524             :   DCHECK(!range->HasSpillOperand());
    1525             : 
    1526             :   SpillRange* spill_range = range->GetAllocatedSpillRange();
    1527     2899113 :   if (spill_range == nullptr) {
    1528             :     DCHECK(!range->IsSplinter());
    1529     1704726 :     spill_range = new (allocation_zone()) SpillRange(range, allocation_zone());
    1530             :   }
    1531             :   range->set_spill_type(TopLevelLiveRange::SpillType::kSpillRange);
    1532             : 
    1533             :   int spill_range_index =
    1534     2899129 :       range->IsSplinter() ? range->splintered_from()->vreg() : range->vreg();
    1535             : 
    1536     5798258 :   spill_ranges()[spill_range_index] = spill_range;
    1537             : 
    1538     2899129 :   return spill_range;
    1539             : }
    1540             : 
    1541             : 
    1542     1137453 : SpillRange* RegisterAllocationData::CreateSpillRangeForLiveRange(
    1543     1137453 :     TopLevelLiveRange* range) {
    1544             :   DCHECK(!range->HasSpillOperand());
    1545             :   DCHECK(!range->IsSplinter());
    1546             :   SpillRange* spill_range =
    1547     1137468 :       new (allocation_zone()) SpillRange(range, allocation_zone());
    1548     1137472 :   return spill_range;
    1549             : }
    1550             : 
    1551    50161801 : void RegisterAllocationData::MarkAllocated(MachineRepresentation rep,
    1552             :                                            int index) {
    1553    50161801 :   switch (rep) {
    1554             :     case MachineRepresentation::kFloat32:
    1555             :     case MachineRepresentation::kSimd128:
    1556             :       if (kSimpleFPAliasing) {
    1557      544710 :         assigned_double_registers_->Add(index);
    1558             :       } else {
    1559             :         int alias_base_index = -1;
    1560             :         int aliases = config()->GetAliases(
    1561             :             rep, index, MachineRepresentation::kFloat64, &alias_base_index);
    1562             :         DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
    1563             :         while (aliases--) {
    1564             :           int aliased_reg = alias_base_index + aliases;
    1565             :           assigned_double_registers_->Add(aliased_reg);
    1566             :         }
    1567             :       }
    1568             :       break;
    1569             :     case MachineRepresentation::kFloat64:
    1570    14710488 :       assigned_double_registers_->Add(index);
    1571             :       break;
    1572             :     default:
    1573             :       DCHECK(!IsFloatingPoint(rep));
    1574    34906603 :       assigned_registers_->Add(index);
    1575             :       break;
    1576             :   }
    1577    50161801 : }
    1578             : 
    1579    27621960 : bool RegisterAllocationData::IsBlockBoundary(LifetimePosition pos) const {
    1580    27622015 :   return pos.IsFullStart() &&
    1581    11788101 :          code()->GetInstructionBlock(pos.ToInstructionIndex())->code_start() ==
    1582    15833914 :              pos.ToInstructionIndex();
    1583             : }
    1584             : 
    1585             : 
// The constraint builder operates on the shared RegisterAllocationData.
ConstraintBuilder::ConstraintBuilder(RegisterAllocationData* data)
    : data_(data) {}
    1588             : 
    1589             : 
// Replaces a fixed-policy unallocated |operand| in place with the concrete
// allocated operand it demands (a specific stack slot, general register, or
// FP register). When |is_tagged|, additionally records the location in the
// reference map of the instruction at |pos| so the GC can find it. Returns
// the (now allocated) operand.
InstructionOperand* ConstraintBuilder::AllocateFixed(
    UnallocatedOperand* operand, int pos, bool is_tagged) {
  TRACE("Allocating fixed reg for op %d\n", operand->virtual_register());
  DCHECK(operand->HasFixedPolicy());
  InstructionOperand allocated;
  // Fall back to the default representation when the operand carries no
  // virtual register (e.g. explicitly fixed temps).
  MachineRepresentation rep = InstructionSequence::DefaultRepresentation();
  int virtual_register = operand->virtual_register();
  if (virtual_register != InstructionOperand::kInvalidVirtualRegister) {
    rep = data()->RepresentationFor(virtual_register);
  }
  if (operand->HasFixedSlotPolicy()) {
    allocated = AllocatedOperand(AllocatedOperand::STACK_SLOT, rep,
                                 operand->fixed_slot_index());
  } else if (operand->HasFixedRegisterPolicy()) {
    DCHECK(!IsFloatingPoint(rep));
    DCHECK(data()->config()->IsAllocatableGeneralCode(
        operand->fixed_register_index()));
    allocated = AllocatedOperand(AllocatedOperand::REGISTER, rep,
                                 operand->fixed_register_index());
  } else if (operand->HasFixedFPRegisterPolicy()) {
    DCHECK(IsFloatingPoint(rep));
    DCHECK_NE(InstructionOperand::kInvalidVirtualRegister, virtual_register);
    allocated = AllocatedOperand(AllocatedOperand::REGISTER, rep,
                                 operand->fixed_register_index());
  } else {
    UNREACHABLE();
  }
  // Rewrite the operand in place so the instruction now refers to the
  // concrete location.
  InstructionOperand::ReplaceWith(operand, &allocated);
  if (is_tagged) {
    TRACE("Fixed reg is tagged at %d\n", pos);
    Instruction* instr = code()->InstructionAt(pos);
    if (instr->HasReferenceMap()) {
      instr->reference_map()->RecordReference(*AllocatedOperand::cast(operand));
    }
  }
  return operand;
}
    1627             : 
    1628             : 
    1629     1301543 : void ConstraintBuilder::MeetRegisterConstraints() {
    1630    15647820 :   for (InstructionBlock* block : code()->instruction_blocks()) {
    1631    13044679 :     MeetRegisterConstraints(block);
    1632             :   }
    1633     1301598 : }
    1634             : 
    1635             : 
    1636    13044709 : void ConstraintBuilder::MeetRegisterConstraints(const InstructionBlock* block) {
    1637             :   int start = block->first_instruction_index();
    1638             :   int end = block->last_instruction_index();
    1639             :   DCHECK_NE(-1, start);
    1640    57761074 :   for (int i = start; i <= end; ++i) {
    1641    44716335 :     MeetConstraintsBefore(i);
    1642    44716609 :     if (i != end) MeetConstraintsAfter(i);
    1643             :   }
    1644             :   // Meet register constraints for the instruction in the end.
    1645    13044739 :   MeetRegisterConstraintsForLastInstructionInBlock(block);
    1646    13044712 : }
    1647             : 
    1648             : 
// Handles the outputs of a block's final instruction. Fixed outputs are
// materialized immediately; since the value flows into successor blocks, a
// gap move from the fixed location to an unconstrained operand for the same
// vreg is inserted at the start of each successor. Outputs not pinned to a
// stack slot also get a spill location recorded at each successor's first
// gap.
void ConstraintBuilder::MeetRegisterConstraintsForLastInstructionInBlock(
    const InstructionBlock* block) {
  int end = block->last_instruction_index();
  Instruction* last_instruction = code()->InstructionAt(end);
  for (size_t i = 0; i < last_instruction->OutputCount(); i++) {
    InstructionOperand* output_operand = last_instruction->OutputAt(i);
    DCHECK(!output_operand->IsConstant());
    UnallocatedOperand* output = UnallocatedOperand::cast(output_operand);
    int output_vreg = output->virtual_register();
    TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(output_vreg);
    bool assigned = false;
    if (output->HasFixedPolicy()) {
      AllocateFixed(output, -1, false);
      // This value is produced on the stack, we never need to spill it.
      if (output->IsStackSlot()) {
        DCHECK(LocationOperand::cast(output)->index() <
               data()->frame()->GetSpillSlotCount());
        range->SetSpillOperand(LocationOperand::cast(output));
        range->SetSpillStartIndex(end);
        assigned = true;
      }

      for (const RpoNumber& succ : block->successors()) {
        const InstructionBlock* successor = code()->InstructionBlockAt(succ);
        DCHECK_EQ(1, successor->PredecessorCount());
        int gap_index = successor->first_instruction_index();
        // Create an unconstrained operand for the same virtual register
        // and insert a gap move from the fixed output to the operand.
        UnallocatedOperand output_copy(UnallocatedOperand::ANY, output_vreg);
        data()->AddGapMove(gap_index, Instruction::START, *output, output_copy);
      }
    }

    // No stack-slot spill operand was assigned above, so record a spill
    // location at the start of each successor instead.
    if (!assigned) {
      for (const RpoNumber& succ : block->successors()) {
        const InstructionBlock* successor = code()->InstructionBlockAt(succ);
        DCHECK_EQ(1, successor->PredecessorCount());
        int gap_index = successor->first_instruction_index();
        range->RecordSpillLocation(allocation_zone(), gap_index, output);
        range->SetSpillStartIndex(gap_index);
      }
    }
  }
}
    1693             : 
    1694             : 
    1695    31672000 : void ConstraintBuilder::MeetConstraintsAfter(int instr_index) {
    1696    88966062 :   Instruction* first = code()->InstructionAt(instr_index);
    1697             :   // Handle fixed temporaries.
    1698    64662954 :   for (size_t i = 0; i < first->TempCount(); i++) {
    1699      659476 :     UnallocatedOperand* temp = UnallocatedOperand::cast(first->TempAt(i));
    1700      659476 :     if (temp->HasFixedPolicy()) AllocateFixed(temp, instr_index, false);
    1701             :   }
    1702             :   // Handle constant/fixed output operands.
    1703    81597169 :   for (size_t i = 0; i < first->OutputCount(); i++) {
    1704    24962756 :     InstructionOperand* output = first->OutputAt(i);
    1705    24962756 :     if (output->IsConstant()) {
    1706             :       int output_vreg = ConstantOperand::cast(output)->virtual_register();
    1707    10069139 :       TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(output_vreg);
    1708    10069153 :       range->SetSpillStartIndex(instr_index + 1);
    1709             :       range->SetSpillOperand(output);
    1710             :       continue;
    1711             :     }
    1712             :     UnallocatedOperand* first_output = UnallocatedOperand::cast(output);
    1713             :     TopLevelLiveRange* range =
    1714    14893617 :         data()->GetOrCreateLiveRangeFor(first_output->virtual_register());
    1715             :     bool assigned = false;
    1716    14893340 :     if (first_output->HasFixedPolicy()) {
    1717             :       int output_vreg = first_output->virtual_register();
    1718             :       UnallocatedOperand output_copy(UnallocatedOperand::ANY, output_vreg);
    1719             :       bool is_tagged = code()->IsReference(output_vreg);
    1720     7032210 :       if (first_output->HasSecondaryStorage()) {
    1721             :         range->MarkHasPreassignedSlot();
    1722             :         data()->preassigned_slot_ranges().push_back(
    1723     1481822 :             std::make_pair(range, first_output->GetSecondaryStorage()));
    1724             :       }
    1725     7032212 :       AllocateFixed(first_output, instr_index, is_tagged);
    1726             : 
    1727             :       // This value is produced on the stack, we never need to spill it.
    1728     7032337 :       if (first_output->IsStackSlot()) {
    1729             :         DCHECK(LocationOperand::cast(first_output)->index() <
    1730             :                data()->frame()->GetTotalFrameSlotCount());
    1731             :         range->SetSpillOperand(LocationOperand::cast(first_output));
    1732     1083188 :         range->SetSpillStartIndex(instr_index + 1);
    1733             :         assigned = true;
    1734             :       }
    1735             :       data()->AddGapMove(instr_index + 1, Instruction::START, *first_output,
    1736    14064674 :                          output_copy);
    1737             :     }
    1738             :     // Make sure we add a gap move for spilling (if we have not done
    1739             :     // so already).
    1740    14893434 :     if (!assigned) {
    1741             :       range->RecordSpillLocation(allocation_zone(), instr_index + 1,
    1742    13810315 :                                  first_output);
    1743             :       range->SetSpillStartIndex(instr_index + 1);
    1744             :     }
    1745             :   }
    1746    31671829 : }
    1747             : 
    1748             : 
    1749    44716395 : void ConstraintBuilder::MeetConstraintsBefore(int instr_index) {
    1750   207561684 :   Instruction* second = code()->InstructionAt(instr_index);
    1751             :   // Handle fixed input operands of second instruction.
    1752   275068796 :   for (size_t i = 0; i < second->InputCount(); i++) {
    1753    92817901 :     InstructionOperand* input = second->InputAt(i);
    1754    92817901 :     if (input->IsImmediate() || input->IsExplicit()) {
    1755             :       continue;  // Ignore immediates and explicitly reserved registers.
    1756             :     }
    1757             :     UnallocatedOperand* cur_input = UnallocatedOperand::cast(input);
    1758    52674531 :     if (cur_input->HasFixedPolicy()) {
    1759             :       int input_vreg = cur_input->virtual_register();
    1760             :       UnallocatedOperand input_copy(UnallocatedOperand::ANY, input_vreg);
    1761             :       bool is_tagged = code()->IsReference(input_vreg);
    1762    16675402 :       AllocateFixed(cur_input, instr_index, is_tagged);
    1763    33350810 :       data()->AddGapMove(instr_index, Instruction::END, input_copy, *cur_input);
    1764             :     }
    1765             :   }
    1766             :   // Handle "output same as input" for second instruction.
    1767    94649551 :   for (size_t i = 0; i < second->OutputCount(); i++) {
    1768    24966461 :     InstructionOperand* output = second->OutputAt(i);
    1769    48223411 :     if (!output->IsUnallocated()) continue;
    1770             :     UnallocatedOperand* second_output = UnallocatedOperand::cast(output);
    1771    14897332 :     if (!second_output->HasSameAsInputPolicy()) continue;
    1772             :     DCHECK_EQ(0, i);  // Only valid for first output.
    1773             :     UnallocatedOperand* cur_input =
    1774     1709511 :         UnallocatedOperand::cast(second->InputAt(0));
    1775             :     int output_vreg = second_output->virtual_register();
    1776             :     int input_vreg = cur_input->virtual_register();
    1777             :     UnallocatedOperand input_copy(UnallocatedOperand::ANY, input_vreg);
    1778             :     *cur_input =
    1779     1709511 :         UnallocatedOperand(*cur_input, second_output->virtual_register());
    1780             :     MoveOperands* gap_move = data()->AddGapMove(instr_index, Instruction::END,
    1781     3419022 :                                                 input_copy, *cur_input);
    1782     2053844 :     if (code()->IsReference(input_vreg) && !code()->IsReference(output_vreg)) {
    1783      344262 :       if (second->HasReferenceMap()) {
    1784             :         RegisterAllocationData::DelayedReference delayed_reference = {
    1785           0 :             second->reference_map(), &gap_move->source()};
    1786           0 :         data()->delayed_references().push_back(delayed_reference);
    1787             :       }
    1788     1365322 :     } else if (!code()->IsReference(input_vreg) &&
    1789             :                code()->IsReference(output_vreg)) {
    1790             :       // The input is assumed to immediately have a tagged representation,
    1791             :       // before the pointer map can be used. I.e. the pointer map at the
    1792             :       // instruction will include the output operand (whose value at the
    1793             :       // beginning of the instruction is equal to the input operand). If
    1794             :       // this is not desired, then the pointer map at this instruction needs
    1795             :       // to be adjusted manually.
    1796             :     }
    1797             :   }
    1798    44716563 : }
    1799             : 
    1800             : 
    1801     1301601 : void ConstraintBuilder::ResolvePhis() {
    1802             :   // Process the blocks in reverse order.
    1803    28692437 :   for (InstructionBlock* block : base::Reversed(code()->instruction_blocks())) {
    1804    13044628 :     ResolvePhis(block);
    1805             :   }
    1806     1301580 : }
    1807             : 
    1808             : 
    1809    14471273 : void ConstraintBuilder::ResolvePhis(const InstructionBlock* block) {
    1810    27515804 :   for (PhiInstruction* phi : block->phis()) {
    1811             :     int phi_vreg = phi->virtual_register();
    1812             :     RegisterAllocationData::PhiMapValue* map_value =
    1813     1426745 :         data()->InitializePhiMap(block, phi);
    1814     1426744 :     InstructionOperand& output = phi->output();
    1815             :     // Map the destination operands, so the commitment phase can find them.
    1816     9935552 :     for (size_t i = 0; i < phi->operands().size(); ++i) {
    1817     3541041 :       InstructionBlock* cur_block =
    1818     7082068 :           code()->InstructionBlockAt(block->predecessors()[i]);
    1819     3541041 :       UnallocatedOperand input(UnallocatedOperand::ANY, phi->operands()[i]);
    1820             :       MoveOperands* move = data()->AddGapMove(
    1821     3541041 :           cur_block->last_instruction_index(), Instruction::END, input, output);
    1822     3541033 :       map_value->AddOperand(&move->destination());
    1823             :       DCHECK(!code()
    1824             :                   ->InstructionAt(cur_block->last_instruction_index())
    1825             :                   ->HasReferenceMap());
    1826             :     }
    1827     1426742 :     TopLevelLiveRange* live_range = data()->GetOrCreateLiveRangeFor(phi_vreg);
    1828             :     int gap_index = block->first_instruction_index();
    1829             :     live_range->RecordSpillLocation(allocation_zone(), gap_index, &output);
    1830             :     live_range->SetSpillStartIndex(gap_index);
    1831             :     // We use the phi-ness of some nodes in some later heuristics.
    1832             :     live_range->set_is_phi(true);
    1833     1426742 :     live_range->set_is_non_loop_phi(!block->IsLoopHeader());
    1834             :   }
    1835    13044528 : }
    1836             : 
    1837             : 
    1838     1301543 : LiveRangeBuilder::LiveRangeBuilder(RegisterAllocationData* data,
    1839             :                                    Zone* local_zone)
    1840     2603086 :     : data_(data), phi_hints_(local_zone) {}
    1841             : 
    1842             : 
    1843    13044636 : BitVector* LiveRangeBuilder::ComputeLiveOut(const InstructionBlock* block,
    1844    13044654 :                                             RegisterAllocationData* data) {
    1845             :   size_t block_index = block->rpo_number().ToSize();
    1846    26089272 :   BitVector* live_out = data->live_out_sets()[block_index];
    1847    13044636 :   if (live_out == nullptr) {
    1848             :     // Compute live out for the given block, except not including backward
    1849             :     // successor edges.
    1850             :     Zone* zone = data->allocation_zone();
    1851    28753290 :     const InstructionSequence* code = data->code();
    1852             : 
    1853    13044608 :     live_out = new (zone) BitVector(code->VirtualRegisterCount(), zone);
    1854             : 
    1855             :     // Process all successor blocks.
    1856    41966121 :     for (const RpoNumber& succ : block->successors()) {
    1857             :       // Add values live on entry to the successor.
    1858    15877097 :       if (succ <= block->rpo_number()) continue;
    1859    31417280 :       BitVector* live_in = data->live_in_sets()[succ.ToSize()];
    1860    15708640 :       if (live_in != nullptr) live_out->Union(*live_in);
    1861             : 
    1862             :       // All phi input operands corresponding to this successor edge are live
    1863             :       // out from this block.
    1864    15708636 :       const InstructionBlock* successor = code->InstructionBlockAt(succ);
    1865    15708671 :       size_t index = successor->PredecessorIndexOf(block->rpo_number());
    1866             :       DCHECK(index < successor->PredecessorCount());
    1867    34643332 :       for (PhiInstruction* phi : successor->phis()) {
    1868     6452096 :         live_out->Add(phi->operands()[index]);
    1869             :       }
    1870             :     }
    1871    26089026 :     data->live_out_sets()[block_index] = live_out;
    1872             :   }
    1873    13044495 :   return live_out;
    1874             : }
    1875             : 
    1876             : 
    1877    26088948 : void LiveRangeBuilder::AddInitialIntervals(const InstructionBlock* block,
    1878    79098121 :                                            BitVector* live_out) {
    1879             :   // Add an interval that includes the entire block to the live range for
    1880             :   // each live_out value.
    1881             :   LifetimePosition start = LifetimePosition::GapFromInstructionIndex(
    1882    13044474 :       block->first_instruction_index());
    1883             :   LifetimePosition end = LifetimePosition::InstructionFromInstructionIndex(
    1884             :                              block->last_instruction_index())
    1885    13044474 :                              .NextStart();
    1886    13044474 :   BitVector::Iterator iterator(live_out);
    1887   197330010 :   while (!iterator.Done()) {
    1888    79098121 :     int operand_index = iterator.Current();
    1889    79098121 :     TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(operand_index);
    1890    79098157 :     range->AddUseInterval(start, end, allocation_zone());
    1891    79098011 :     iterator.Advance();
    1892             :   }
    1893    13044601 : }
    1894             : 
    1895    13073262 : int LiveRangeBuilder::FixedFPLiveRangeID(int index, MachineRepresentation rep) {
    1896    13073262 :   int result = -index - 1;
    1897    13073262 :   switch (rep) {
    1898             :     case MachineRepresentation::kSimd128:
    1899           0 :       result -= config()->num_float_registers();
    1900             :     // Fall through.
    1901             :     case MachineRepresentation::kFloat32:
    1902      163668 :       result -= config()->num_double_registers();
    1903             :     // Fall through.
    1904             :     case MachineRepresentation::kFloat64:
    1905    13073262 :       result -= config()->num_general_registers();
    1906             :       break;
    1907             :     default:
    1908           0 :       UNREACHABLE();
    1909             :       break;
    1910             :   }
    1911    13073262 :   return result;
    1912             : }
    1913             : 
    1914   214864271 : TopLevelLiveRange* LiveRangeBuilder::FixedLiveRangeFor(int index) {
    1915             :   DCHECK(index < config()->num_general_registers());
    1916   289703547 :   TopLevelLiveRange* result = data()->fixed_live_ranges()[index];
    1917    96567849 :   if (result == nullptr) {
    1918             :     MachineRepresentation rep = InstructionSequence::DefaultRepresentation();
    1919    10864516 :     result = data()->NewLiveRange(FixedLiveRangeID(index), rep);
    1920             :     DCHECK(result->IsFixed());
    1921             :     result->set_assigned_register(index);
    1922    10864341 :     data()->MarkAllocated(rep, index);
    1923    21728464 :     data()->fixed_live_ranges()[index] = result;
    1924             :   }
    1925    96567565 :   return result;
    1926             : }
    1927             : 
    1928    70681072 : TopLevelLiveRange* LiveRangeBuilder::FixedFPLiveRangeFor(
    1929    83753805 :     int index, MachineRepresentation rep) {
    1930             :   int num_regs = config()->num_double_registers();
    1931             :   ZoneVector<TopLevelLiveRange*>* live_ranges =
    1932             :       &data()->fixed_double_live_ranges();
    1933             :   if (!kSimpleFPAliasing) {
    1934             :     switch (rep) {
    1935             :       case MachineRepresentation::kFloat32:
    1936             :         num_regs = config()->num_float_registers();
    1937             :         live_ranges = &data()->fixed_float_live_ranges();
    1938             :         break;
    1939             :       case MachineRepresentation::kSimd128:
    1940             :         num_regs = config()->num_simd128_registers();
    1941             :         live_ranges = &data()->fixed_simd128_live_ranges();
    1942             :         break;
    1943             :       default:
    1944             :         break;
    1945             :     }
    1946             :   }
    1947             : 
    1948             :   DCHECK(index < num_regs);
    1949             :   USE(num_regs);
    1950   154435024 :   TopLevelLiveRange* result = (*live_ranges)[index];
    1951    70681072 :   if (result == nullptr) {
    1952    13073257 :     result = data()->NewLiveRange(FixedFPLiveRangeID(index, rep), rep);
    1953             :     DCHECK(result->IsFixed());
    1954             :     result->set_assigned_register(index);
    1955    13072733 :     data()->MarkAllocated(rep, index);
    1956    13072880 :     (*live_ranges)[index] = result;
    1957             :   }
    1958    70680695 :   return result;
    1959             : }
    1960             : 
    1961   217188910 : TopLevelLiveRange* LiveRangeBuilder::LiveRangeFor(InstructionOperand* operand) {
    1962   131501528 :   if (operand->IsUnallocated()) {
    1963             :     return data()->GetOrCreateLiveRangeFor(
    1964    75618241 :         UnallocatedOperand::cast(operand)->virtual_register());
    1965    55883287 :   } else if (operand->IsConstant()) {
    1966             :     return data()->GetOrCreateLiveRangeFor(
    1967    10069141 :         ConstantOperand::cast(operand)->virtual_register());
    1968    45814146 :   } else if (operand->IsRegister()) {
    1969             :     return FixedLiveRangeFor(
    1970    41536754 :         LocationOperand::cast(operand)->GetRegister().code());
    1971     4277392 :   } else if (operand->IsFPRegister()) {
    1972             :     LocationOperand* op = LocationOperand::cast(operand);
    1973     1888396 :     return FixedFPLiveRangeFor(op->register_code(), op->representation());
    1974             :   } else {
    1975             :     return nullptr;
    1976             :   }
    1977             : }
    1978             : 
    1979             : 
    1980    77546534 : UsePosition* LiveRangeBuilder::NewUsePosition(LifetimePosition pos,
    1981             :                                               InstructionOperand* operand,
    1982             :                                               void* hint,
    1983             :                                               UsePositionHintType hint_type) {
    1984   155092893 :   return new (allocation_zone()) UsePosition(pos, operand, hint, hint_type);
    1985             : }
    1986             : 
    1987             : 
    1988    50840314 : UsePosition* LiveRangeBuilder::Define(LifetimePosition position,
    1989             :                                       InstructionOperand* operand, void* hint,
    1990             :                                       UsePositionHintType hint_type) {
    1991    50840314 :   TopLevelLiveRange* range = LiveRangeFor(operand);
    1992    50840529 :   if (range == nullptr) return nullptr;
    1993             : 
    1994    49646134 :   if (range->IsEmpty() || range->Start() > position) {
    1995             :     // Can happen if there is a definition without use.
    1996     1928198 :     range->AddUseInterval(position, position.NextStart(), allocation_zone());
    1997     1928193 :     range->AddUsePosition(NewUsePosition(position.NextStart()));
    1998             :   } else {
    1999    47717936 :     range->ShortenTo(position);
    2000             :   }
    2001    49646141 :   if (!operand->IsUnallocated()) return nullptr;
    2002             :   UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand);
    2003             :   UsePosition* use_pos =
    2004    17048030 :       NewUsePosition(position, unalloc_operand, hint, hint_type);
    2005    17047921 :   range->AddUsePosition(use_pos);
    2006    17048067 :   return use_pos;
    2007             : }
    2008             : 
    2009             : 
    2010    80661576 : UsePosition* LiveRangeBuilder::Use(LifetimePosition block_start,
    2011             :                                    LifetimePosition position,
    2012             :                                    InstructionOperand* operand, void* hint,
    2013             :                                    UsePositionHintType hint_type) {
    2014    80661576 :   TopLevelLiveRange* range = LiveRangeFor(operand);
    2015    80661964 :   if (range == nullptr) return nullptr;
    2016             :   UsePosition* use_pos = nullptr;
    2017    79467547 :   if (operand->IsUnallocated()) {
    2018             :     UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand);
    2019    58571244 :     use_pos = NewUsePosition(position, unalloc_operand, hint, hint_type);
    2020    58570978 :     range->AddUsePosition(use_pos);
    2021             :   }
    2022    79467692 :   range->AddUseInterval(block_start, position, allocation_zone());
    2023    79466790 :   return use_pos;
    2024             : }
    2025             : 
    2026             : 
    2027    51055441 : void LiveRangeBuilder::ProcessInstructions(const InstructionBlock* block,
    2028    26176082 :                                            BitVector* live) {
    2029             :   int block_start = block->first_instruction_index();
    2030             :   LifetimePosition block_start_position =
    2031             :       LifetimePosition::GapFromInstructionIndex(block_start);
    2032             :   bool fixed_float_live_ranges = false;
    2033             :   bool fixed_simd128_live_ranges = false;
    2034             :   if (!kSimpleFPAliasing) {
    2035             :     int mask = data()->code()->representation_mask();
    2036             :     fixed_float_live_ranges = (mask & kFloatRepBit) != 0;
    2037             :     fixed_simd128_live_ranges = (mask & kSimd128RepBit) != 0;
    2038             :   }
    2039             : 
    2040    57761041 :   for (int index = block->last_instruction_index(); index >= block_start;
    2041             :        index--) {
    2042             :     LifetimePosition curr_position =
    2043             :         LifetimePosition::InstructionFromInstructionIndex(index);
    2044   252594171 :     Instruction* instr = code()->InstructionAt(index);
    2045             :     DCHECK_NOT_NULL(instr);
    2046             :     DCHECK(curr_position.IsInstructionPosition());
    2047             :     // Process output, inputs, and temps of this instruction.
    2048   139365514 :     for (size_t i = 0; i < instr->OutputCount(); i++) {
    2049    24966453 :       InstructionOperand* output = instr->OutputAt(i);
    2050    24966453 :       if (output->IsUnallocated()) {
    2051             :         // Unsupported.
    2052             :         DCHECK(!UnallocatedOperand::cast(output)->HasSlotPolicy());
    2053             :         int out_vreg = UnallocatedOperand::cast(output)->virtual_register();
    2054     7864593 :         live->Remove(out_vreg);
    2055    17101860 :       } else if (output->IsConstant()) {
    2056             :         int out_vreg = ConstantOperand::cast(output)->virtual_register();
    2057    10069142 :         live->Remove(out_vreg);
    2058             :       }
    2059    25411947 :       if (block->IsHandler() && index == block_start && output->IsAllocated() &&
    2060    25104486 :           output->IsRegister() &&
    2061             :           AllocatedOperand::cast(output)->GetRegister() ==
    2062             :               v8::internal::kReturnRegister0) {
    2063             :         // The register defined here is blocked from gap start - it is the
    2064             :         // exception value.
    2065             :         // TODO(mtrofin): should we explore an explicit opcode for
    2066             :         // the first instruction in the handler?
    2067             :         Define(LifetimePosition::GapFromInstructionIndex(index), output);
    2068             :       } else {
    2069             :         Define(curr_position, output);
    2070             :       }
    2071             :     }
    2072             : 
    2073    44716304 :     if (instr->ClobbersRegisters()) {
    2074   114649580 :       for (int i = 0; i < config()->num_allocatable_general_registers(); ++i) {
    2075             :         // Create a UseInterval at this instruction for all fixed registers,
    2076             :         // (including the instruction outputs). Adding another UseInterval here
    2077             :         // is OK because AddUseInterval will just merge it with the existing
    2078             :         // one at the end of the range.
    2079    55031708 :         int code = config()->GetAllocatableGeneralCode(i);
    2080    55031708 :         TopLevelLiveRange* range = FixedLiveRangeFor(code);
    2081             :         range->AddUseInterval(curr_position, curr_position.End(),
    2082    55031389 :                               allocation_zone());
    2083             :       }
    2084             :     }
    2085             : 
    2086    44716266 :     if (instr->ClobbersDoubleRegisters()) {
    2087   142171742 :       for (int i = 0; i < config()->num_allocatable_double_registers(); ++i) {
    2088             :         // Add a UseInterval for all DoubleRegisters. See comment above for
    2089             :         // general registers.
    2090    68792783 :         int code = config()->GetAllocatableDoubleCode(i);
    2091             :         TopLevelLiveRange* range =
    2092    68792783 :             FixedFPLiveRangeFor(code, MachineRepresentation::kFloat64);
    2093             :         range->AddUseInterval(curr_position, curr_position.End(),
    2094    68792262 :                               allocation_zone());
    2095             :       }
    2096             :       // Clobber fixed float registers on archs with non-simple aliasing.
    2097             :       if (!kSimpleFPAliasing) {
    2098             :         if (fixed_float_live_ranges) {
    2099             :           for (int i = 0; i < config()->num_allocatable_float_registers();
    2100             :                ++i) {
    2101             :             // Add a UseInterval for all FloatRegisters. See comment above for
    2102             :             // general registers.
    2103             :             int code = config()->GetAllocatableFloatCode(i);
    2104             :             TopLevelLiveRange* range =
    2105             :                 FixedFPLiveRangeFor(code, MachineRepresentation::kFloat32);
    2106             :             range->AddUseInterval(curr_position, curr_position.End(),
    2107             :                                   allocation_zone());
    2108             :           }
    2109             :         }
    2110             :         if (fixed_simd128_live_ranges) {
    2111             :           for (int i = 0; i < config()->num_allocatable_simd128_registers();
    2112             :                ++i) {
    2113             :             int code = config()->GetAllocatableSimd128Code(i);
    2114             :             TopLevelLiveRange* range =
    2115             :                 FixedFPLiveRangeFor(code, MachineRepresentation::kSimd128);
    2116             :             range->AddUseInterval(curr_position, curr_position.End(),
    2117             :                                   allocation_zone());
    2118             :           }
    2119             :         }
    2120             :       }
    2121             :     }
    2122             : 
    2123   230349475 :     for (size_t i = 0; i < instr->InputCount(); i++) {
    2124    92816560 :       InstructionOperand* input = instr->InputAt(i);
    2125    92816560 :       if (input->IsImmediate() || input->IsExplicit()) {
    2126             :         continue;  // Ignore immediates and explicitly reserved registers.
    2127             :       }
    2128             :       LifetimePosition use_pos;
    2129    88672819 :       if (input->IsUnallocated() &&
    2130             :           UnallocatedOperand::cast(input)->IsUsedAtStart()) {
    2131             :         use_pos = curr_position;
    2132             :       } else {
    2133             :         use_pos = curr_position.End();
    2134             :       }
    2135             : 
    2136    52674017 :       if (input->IsUnallocated()) {
    2137             :         UnallocatedOperand* unalloc = UnallocatedOperand::cast(input);
    2138             :         int vreg = unalloc->virtual_register();
    2139             :         live->Add(vreg);
    2140    35998815 :         if (unalloc->HasSlotPolicy()) {
    2141    13336371 :           data()->GetOrCreateLiveRangeFor(vreg)->set_has_slot_use(true);
    2142             :         }
    2143             :       }
    2144             :       Use(block_start_position, use_pos, input);
    2145             :     }
    2146             : 
    2147    46040826 :     for (size_t i = 0; i < instr->TempCount(); i++) {
    2148      662266 :       InstructionOperand* temp = instr->TempAt(i);
    2149             :       // Unsupported.
    2150             :       DCHECK_IMPLIES(temp->IsUnallocated(),
    2151             :                      !UnallocatedOperand::cast(temp)->HasSlotPolicy());
    2152      662266 :       if (instr->ClobbersTemps()) {
    2153           0 :         if (temp->IsRegister()) continue;
    2154           0 :         if (temp->IsUnallocated()) {
    2155             :           UnallocatedOperand* temp_unalloc = UnallocatedOperand::cast(temp);
    2156           0 :           if (temp_unalloc->HasFixedPolicy()) {
    2157             :             continue;
    2158             :           }
    2159             :         }
    2160             :       }
    2161             :       Use(block_start_position, curr_position.End(), temp);
    2162             :       Define(curr_position, temp);
    2163             :     }
    2164             : 
    2165             :     // Process the moves of the instruction's gaps, making their sources live.
    2166             :     const Instruction::GapPosition kPositions[] = {Instruction::END,
    2167    44716294 :                                                    Instruction::START};
    2168             :     curr_position = curr_position.PrevStart();
    2169             :     DCHECK(curr_position.IsGapPosition());
    2170   134148746 :     for (const Instruction::GapPosition& position : kPositions) {
    2171    89432230 :       ParallelMove* move = instr->GetParallelMove(position);
    2172    89432230 :       if (move == nullptr) continue;
    2173    17238452 :       if (position == Instruction::END) {
    2174             :         curr_position = curr_position.End();
    2175             :       } else {
    2176             :         curr_position = curr_position.Start();
    2177             :       }
    2178    63435194 :       for (MoveOperands* cur : *move) {
    2179    28958068 :         InstructionOperand& from = cur->source();
    2180    28958068 :         InstructionOperand& to = cur->destination();
    2181             :         void* hint = &to;
    2182    28958068 :         UsePositionHintType hint_type = UsePosition::HintTypeForOperand(to);
    2183             :         UsePosition* to_use = nullptr;
    2184             :         int phi_vreg = -1;
    2185    28958248 :         if (to.IsUnallocated()) {
    2186             :           int to_vreg = UnallocatedOperand::cast(to).virtual_register();
    2187    12282919 :           TopLevelLiveRange* to_range =
    2188    12282908 :               data()->GetOrCreateLiveRangeFor(to_vreg);
    2189    12282919 :           if (to_range->is_phi()) {
    2190             :             phi_vreg = to_vreg;
    2191     3541027 :             if (to_range->is_non_loop_phi()) {
    2192     2984224 :               hint = to_range->current_hint_position();
    2193             :               hint_type = hint == nullptr ? UsePositionHintType::kNone
    2194     2984224 :                                           : UsePositionHintType::kUsePos;
    2195             :             } else {
    2196             :               hint_type = UsePositionHintType::kPhi;
    2197             :               hint = data()->GetPhiMapValueFor(to_vreg);
    2198             :             }
    2199             :           } else {
    2200     8741892 :             if (live->Contains(to_vreg)) {
    2201             :               to_use = Define(curr_position, &to, &from,
    2202     7109787 :                               UsePosition::HintTypeForOperand(from));
    2203     7109851 :               live->Remove(to_vreg);
    2204             :             } else {
    2205             :               cur->Eliminate();
    2206             :               continue;
    2207             :             }
    2208             :           }
    2209             :         } else {
    2210             :           Define(curr_position, &to);
    2211             :         }
    2212             :         UsePosition* from_use =
    2213    27326136 :             Use(block_start_position, curr_position, &from, hint, hint_type);
    2214             :         // Mark range live.
    2215    27326031 :         if (from.IsUnallocated()) {
    2216             :           live->Add(UnallocatedOperand::cast(from).virtual_register());
    2217             :         }
    2218             :         // Resolve use position hints just created.
    2219    27326031 :         if (to_use != nullptr && from_use != nullptr) {
    2220             :           to_use->ResolveHint(from_use);
    2221             :           from_use->ResolveHint(to_use);
    2222             :         }
    2223             :         DCHECK_IMPLIES(to_use != nullptr, to_use->IsResolved());
    2224             :         DCHECK_IMPLIES(from_use != nullptr, from_use->IsResolved());
    2225             :         // Potentially resolve phi hint.
    2226    27326031 :         if (phi_vreg != -1) ResolvePhiHint(&from, from_use);
    2227             :       }
    2228             :     }
    2229             :   }
    2230    13044554 : }
    2231             : 
    2232    14471239 : void LiveRangeBuilder::ProcessPhis(const InstructionBlock* block,
    2233             :                                    BitVector* live) {
    2234    27515737 :   for (PhiInstruction* phi : block->phis()) {
    2235             :     // The live range interval already ends at the first instruction of the
    2236             :     // block.
    2237             :     int phi_vreg = phi->virtual_register();
    2238     1426747 :     live->Remove(phi_vreg);
    2239             :     // Select a hint from a predecessor block that precedes this block in the
    2240             :     // rpo order. In order of priority:
    2241             :     // - Avoid hints from deferred blocks.
    2242             :     // - Prefer hints from allocated (or explicit) operands.
    2243             :     // - Prefer hints from empty blocks (containing just parallel moves and a
    2244             :     //   jump). In these cases, if we can elide the moves, the jump threader
    2245             :     //   is likely to be able to elide the jump.
    2246             :     // The enforcement of hinting in rpo order is required because hint
    2247             :     // resolution that happens later in the compiler pipeline visits
    2248             :     // instructions in reverse rpo order, relying on the fact that phis are
    2249             :     // encountered before their hints.
    2250             :     InstructionOperand* hint = nullptr;
    2251             :     int hint_preference = 0;
    2252             : 
    2253             :     // The cost of hinting increases with the number of predecessors. At the
    2254             :     // same time, the typical benefit decreases, since this hinting only
    2255             :     // optimises the execution path through one predecessor. A limit of 2 is
    2256             :     // sufficient to hit the common if/else pattern.
    2257             :     int predecessor_limit = 2;
    2258             : 
    2259     4594701 :     for (RpoNumber predecessor : block->predecessors()) {
    2260     7831662 :       const InstructionBlock* predecessor_block =
    2261     2924970 :           code()->InstructionBlockAt(predecessor);
    2262             :       DCHECK_EQ(predecessor_block->rpo_number(), predecessor);
    2263             : 
    2264             :       // Only take hints from earlier rpo numbers.
    2265     2924971 :       if (predecessor >= block->rpo_number()) continue;
    2266             : 
    2267             :       // Look up the predecessor instruction.
    2268             :       const Instruction* predecessor_instr =
    2269     2610556 :           GetLastInstruction(code(), predecessor_block);
    2270             :       InstructionOperand* predecessor_hint = nullptr;
    2271             :       // Phis are assigned in the END position of the last instruction in each
    2272             :       // predecessor block.
    2273     6220083 :       for (MoveOperands* move :
    2274     6220075 :            *predecessor_instr->GetParallelMove(Instruction::END)) {
    2275             :         InstructionOperand& to = move->destination();
    2276     7219048 :         if (to.IsUnallocated() &&
    2277             :             UnallocatedOperand::cast(to).virtual_register() == phi_vreg) {
    2278     2610546 :           predecessor_hint = &move->source();
    2279     2610546 :           break;
    2280             :         }
    2281             :       }
    2282             :       DCHECK_NOT_NULL(predecessor_hint);
    2283             : 
    2284             :       // For each predecessor, generate a score according to the priorities
    2285             :       // described above, and pick the best one. Flags in higher-order bits have
    2286             :       // a higher priority than those in lower-order bits.
    2287             :       int predecessor_hint_preference = 0;
    2288             :       const int kNotDeferredBlockPreference = (1 << 2);
    2289             :       const int kMoveIsAllocatedPreference = (1 << 1);
    2290             :       const int kBlockIsEmptyPreference = (1 << 0);
    2291             : 
    2292             :       // - Avoid hints from deferred blocks.
    2293     2610554 :       if (!predecessor_block->IsDeferred()) {
    2294             :         predecessor_hint_preference |= kNotDeferredBlockPreference;
    2295             :       }
    2296             : 
    2297             :       // - Prefer hints from allocated (or explicit) operands.
    2298             :       //
    2299             :       // Already-allocated or explicit operands are typically assigned using
    2300             :       // the parallel moves on the last instruction. For example:
    2301             :       //
    2302             :       //      gap (v101 = [x0|R|w32]) (v100 = v101)
    2303             :       //      ArchJmp
    2304             :       //    ...
    2305             :       //    phi: v100 = v101 v102
    2306             :       //
    2307             :       // We have already found the END move, so look for a matching START move
    2308             :       // from an allocated (or explicit) operand.
    2309             :       //
    2310             :       // Note that we cannot simply look up data()->live_ranges()[vreg] here
    2311             :       // because the live ranges are still being built when this function is
    2312             :       // called.
    2313             :       // TODO(v8): Find a way to separate hinting from live range analysis in
    2314             :       // BuildLiveRanges so that we can use the O(1) live-range look-up.
    2315             :       auto moves = predecessor_instr->GetParallelMove(Instruction::START);
    2316     2610554 :       if (moves != nullptr) {
    2317      664750 :         for (MoveOperands* move : *moves) {
    2318             :           InstructionOperand& to = move->destination();
    2319      302150 :           if (predecessor_hint->Equals(to)) {
    2320      241700 :             if (move->source().IsAllocated() || move->source().IsExplicit()) {
    2321      241700 :               predecessor_hint_preference |= kMoveIsAllocatedPreference;
    2322             :             }
    2323             :             break;
    2324             :           }
    2325             :         }
    2326             :       }
    2327             : 
    2328             :       // - Prefer hints from empty blocks.
    2329     2610554 :       if (predecessor_block->last_instruction_index() ==
    2330             :           predecessor_block->first_instruction_index()) {
    2331     1045119 :         predecessor_hint_preference |= kBlockIsEmptyPreference;
    2332             :       }
    2333             : 
    2334     5221108 :       if ((hint == nullptr) ||
    2335     2610554 :           (predecessor_hint_preference > hint_preference)) {
    2336             :         // Take the hint from this predecessor.
    2337             :         hint = predecessor_hint;
    2338             :         hint_preference = predecessor_hint_preference;
    2339             :       }
    2340             : 
    2341     2610554 :       if (--predecessor_limit <= 0) break;
    2342             :     }
    2343             :     DCHECK_NOT_NULL(hint);
    2344             : 
    2345             :     LifetimePosition block_start = LifetimePosition::GapFromInstructionIndex(
    2346     1426744 :         block->first_instruction_index());
    2347     1426741 :     UsePosition* use_pos = Define(block_start, &phi->output(), hint,
    2348     2853485 :                                   UsePosition::HintTypeForOperand(*hint));
    2349             :     MapPhiHint(hint, use_pos);
    2350             :   }
    2351    13044495 : }
    2352             : 
    2353             : 
    2354      277300 : void LiveRangeBuilder::ProcessLoopHeader(const InstructionBlock* block,
    2355     1269239 :                                          BitVector* live) {
    2356             :   DCHECK(block->IsLoopHeader());
    2357             :   // Add a live range stretching from the first loop instruction to the last
    2358             :   // for each value live on entry to the header.
    2359      138650 :   BitVector::Iterator iterator(live);
    2360             :   LifetimePosition start = LifetimePosition::GapFromInstructionIndex(
    2361      138650 :       block->first_instruction_index());
    2362             :   LifetimePosition end = LifetimePosition::GapFromInstructionIndex(
    2363      138650 :                              code()->LastLoopInstructionIndex(block))
    2364      138650 :                              .NextFullStart();
    2365     2954430 :   while (!iterator.Done()) {
    2366     1269239 :     int operand_index = iterator.Current();
    2367     1269239 :     TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(operand_index);
    2368     1269240 :     range->EnsureInterval(start, end, allocation_zone());
    2369     1269240 :     iterator.Advance();
    2370             :   }
    2371             :   // Insert all values into the live in sets of all blocks in the loop.
    2372     1960074 :   for (int i = block->rpo_number().ToInt() + 1; i < block->loop_end().ToInt();
    2373             :        ++i) {
    2374     5464272 :     live_in_sets()[i]->Union(*live);
    2375             :   }
    2376      138650 : }
    2377             : 
    2378             : 
    2379     5070279 : void LiveRangeBuilder::BuildLiveRanges() {
    2380             :   // Process the blocks in reverse order.
    2381    15647763 :   for (int block_id = code()->InstructionBlockCount() - 1; block_id >= 0;
    2382             :        --block_id) {
    2383             :     InstructionBlock* block =
    2384    13044608 :         code()->InstructionBlockAt(RpoNumber::FromInt(block_id));
    2385    13044620 :     BitVector* live = ComputeLiveOut(block, data());
    2386             :     // Initially consider all live_out values live for the entire block. We
    2387             :     // will shorten these intervals if necessary.
    2388    13044539 :     AddInitialIntervals(block, live);
    2389             :     // Process the instructions in reverse order, generating and killing
    2390             :     // live values.
    2391    13044616 :     ProcessInstructions(block, live);
    2392             :     // All phi output operands are killed by this block.
    2393    13044584 :     ProcessPhis(block, live);
    2394             :     // Now live is live_in for this block except not including values live
    2395             :     // out on backward successor edges.
    2396    13044588 :     if (block->IsLoopHeader()) ProcessLoopHeader(block, live);
    2397    39133959 :     live_in_sets()[block_id] = live;
    2398             :   }
    2399             :   // Postprocess the ranges.
    2400    95716840 :   for (TopLevelLiveRange* range : data()->live_ranges()) {
    2401    54921444 :     if (range == nullptr) continue;
    2402             :     // Give slots to all ranges with a non fixed slot use.
    2403    29069666 :     if (range->has_slot_use() && range->HasNoSpillType()) {
    2404     1107158 :       data()->AssignSpillRangeToLiveRange(range);
    2405             :     }
    2406             :     // TODO(bmeurer): This is a horrible hack to make sure that for constant
    2407             :     // live ranges, every use requires the constant to be in a register.
    2408             :     // Without this hack, all uses with "any" policy would get the constant
    2409             :     // operand assigned.
    2410    38192196 :     if (range->HasSpillOperand() && range->GetSpillOperand()->IsConstant()) {
    2411    24851029 :       for (UsePosition* pos = range->first_pos(); pos != nullptr;
    2412             :            pos = pos->next()) {
    2413    14781876 :         if (pos->type() == UsePositionType::kRequiresSlot) continue;
    2414             :         UsePositionType new_type = UsePositionType::kAny;
    2415             :         // Can't mark phis as needing a register.
    2416    14781877 :         if (!pos->pos().IsGapPosition()) {
    2417             :           new_type = UsePositionType::kRequiresRegister;
    2418             :         }
    2419             :         pos->set_type(new_type, true);
    2420             :       }
    2421             :     }
    2422             :   }
    2423     3097077 :   for (auto preassigned : data()->preassigned_slot_ranges()) {
    2424      435544 :     TopLevelLiveRange* range = preassigned.first;
    2425             :     int slot_id = preassigned.second;
    2426             :     SpillRange* spill = range->HasSpillRange()
    2427             :                             ? range->GetSpillRange()
    2428      552342 :                             : data()->AssignSpillRangeToLiveRange(range);
    2429             :     spill->set_assigned_slot(slot_id);
    2430             :   }
    2431             : #ifdef DEBUG
    2432             :   Verify();
    2433             : #endif
    2434     1301567 : }
    2435             : 
    2436             : 
    2437           0 : void LiveRangeBuilder::MapPhiHint(InstructionOperand* operand,
    2438             :                                   UsePosition* use_pos) {
    2439             :   DCHECK(!use_pos->IsResolved());
    2440     2853487 :   auto res = phi_hints_.insert(std::make_pair(operand, use_pos));
    2441             :   DCHECK(res.second);
    2442             :   USE(res);
    2443           0 : }
    2444             : 
    2445             : 
    2446     3541017 : void LiveRangeBuilder::ResolvePhiHint(InstructionOperand* operand,
    2447             :                                       UsePosition* use_pos) {
    2448             :   auto it = phi_hints_.find(operand);
    2449     7082034 :   if (it == phi_hints_.end()) return;
    2450             :   DCHECK(!it->second->IsResolved());
    2451     1426734 :   it->second->ResolveHint(use_pos);
    2452             : }
    2453             : 
    2454             : 
    2455           0 : void LiveRangeBuilder::Verify() const {
    2456           0 :   for (auto& hint : phi_hints_) {
    2457           0 :     CHECK(hint.second->IsResolved());
    2458             :   }
    2459           0 :   for (const TopLevelLiveRange* current : data()->live_ranges()) {
    2460           0 :     if (current != nullptr && !current->IsEmpty()) {
    2461             :       // New LiveRanges should not be split.
    2462           0 :       CHECK_NULL(current->next());
    2463             :       // General integrity check.
    2464           0 :       current->Verify();
    2465           0 :       const UseInterval* first = current->first_interval();
    2466           0 :       if (first->next() == nullptr) continue;
    2467             : 
    2468             :       // Consecutive intervals should not end and start in the same block,
    2469             :       // otherwise the intervals should have been joined, because the
    2470             :       // variable is live throughout that block.
    2471           0 :       CHECK(NextIntervalStartsInDifferentBlocks(first));
    2472             : 
    2473           0 :       for (const UseInterval* i = first->next(); i != nullptr; i = i->next()) {
    2474             :         // Except for the first interval, the other intevals must start at
    2475             :         // a block boundary, otherwise data wouldn't flow to them.
    2476           0 :         CHECK(IntervalStartsAtBlockBoundary(i));
    2477             :         // The last instruction of the predecessors of the block the interval
    2478             :         // starts must be covered by the range.
    2479           0 :         CHECK(IntervalPredecessorsCoveredByRange(i, current));
    2480           0 :         if (i->next() != nullptr) {
    2481             :           // Check the consecutive intervals property, except for the last
    2482             :           // interval, where it doesn't apply.
    2483           0 :           CHECK(NextIntervalStartsInDifferentBlocks(i));
    2484             :         }
    2485             :       }
    2486             :     }
    2487             :   }
    2488           0 : }
    2489             : 
    2490           0 : bool LiveRangeBuilder::IntervalStartsAtBlockBoundary(
    2491           0 :     const UseInterval* interval) const {
    2492             :   LifetimePosition start = interval->start();
    2493           0 :   if (!start.IsFullStart()) return false;
    2494             :   int instruction_index = start.ToInstructionIndex();
    2495           0 :   const InstructionBlock* block =
    2496           0 :       data()->code()->GetInstructionBlock(instruction_index);
    2497           0 :   return block->first_instruction_index() == instruction_index;
    2498             : }
    2499             : 
    2500           0 : bool LiveRangeBuilder::IntervalPredecessorsCoveredByRange(
    2501           0 :     const UseInterval* interval, const TopLevelLiveRange* range) const {
    2502             :   LifetimePosition start = interval->start();
    2503             :   int instruction_index = start.ToInstructionIndex();
    2504             :   const InstructionBlock* block =
    2505           0 :       data()->code()->GetInstructionBlock(instruction_index);
    2506           0 :   for (RpoNumber pred_index : block->predecessors()) {
    2507           0 :     const InstructionBlock* predecessor =
    2508           0 :         data()->code()->InstructionBlockAt(pred_index);
    2509             :     LifetimePosition last_pos = LifetimePosition::GapFromInstructionIndex(
    2510             :         predecessor->last_instruction_index());
    2511             :     last_pos = last_pos.NextStart().End();
    2512           0 :     if (!range->Covers(last_pos)) return false;
    2513             :   }
    2514           0 :   return true;
    2515             : }
    2516             : 
    2517           0 : bool LiveRangeBuilder::NextIntervalStartsInDifferentBlocks(
    2518           0 :     const UseInterval* interval) const {
    2519             :   DCHECK_NOT_NULL(interval->next());
    2520             :   LifetimePosition end = interval->end();
    2521             :   LifetimePosition next_start = interval->next()->start();
    2522             :   // Since end is not covered, but the previous position is, move back a
    2523             :   // position
    2524           0 :   end = end.IsStart() ? end.PrevStart().End() : end.Start();
    2525             :   int last_covered_index = end.ToInstructionIndex();
    2526             :   const InstructionBlock* block =
    2527           0 :       data()->code()->GetInstructionBlock(last_covered_index);
    2528             :   const InstructionBlock* next_block =
    2529           0 :       data()->code()->GetInstructionBlock(next_start.ToInstructionIndex());
    2530           0 :   return block->rpo_number() < next_block->rpo_number();
    2531             : }
    2532             : 
    2533    10412356 : RegisterAllocator::RegisterAllocator(RegisterAllocationData* data,
    2534             :                                      RegisterKind kind)
    2535             :     : data_(data),
    2536             :       mode_(kind),
    2537             :       num_registers_(GetRegisterCount(data->config(), kind)),
    2538             :       num_allocatable_registers_(
    2539             :           GetAllocatableRegisterCount(data->config(), kind)),
    2540             :       allocatable_register_codes_(
    2541             :           GetAllocatableRegisterCodes(data->config(), kind)),
    2542    10412356 :       check_fp_aliasing_(false) {
    2543             :   if (!kSimpleFPAliasing && kind == FP_REGISTERS) {
    2544             :     check_fp_aliasing_ = (data->code()->representation_mask() &
    2545             :                           (kFloatRepBit | kSimd128RepBit)) != 0;
    2546             :   }
    2547     2603089 : }
    2548             : 
    2549           0 : LifetimePosition RegisterAllocator::GetSplitPositionForInstruction(
    2550    10151959 :     const LiveRange* range, int instruction_index) {
    2551             :   LifetimePosition ret = LifetimePosition::Invalid();
    2552             : 
    2553             :   ret = LifetimePosition::GapFromInstructionIndex(instruction_index);
    2554    20305391 :   if (range->Start() >= ret || ret >= range->End()) {
    2555             :     return LifetimePosition::Invalid();
    2556             :   }
    2557           0 :   return ret;
    2558             : }
    2559             : 
    2560   112447416 : void RegisterAllocator::SplitAndSpillRangesDefinedByMemoryOperand() {
    2561     2603310 :   size_t initial_range_count = data()->live_ranges().size();
    2562   112447283 :   for (size_t i = 0; i < initial_range_count; ++i) {
    2563   234223366 :     TopLevelLiveRange* range = data()->live_ranges()[i];
    2564   109844106 :     if (!CanProcessRange(range)) continue;
    2565    58346162 :     if (range->HasNoSpillType() ||
    2566     1909362 :         (range->HasSpillRange() && !range->has_slot_use())) {
    2567             :       continue;
    2568             :     }
    2569           0 :     LifetimePosition start = range->Start();
    2570    14535150 :     TRACE("Live range %d:%d is defined by a spill operand.\n",
    2571             :           range->TopLevel()->vreg(), range->relative_id());
    2572             :     LifetimePosition next_pos = start;
    2573    14535154 :     if (next_pos.IsGapPosition()) {
    2574             :       next_pos = next_pos.NextStart();
    2575             :     }
    2576             : 
    2577             :     // With splinters, we can be more strict and skip over positions
    2578             :     // not strictly needing registers.
    2579             :     UsePosition* pos =
    2580             :         range->IsSplinter()
    2581     2518006 :             ? range->NextRegisterPosition(next_pos)
    2582    17053160 :             : range->NextUsePositionRegisterIsBeneficial(next_pos);
    2583             :     // If the range already has a spill operand and it doesn't need a
    2584             :     // register immediately, split it and spill the first part of the range.
    2585    14535150 :     if (pos == nullptr) {
    2586     4072586 :       Spill(range);
    2587    10462564 :     } else if (pos->pos() > range->Start().NextStart()) {
    2588             :       // Do not spill live range eagerly if use position that can benefit from
    2589             :       // the register is too close to the start of live range.
    2590             :       LifetimePosition split_pos = GetSplitPositionForInstruction(
    2591             :           range, pos->pos().ToInstructionIndex());
    2592             :       // There is no place to split, so we can't split and spill.
    2593    10153432 :       if (!split_pos.IsValid()) continue;
    2594             : 
    2595             :       split_pos =
    2596    10151963 :           FindOptimalSplitPos(range->Start().NextFullStart(), split_pos);
    2597             : 
    2598    10151947 :       SplitRangeAt(range, split_pos);
    2599    10151940 :       Spill(range);
    2600             :     }
    2601             :   }
    2602     2603177 : }
    2603             : 
    2604             : 
    2605    18033971 : LiveRange* RegisterAllocator::SplitRangeAt(LiveRange* range,
    2606             :                                            LifetimePosition pos) {
    2607             :   DCHECK(!range->TopLevel()->IsFixed());
    2608    18033971 :   TRACE("Splitting live range %d:%d at %d\n", range->TopLevel()->vreg(),
    2609             :         range->relative_id(), pos.value());
    2610             : 
    2611    18033978 :   if (pos <= range->Start()) return range;
    2612             : 
    2613             :   // We can't properly connect liveranges if splitting occurred at the end
    2614             :   // a block.
    2615             :   DCHECK(pos.IsStart() || pos.IsGapPosition() ||
    2616             :          (GetInstructionBlock(code(), pos)->last_instruction_index() !=
    2617             :           pos.ToInstructionIndex()));
    2618             : 
    2619    16018937 :   LiveRange* result = range->SplitAt(pos, allocation_zone());
    2620    16018923 :   return result;
    2621             : }
    2622             : 
    2623             : 
    2624     2067128 : LiveRange* RegisterAllocator::SplitBetween(LiveRange* range,
    2625             :                                            LifetimePosition start,
    2626             :                                            LifetimePosition end) {
    2627             :   DCHECK(!range->TopLevel()->IsFixed());
    2628     2067128 :   TRACE("Splitting live range %d:%d in position between [%d, %d]\n",
    2629             :         range->TopLevel()->vreg(), range->relative_id(), start.value(),
    2630             :         end.value());
    2631             : 
    2632     2067128 :   LifetimePosition split_pos = FindOptimalSplitPos(start, end);
    2633             :   DCHECK(split_pos >= start);
    2634     2067128 :   return SplitRangeAt(range, split_pos);
    2635             : }
    2636             : 
    2637             : 
    2638    12219079 : LifetimePosition RegisterAllocator::FindOptimalSplitPos(LifetimePosition start,
    2639             :                                                         LifetimePosition end) {
    2640             :   int start_instr = start.ToInstructionIndex();
    2641             :   int end_instr = end.ToInstructionIndex();
    2642             :   DCHECK(start_instr <= end_instr);
    2643             : 
    2644             :   // We have no choice
    2645    12219079 :   if (start_instr == end_instr) return end;
    2646             : 
    2647             :   const InstructionBlock* start_block = GetInstructionBlock(code(), start);
    2648             :   const InstructionBlock* end_block = GetInstructionBlock(code(), end);
    2649             : 
    2650     7494704 :   if (end_block == start_block) {
    2651             :     // The interval is split in the same basic block. Split at the latest
    2652             :     // possible position.
    2653     5721232 :     return end;
    2654             :   }
    2655             : 
    2656      113893 :   const InstructionBlock* block = end_block;
    2657             :   // Find header of outermost loop.
    2658             :   do {
    2659             :     const InstructionBlock* loop = GetContainingLoop(code(), block);
    2660     1859673 :     if (loop == nullptr ||
    2661             :         loop->rpo_number().ToInt() <= start_block->rpo_number().ToInt()) {
    2662             :       // No more loops or loop starts before the lifetime start.
    2663             :       break;
    2664             :     }
    2665             :     block = loop;
    2666             :   } while (true);
    2667             : 
    2668             :   // We did not find any suitable outer loop. Split at the latest possible
    2669             :   // position unless end_block is a loop header itself.
    2670     3464178 :   if (block == end_block && !end_block->IsLoopHeader()) return end;
    2671             : 
    2672             :   return LifetimePosition::GapFromInstructionIndex(
    2673             :       block->first_instruction_index());
    2674             : }
    2675             : 
    2676             : 
    2677      219673 : LifetimePosition RegisterAllocator::FindOptimalSpillingPos(
    2678             :     LiveRange* range, LifetimePosition pos) {
    2679             :   const InstructionBlock* block = GetInstructionBlock(code(), pos.Start());
    2680       56488 :   const InstructionBlock* loop_header =
    2681      219673 :       block->IsLoopHeader() ? block : GetContainingLoop(code(), block);
    2682             : 
    2683      219673 :   if (loop_header == nullptr) return pos;
    2684             : 
    2685             :   const UsePosition* prev_use =
    2686             :       range->PreviousUsePositionRegisterIsBeneficial(pos);
    2687             : 
    2688       97089 :   while (loop_header != nullptr) {
    2689             :     // We are going to spill live range inside the loop.
    2690             :     // If possible try to move spilling position backwards to loop header.
    2691             :     // This will reduce number of memory moves on the back edge.
    2692             :     LifetimePosition loop_start = LifetimePosition::GapFromInstructionIndex(
    2693             :         loop_header->first_instruction_index());
    2694             : 
    2695       56488 :     if (range->Covers(loop_start)) {
    2696       39711 :       if (prev_use == nullptr || prev_use->pos() < loop_start) {
    2697             :         // No register beneficial use inside the loop before the pos.
    2698             :         pos = loop_start;
    2699             :       }
    2700             :     }
    2701             : 
    2702             :     // Try hoisting out to an outer loop.
    2703             :     loop_header = GetContainingLoop(code(), loop_header);
    2704             :   }
    2705             : 
    2706       40601 :   return pos;
    2707             : }
    2708             : 
    2709             : 
    2710    20684374 : void RegisterAllocator::Spill(LiveRange* range) {
    2711             :   DCHECK(!range->spilled());
    2712           0 :   TopLevelLiveRange* first = range->TopLevel();
    2713    18969295 :   TRACE("Spilling live range %d:%d\n", first->vreg(), range->relative_id());
    2714             : 
    2715    18969400 :   if (first->HasNoSpillType()) {
    2716     1715079 :     data()->AssignSpillRangeToLiveRange(first);
    2717             :   }
    2718             :   range->Spill();
    2719    18969394 : }
    2720             : 
    2721           0 : const char* RegisterAllocator::RegisterName(int register_code) const {
    2722           0 :   if (mode() == GENERAL_REGISTERS) {
    2723           0 :     return data()->config()->GetGeneralRegisterName(register_code);
    2724             :   } else {
    2725           0 :     return data()->config()->GetDoubleRegisterName(register_code);
    2726             :   }
    2727             : }
    2728             : 
    2729             : 
// Sets up the three worklists used by the linear-scan algorithm and
// pre-reserves capacity to limit reallocation while allocating.
LinearScanAllocator::LinearScanAllocator(RegisterAllocationData* data,
                                         RegisterKind kind, Zone* local_zone)
    : RegisterAllocator(data, kind),
      unhandled_live_ranges_(local_zone),
      active_live_ranges_(local_zone),
      inactive_live_ranges_(local_zone) {
  // Ranges may be split, so reserve twice the virtual register count.
  unhandled_live_ranges().reserve(
      static_cast<size_t>(code()->VirtualRegisterCount() * 2));
  active_live_ranges().reserve(8);
  inactive_live_ranges().reserve(8);
  // TryAllocateFreeReg and AllocateBlockedReg assume this
  // when allocating local arrays.
  DCHECK_GE(RegisterConfiguration::kMaxFPRegisters,
            this->data()->config()->num_general_registers());
}
    2745             : 
    2746             : 
// Main linear-scan driver. Seeds the unhandled list with every allocatable,
// non-spilled live range, adds the fixed ranges for this register kind to the
// inactive set, then processes ranges in order of increasing start position
// while maintaining the classic active/inactive/handled partition.
void LinearScanAllocator::AllocateRegisters() {
  DCHECK(unhandled_live_ranges().empty());
  DCHECK(active_live_ranges().empty());
  DCHECK(inactive_live_ranges().empty());

  SplitAndSpillRangesDefinedByMemoryOperand();

  // Queue every child range that still needs a register.
  for (TopLevelLiveRange* range : data()->live_ranges()) {
    if (!CanProcessRange(range)) continue;
    for (LiveRange* to_add = range; to_add != nullptr;
         to_add = to_add->next()) {
      if (!to_add->spilled()) {
        AddToUnhandledUnsorted(to_add);
      }
    }
  }
  SortUnhandled();
  DCHECK(UnhandledIsSorted());

  // Fixed ranges (pre-assigned machine registers) start out inactive; they
  // only block allocation where they intersect a candidate range.
  if (mode() == GENERAL_REGISTERS) {
    for (TopLevelLiveRange* current : data()->fixed_live_ranges()) {
      if (current != nullptr) AddToInactive(current);
    }
  } else {
    for (TopLevelLiveRange* current : data()->fixed_double_live_ranges()) {
      if (current != nullptr) AddToInactive(current);
    }
    if (!kSimpleFPAliasing && check_fp_aliasing()) {
      for (TopLevelLiveRange* current : data()->fixed_float_live_ranges()) {
        if (current != nullptr) AddToInactive(current);
      }
      for (TopLevelLiveRange* current : data()->fixed_simd128_live_ranges()) {
        if (current != nullptr) AddToInactive(current);
      }
    }
  }

  while (!unhandled_live_ranges().empty()) {
    DCHECK(UnhandledIsSorted());
    // The list is sorted by descending start position, so the back holds the
    // range with the smallest start.
    LiveRange* current = unhandled_live_ranges().back();
    unhandled_live_ranges().pop_back();
    DCHECK(UnhandledIsSorted());
    LifetimePosition position = current->Start();
#ifdef DEBUG
    allocation_finger_ = position;
#endif
    TRACE("Processing interval %d:%d start=%d\n", current->TopLevel()->vreg(),
          current->relative_id(), position.value());

    if (current->IsTopLevel() && TryReuseSpillForPhi(current->TopLevel()))
      continue;

    // Retire active ranges that ended before |position|, and deactivate those
    // with a lifetime hole at |position|.
    for (size_t i = 0; i < active_live_ranges().size(); ++i) {
      LiveRange* cur_active = active_live_ranges()[i];
      if (cur_active->End() <= position) {
        ActiveToHandled(cur_active);
        --i;  // The live range was removed from the list of active live ranges.
      } else if (!cur_active->Covers(position)) {
        ActiveToInactive(cur_active);
        --i;  // The live range was removed from the list of active live ranges.
      }
    }

    // Retire inactive ranges that ended, and reactivate those that cover
    // |position| again.
    for (size_t i = 0; i < inactive_live_ranges().size(); ++i) {
      LiveRange* cur_inactive = inactive_live_ranges()[i];
      if (cur_inactive->End() <= position) {
        InactiveToHandled(cur_inactive);
        --i;  // Live range was removed from the list of inactive live ranges.
      } else if (cur_inactive->Covers(position)) {
        InactiveToActive(cur_inactive);
        --i;  // Live range was removed from the list of inactive live ranges.
      }
    }

    DCHECK(!current->HasRegisterAssigned() && !current->spilled());

    ProcessCurrentRange(current);
  }
}
    2826             : 
    2827     1298906 : bool LinearScanAllocator::TrySplitAndSpillSplinter(LiveRange* range) {
    2828             :   DCHECK(range->TopLevel()->IsSplinter());
    2829             :   // If we can spill the whole range, great. Otherwise, split above the
    2830             :   // first use needing a register and spill the top part.
    2831     1298906 :   const UsePosition* next_reg = range->NextRegisterPosition(range->Start());
    2832     1298887 :   if (next_reg == nullptr) {
    2833      949582 :     Spill(range);
    2834      949600 :     return true;
    2835      349305 :   } else if (range->FirstHintPosition() == nullptr) {
    2836             :     // If there was no hint, but we have a use position requiring a
    2837             :     // register, apply the hot path heuristics.
    2838             :     return false;
    2839      183204 :   } else if (next_reg->pos().PrevStart() > range->Start()) {
    2840       89607 :     LiveRange* tail = SplitRangeAt(range, next_reg->pos().PrevStart());
    2841       89607 :     AddToUnhandledSorted(tail);
    2842       89607 :     Spill(range);
    2843       89607 :     return true;
    2844             :   }
    2845             :   return false;
    2846             : }
    2847             : 
// Records that |range| lives in |reg|: marks the register as allocated for
// this representation, stores the assignment on the range, propagates it as
// a hint to the range's uses, and mirrors it into the phi map when the range
// is defined by a phi.
void LinearScanAllocator::SetLiveRangeAssignedRegister(LiveRange* range,
                                                       int reg) {
  data()->MarkAllocated(range->representation(), reg);
  range->set_assigned_register(reg);
  range->SetUseHints(reg);
  if (range->IsTopLevel() && range->TopLevel()->is_phi()) {
    data()->GetPhiMapValueFor(range->TopLevel())->set_assigned_register(reg);
  }
}
    2857             : 
    2858             : 
// Appends |range| to the active list (ranges covering the current position).
void LinearScanAllocator::AddToActive(LiveRange* range) {
  TRACE("Add live range %d:%d to active\n", range->TopLevel()->vreg(),
        range->relative_id());
  active_live_ranges().push_back(range);
}
    2864             : 
    2865             : 
// Appends |range| to the inactive list (ranges that are live later but have
// a hole at the current position; fixed ranges also start here).
void LinearScanAllocator::AddToInactive(LiveRange* range) {
  TRACE("Add live range %d:%d to inactive\n", range->TopLevel()->vreg(),
        range->relative_id());
  inactive_live_ranges().push_back(range);
}
    2871             : 
    2872             : 
// Inserts |range| into the unhandled list while preserving its descending
// start-position order (the next range to process is always at the back).
void LinearScanAllocator::AddToUnhandledSorted(LiveRange* range) {
  if (range == nullptr || range->IsEmpty()) return;
  DCHECK(!range->HasRegisterAssigned() && !range->spilled());
  DCHECK(allocation_finger_ <= range->Start());
  // Scan from the back (smallest starts) towards the front for the first
  // element that must precede |range|, and insert immediately after it.
  for (int i = static_cast<int>(unhandled_live_ranges().size() - 1); i >= 0;
       --i) {
    LiveRange* cur_range = unhandled_live_ranges().at(i);
    if (!range->ShouldBeAllocatedBefore(cur_range)) continue;
    TRACE("Add live range %d:%d to unhandled at %d\n",
          range->TopLevel()->vreg(), range->relative_id(), i + 1);
    auto it = unhandled_live_ranges().begin() + (i + 1);
    unhandled_live_ranges().insert(it, range);
    DCHECK(UnhandledIsSorted());
    return;
  }
  // |range| must be allocated before everything currently queued: it goes to
  // the front.
  TRACE("Add live range %d:%d to unhandled at start\n",
        range->TopLevel()->vreg(), range->relative_id());
  unhandled_live_ranges().insert(unhandled_live_ranges().begin(), range);
  DCHECK(UnhandledIsSorted());
}
    2893             : 
    2894             : 
// Appends |range| to the unhandled list without maintaining sort order;
// callers are expected to invoke SortUnhandled() before allocation starts.
void LinearScanAllocator::AddToUnhandledUnsorted(LiveRange* range) {
  if (range == nullptr || range->IsEmpty()) return;
  DCHECK(!range->HasRegisterAssigned() && !range->spilled());
  TRACE("Add live range %d:%d to unhandled unsorted at end\n",
        range->TopLevel()->vreg(), range->relative_id());
  unhandled_live_ranges().push_back(range);
}
    2902             : 
    2903             : 
    2904   212926248 : static bool UnhandledSortHelper(LiveRange* a, LiveRange* b) {
    2905             :   DCHECK(!a->ShouldBeAllocatedBefore(b) || !b->ShouldBeAllocatedBefore(a));
    2906   203761256 :   if (a->ShouldBeAllocatedBefore(b)) return false;
    2907   146832777 :   if (b->ShouldBeAllocatedBefore(a)) return true;
    2908     9164992 :   return a->TopLevel()->vreg() < b->TopLevel()->vreg();
    2909             : }
    2910             : 
    2911             : 
    2912             : // Sort the unhandled live ranges so that the ranges to be processed first are
    2913             : // at the end of the array list.  This is convenient for the register allocation
    2914             : // algorithm because it is efficient to remove elements from the end.
void LinearScanAllocator::SortUnhandled() {
  TRACE("Sort unhandled\n");
  // Descending start order; see UnhandledSortHelper for tie-breaking.
  std::sort(unhandled_live_ranges().begin(), unhandled_live_ranges().end(),
            &UnhandledSortHelper);
}
    2920             : 
    2921             : 
    2922           0 : bool LinearScanAllocator::UnhandledIsSorted() {
    2923           0 :   size_t len = unhandled_live_ranges().size();
    2924           0 :   for (size_t i = 1; i < len; i++) {
    2925           0 :     LiveRange* a = unhandled_live_ranges().at(i - 1);
    2926           0 :     LiveRange* b = unhandled_live_ranges().at(i);
    2927           0 :     if (a->Start() < b->Start()) return false;
    2928             :   }
    2929             :   return true;
    2930             : }
    2931             : 
    2932             : 
// Removes |range| from the active list; it needs no further tracking.
void LinearScanAllocator::ActiveToHandled(LiveRange* range) {
  RemoveElement(&active_live_ranges(), range);
  TRACE("Moving live range %d:%d from active to handled\n",
        range->TopLevel()->vreg(), range->relative_id());
}
    2938             : 
    2939             : 
// Moves |range| from the active to the inactive list (it has a lifetime hole
// at the current position but becomes live again later).
void LinearScanAllocator::ActiveToInactive(LiveRange* range) {
  RemoveElement(&active_live_ranges(), range);
  inactive_live_ranges().push_back(range);
  TRACE("Moving live range %d:%d from active to inactive\n",
        range->TopLevel()->vreg(), range->relative_id());
}
    2946             : 
    2947             : 
// Removes |range| from the inactive list; it needs no further tracking.
void LinearScanAllocator::InactiveToHandled(LiveRange* range) {
  RemoveElement(&inactive_live_ranges(), range);
  TRACE("Moving live range %d:%d from inactive to handled\n",
        range->TopLevel()->vreg(), range->relative_id());
}
    2953             : 
    2954             : 
// Moves |range| from the inactive to the active list (it covers the current
// allocation position again).
void LinearScanAllocator::InactiveToActive(LiveRange* range) {
  RemoveElement(&inactive_live_ranges(), range);
  active_live_ranges().push_back(range);
  TRACE("Moving live range %d:%d from inactive to active\n",
        range->TopLevel()->vreg(), range->relative_id());
}
    2961             : 
    2962           0 : void LinearScanAllocator::GetFPRegisterSet(MachineRepresentation rep,
    2963             :                                            int* num_regs, int* num_codes,
    2964             :                                            const int** codes) const {
    2965             :   DCHECK(!kSimpleFPAliasing);
    2966           0 :   if (rep == MachineRepresentation::kFloat32) {
    2967           0 :     *num_regs = data()->config()->num_float_registers();
    2968           0 :     *num_codes = data()->config()->num_allocatable_float_registers();
    2969           0 :     *codes = data()->config()->allocatable_float_codes();
    2970           0 :   } else if (rep == MachineRepresentation::kSimd128) {
    2971           0 :     *num_regs = data()->config()->num_simd128_registers();
    2972           0 :     *num_codes = data()->config()->num_allocatable_simd128_registers();
    2973           0 :     *codes = data()->config()->allocatable_simd128_codes();
    2974             :   } else {
    2975           0 :     UNREACHABLE();
    2976             :   }
    2977           0 : }
    2978             : 
// Computes, for each register code, the position up to which that register is
// free with respect to |range|: blocked from position 0 by any active range
// holding it, and from the first intersection point by any inactive range.
// Results are written into |positions|, indexed by register code.
void LinearScanAllocator::FindFreeRegistersForRange(
    LiveRange* range, Vector<LifetimePosition> positions) {
  int num_regs = num_registers();
  int num_codes = num_allocatable_registers();
  const int* codes = allocatable_register_codes();
  MachineRepresentation rep = range->representation();
  if (!kSimpleFPAliasing && (rep == MachineRepresentation::kFloat32 ||
                             rep == MachineRepresentation::kSimd128))
    GetFPRegisterSet(rep, &num_regs, &num_codes, &codes);
  DCHECK_GE(positions.length(), num_regs);

  // Start from "free forever" and clamp down below.
  for (int i = 0; i < num_regs; ++i) {
    positions[i] = LifetimePosition::MaxPosition();
  }

  // Active ranges block their register (or all its aliases) entirely.
  for (LiveRange* cur_active : active_live_ranges()) {
    int cur_reg = cur_active->assigned_register();
    if (kSimpleFPAliasing || !check_fp_aliasing()) {
      positions[cur_reg] = LifetimePosition::GapFromInstructionIndex(0);
      TRACE("Register %s is free until pos %d (1)\n", RegisterName(cur_reg),
            LifetimePosition::GapFromInstructionIndex(0).value());
    } else {
      int alias_base_index = -1;
      int aliases = data()->config()->GetAliases(
          cur_active->representation(), cur_reg, rep, &alias_base_index);
      DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
      while (aliases--) {
        int aliased_reg = alias_base_index + aliases;
        positions[aliased_reg] = LifetimePosition::GapFromInstructionIndex(0);
      }
    }
  }

  // Inactive ranges block their register only from where they intersect
  // |range|, if at all.
  for (LiveRange* cur_inactive : inactive_live_ranges()) {
    DCHECK(cur_inactive->End() > range->Start());
    int cur_reg = cur_inactive->assigned_register();
    // No need to carry out intersections, when this register won't be
    // interesting to this range anyway.
    // TODO(mtrofin): extend to aliased ranges, too.
    if ((kSimpleFPAliasing || !check_fp_aliasing()) &&
        positions[cur_reg] < range->Start()) {
      continue;
    }

    LifetimePosition next_intersection = cur_inactive->FirstIntersection(range);
    if (!next_intersection.IsValid()) continue;
    if (kSimpleFPAliasing || !check_fp_aliasing()) {
      positions[cur_reg] = Min(positions[cur_reg], next_intersection);
      TRACE("Register %s is free until pos %d (2)\n", RegisterName(cur_reg),
            Min(positions[cur_reg], next_intersection).value());
    } else {
      int alias_base_index = -1;
      int aliases = data()->config()->GetAliases(
          cur_inactive->representation(), cur_reg, rep, &alias_base_index);
      DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
      while (aliases--) {
        int aliased_reg = alias_base_index + aliases;
        positions[aliased_reg] = Min(positions[aliased_reg], next_intersection);
      }
    }
  }
}
    3041             : 
    3042             : // High-level register allocation summary:
    3043             : //
    3044             : // For regular, or hot (i.e. not splinter) ranges, we attempt to first
    3045             : // allocate first the preferred (hint) register. If that is not possible,
    3046             : // we find a register that's free, and allocate that. If that's not possible,
    3047             : // we search for a register to steal from a range that was allocated. The
    3048             : // goal is to optimize for throughput by avoiding register-to-memory
    3049             : // moves, which are expensive.
    3050             : //
    3051             : // For splinters, the goal is to minimize the number of moves. First we try
    3052             : // to allocate the preferred register (more discussion follows). Failing that,
// we bail out and spill as far as we can, unless the first use is at start,
// in which case we apply the same behavior as we do for regular ranges.
    3055             : // If there is no hint, we apply the hot-path behavior.
    3056             : //
    3057             : // For the splinter, the hint register may come from:
    3058             : //
    3059             : // - the hot path (we set it at splintering time with SetHint). In this case, if
    3060             : // we cannot offer the hint register, spilling is better because it's at most
    3061             : // 1 move, while trying to find and offer another register is at least 1 move.
    3062             : //
// - a constraint. If we cannot offer that register, it's because there is some
// interference. So offering the hint register up to the interference would
// result in a move at the interference, plus a move to satisfy the constraint.
// This is also the number of moves if we spill, with the potential of the
// range being already spilled and thus saving a move (the spill).
// Note that this can only be an input constraint; if it were an output one,
// the range wouldn't be a splinter, because that would mean it is defined in
// a deferred block, and we don't mark those as splinters (they live in
// deferred blocks only).
    3074             : //
    3075             : // - a phi. The same analysis as in the case of the input constraint applies.
    3076             : //
void LinearScanAllocator::ProcessCurrentRange(LiveRange* current) {
  // Stack buffer sized for the largest register file; the ctor DCHECKs that
  // kMaxFPRegisters also covers the general-register count.
  LifetimePosition free_until_pos_buff[RegisterConfiguration::kMaxFPRegisters];
  Vector<LifetimePosition> free_until_pos(
      free_until_pos_buff, RegisterConfiguration::kMaxFPRegisters);
  FindFreeRegistersForRange(current, free_until_pos);
  // See the high-level summary comment above this function for the rationale
  // of each fallback step.
  if (!TryAllocatePreferredReg(current, free_until_pos)) {
    if (current->TopLevel()->IsSplinter()) {
      // Splinters prefer spilling over taking a non-hinted register.
      if (TrySplitAndSpillSplinter(current)) return;
    }
    if (!TryAllocateFreeReg(current, free_until_pos)) {
      AllocateBlockedReg(current);
    }
  }
  if (current->HasRegisterAssigned()) {
    AddToActive(current);
  }
}
    3094             : 
    3095    34224477 : bool LinearScanAllocator::TryAllocatePreferredReg(
    3096    40901044 :     LiveRange* current, const Vector<LifetimePosition>& free_until_pos) {
    3097             :   int hint_register;
    3098    34224477 :   if (current->FirstHintPosition(&hint_register) != nullptr) {
    3099    20450512 :     TRACE(
    3100             :         "Found reg hint %s (free until [%d) for live range %d:%d (end %d[).\n",
    3101             :         RegisterName(hint_register), free_until_pos[hint_register].value(),
    3102             :         current->TopLevel()->vreg(), current->relative_id(),
    3103             :         current->End().value());
    3104             : 
    3105             :     // The desired register is free until the end of the current live range.
    3106    40901044 :     if (free_until_pos[hint_register] >= current->End()) {
    3107    12804120 :       TRACE("Assigning preferred reg %s to live range %d:%d\n",
    3108             :             RegisterName(hint_register), current->TopLevel()->vreg(),
    3109             :             current->relative_id());
    3110    12804120 :       SetLiveRangeAssignedRegister(current, hint_register);
    3111    12804084 :       return true;
    3112             :     }
    3113             :   }
    3114             :   return false;
    3115             : }
    3116             : 
// Picks the register that remains free the longest and assigns it to
// |current|. Fails if no register is free at the range's start. If the best
// register becomes blocked before the range ends, |current| is split at that
// point and only the head is allocated here; the tail is re-queued.
bool LinearScanAllocator::TryAllocateFreeReg(
    LiveRange* current, const Vector<LifetimePosition>& free_until_pos) {
  int num_regs = 0;  // used only for the call to GetFPRegisterSet.
  int num_codes = num_allocatable_registers();
  const int* codes = allocatable_register_codes();
  MachineRepresentation rep = current->representation();
  if (!kSimpleFPAliasing && (rep == MachineRepresentation::kFloat32 ||
                             rep == MachineRepresentation::kSimd128)) {
    GetFPRegisterSet(rep, &num_regs, &num_codes, &codes);
  }

  DCHECK_GE(free_until_pos.length(), num_codes);

  // Find the register which stays free for the longest time.
  int reg = codes[0];
  for (int i = 1; i < num_codes; ++i) {
    int code = codes[i];
    if (free_until_pos[code] > free_until_pos[reg]) {
      reg = code;
    }
  }

  LifetimePosition pos = free_until_pos[reg];

  if (pos <= current->Start()) {
    // All registers are blocked.
    return false;
  }

  if (pos < current->End()) {
    // Register reg is available at the range start but becomes blocked before
    // the range end. Split current at position where it becomes blocked.
    LiveRange* tail = SplitRangeAt(current, pos);
    AddToUnhandledSorted(tail);

    // Try to allocate preferred register once more.
    if (TryAllocatePreferredReg(current, free_until_pos)) return true;
  }

  // Register reg is available at the range start and is free until the range
  // end.
  DCHECK(pos >= current->End());
  TRACE("Assigning free reg %s to live range %d:%d\n", RegisterName(reg),
        current->TopLevel()->vreg(), current->relative_id());
  SetLiveRangeAssignedRegister(current, reg);

  return true;
}
    3165             : 
    3166     3906790 : void LinearScanAllocator::AllocateBlockedReg(LiveRange* current) {
    3167     3687118 :   UsePosition* register_use = current->NextRegisterPosition(current->Start());
    3168     3687138 :   if (register_use == nullptr) {
    3169             :     // There is no use in the current live range that requires a register.
    3170             :     // We can just spill it.
    3171     1458331 :     Spill(current);
    3172     1458329 :     return;
    3173             :   }
    3174             : 
    3175             :   int num_regs = num_registers();
    3176             :   int num_codes = num_allocatable_registers();
    3177             :   const int* codes = allocatable_register_codes();
    3178             :   MachineRepresentation rep = current->representation();
    3179             :   if (!kSimpleFPAliasing && (rep == MachineRepresentation::kFloat32 ||
    3180             :                              rep == MachineRepresentation::kSimd128))
    3181             :     GetFPRegisterSet(rep, &num_regs, &num_codes, &codes);
    3182             : 
    3183             :   // use_pos keeps track of positions a register/alias is used at.
    3184             :   // block_pos keeps track of positions where a register/alias is blocked
    3185             :   // from.
    3186    73549991 :   LifetimePosition use_pos[RegisterConfiguration::kMaxFPRegisters];
    3187    71321216 :   LifetimePosition block_pos[RegisterConfiguration::kMaxFPRegisters];
    3188    35660446 :   for (int i = 0; i < num_regs; i++) {
    3189    35660446 :     use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
    3190             :   }
    3191             : 
    3192    58735824 :   for (LiveRange* range : active_live_ranges()) {
    3193             :     int cur_reg = range->assigned_register();
    3194             :     bool is_fixed_or_cant_spill =
    3195    30091421 :         range->TopLevel()->IsFixed() || !range->CanBeSpilled(current->Start());
    3196             :     if (kSimpleFPAliasing || !check_fp_aliasing()) {
    3197    27139116 :       if (is_fixed_or_cant_spill) {
    3198             :         block_pos[cur_reg] = use_pos[cur_reg] =
    3199    24380579 :             LifetimePosition::GapFromInstructionIndex(0);
    3200             :       } else {
    3201             :         DCHECK_NE(LifetimePosition::GapFromInstructionIndex(0),
    3202             :                   block_pos[cur_reg]);
    3203             :         use_pos[cur_reg] =
    3204     2758537 :             range->NextLifetimePositionRegisterIsBeneficial(current->Start());
    3205             :       }
    3206             :     } else {
    3207             :       int alias_base_index = -1;
    3208             :       int aliases = data()->config()->GetAliases(
    3209             :           range->representation(), cur_reg, rep, &alias_base_index);
    3210             :       DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
    3211             :       while (aliases--) {
    3212             :         int aliased_reg = alias_base_index + aliases;
    3213             :         if (is_fixed_or_cant_spill) {
    3214             :           block_pos[aliased_reg] = use_pos[aliased_reg] =
    3215             :               LifetimePosition::GapFromInstructionIndex(0);
    3216             :         } else {
    3217             :           use_pos[aliased_reg] =
    3218             :               Min(block_pos[aliased_reg],
    3219             :                   range->NextLifetimePositionRegisterIsBeneficial(
    3220             :                       current->Start()));
    3221             :         }
    3222             :       }
    3223             :     }
    3224             :   }
    3225             : 
    3226    12838550 :   for (LiveRange* range : inactive_live_ranges()) {
    3227             :     DCHECK(range->End() > current->Start());
    3228             :     int cur_reg = range->assigned_register();
    3229     4190488 :     bool is_fixed = range->TopLevel()->IsFixed();
    3230             : 
    3231             :     // Don't perform costly intersections if they are guaranteed to not update
    3232             :     // block_pos or use_pos.
    3233             :     // TODO(mtrofin): extend to aliased ranges, too.
    3234             :     if ((kSimpleFPAliasing || !check_fp_aliasing())) {
    3235     4190488 :       if (is_fixed) {
    3236     9084838 :         if (block_pos[cur_reg] < range->Start()) continue;
    3237             :       } else {
    3238     3003058 :         if (use_pos[cur_reg] < range->Start()) continue;
    3239             :       }
    3240             :     }
    3241             : 
    3242     2722378 :     LifetimePosition next_intersection = range->FirstIntersection(current);
    3243     2722378 :     if (!next_intersection.IsValid()) continue;
    3244             : 
    3245             :     if (kSimpleFPAliasing || !check_fp_aliasing()) {
    3246      483568 :       if (is_fixed) {
    3247      472782 :         block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
    3248      472782 :         use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
    3249             :       } else {
    3250       10786 :         use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
    3251             :       }
    3252             :     } else {
    3253             :       int alias_base_index = -1;
    3254             :       int aliases = data()->config()->GetAliases(
    3255             :           range->representation(), cur_reg, rep, &alias_base_index);
    3256             :       DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
    3257             :       while (aliases--) {
    3258             :         int aliased_reg = alias_base_index + aliases;
    3259             :         if (is_fixed) {
    3260             :           block_pos[aliased_reg] =
    3261             :               Min(block_pos[aliased_reg], next_intersection);
    3262             :           use_pos[aliased_reg] =
    3263             :               Min(block_pos[aliased_reg], use_pos[aliased_reg]);
    3264             :         } else {
    3265             :           use_pos[aliased_reg] = Min(use_pos[aliased_reg], next_intersection);
    3266             :         }
    3267             :       }
    3268             :     }
    3269             :   }
    3270             : 
    3271     2228787 :   int reg = codes[0];
    3272    27139172 :   for (int i = 1; i < num_codes; ++i) {
    3273    24910385 :     int code = codes[i];
    3274    49820770 :     if (use_pos[code] > use_pos[reg]) {
    3275             :       reg = code;
    3276             :     }
    3277             :   }
    3278             : 
    3279     4457574 :   if (use_pos[reg] < register_use->pos()) {
    3280             :     // If there is a gap position before the next register use, we can
    3281             :     // spill until there. The gap position will then fit the fill move.
    3282     2009115 :     if (LifetimePosition::ExistsGapPositionBetween(current->Start(),
    3283             :                                                    register_use->pos())) {
    3284             :       SpillBetween(current, current->Start(), register_use->pos());
    3285             :       return;
    3286             :     }
    3287             :   }
    3288             : 
    3289             :   // We couldn't spill until the next register use. Split before the register
    3290             :   // is blocked, if applicable.
    3291      439344 :   if (block_pos[reg] < current->End()) {
    3292             :     // Register becomes blocked before the current range end. Split before that
    3293             :     // position.
    3294             :     LiveRange* tail =
    3295       19312 :         SplitBetween(current, current->Start(), block_pos[reg].Start());
    3296       19312 :     AddToUnhandledSorted(tail);
    3297             :   }
    3298             : 
    3299             :   // Register reg is not blocked for the whole range.
    3300             :   DCHECK(block_pos[reg] >= current->End());
    3301      219672 :   TRACE("Assigning blocked reg %s to live range %d:%d\n", RegisterName(reg),
    3302             :         current->TopLevel()->vreg(), current->relative_id());
    3303      219672 :   SetLiveRangeAssignedRegister(current, reg);
    3304             : 
    3305             :   // This register was not free. Thus we need to find and spill
    3306             :   // parts of active and inactive live regions that use the same register
    3307             :   // at the same lifetime positions as current.
    3308      219673 :   SplitAndSpillIntersecting(current);
    3309             : }
    3310             : 
    3311             : 
// Evicts conflicting live ranges from the register just assigned to
// |current|: every active range occupying (or, with complex FP aliasing,
// overlapping) that register is split and spilled, and every non-fixed
// inactive range that both uses the register and intersects |current| is
// treated likewise. Afterwards |current| holds the register unopposed.
void LinearScanAllocator::SplitAndSpillIntersecting(LiveRange* current) {
  DCHECK(current->HasRegisterAssigned());
  int reg = current->assigned_register();
  LifetimePosition split_pos = current->Start();
  for (size_t i = 0; i < active_live_ranges().size(); ++i) {
    LiveRange* range = active_live_ranges()[i];
    if (kSimpleFPAliasing || !check_fp_aliasing()) {
      // Simple aliasing: only an exact register match conflicts.
      if (range->assigned_register() != reg) continue;
    } else {
      // Complex FP aliasing (e.g. float32/float64/simd128 overlap): ask the
      // configuration whether the two assignments share physical registers.
      if (!data()->config()->AreAliases(current->representation(), reg,
                                        range->representation(),
                                        range->assigned_register())) {
        continue;
      }
    }

    UsePosition* next_pos = range->NextRegisterPosition(current->Start());
    LifetimePosition spill_pos = FindOptimalSpillingPos(range, split_pos);
    if (next_pos == nullptr) {
      // The evicted range never needs a register again: spill its remainder.
      SpillAfter(range, spill_pos);
    } else {
      // When spilling between spill_pos and next_pos ensure that the range
      // remains spilled at least until the start of the current live range.
      // This guarantees that we will not introduce new unhandled ranges that
      // start before the current range as this violates allocation invariants
      // and will lead to an inconsistent state of active and inactive
      // live-ranges: ranges are allocated in order of their start positions,
      // ranges are retired from active/inactive when the start of the
      // current live-range is larger than their end.
      DCHECK(LifetimePosition::ExistsGapPositionBetween(current->Start(),
                                                        next_pos->pos()));
      SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos());
    }
    ActiveToHandled(range);
    // ActiveToHandled removes the element at index i; step back so the loop
    // revisits the element that was shifted into this slot.
    --i;
  }

  for (size_t i = 0; i < inactive_live_ranges().size(); ++i) {
    LiveRange* range = inactive_live_ranges()[i];
    DCHECK(range->End() > current->Start());
    // Fixed ranges represent hard register constraints and cannot be spilled.
    if (range->TopLevel()->IsFixed()) continue;
    if (kSimpleFPAliasing || !check_fp_aliasing()) {
      if (range->assigned_register() != reg) continue;
    } else {
      if (!data()->config()->AreAliases(current->representation(), reg,
                                        range->representation(),
                                        range->assigned_register()))
        continue;
    }

    // An inactive range only conflicts if it actually intersects |current|.
    LifetimePosition next_intersection = range->FirstIntersection(current);
    if (next_intersection.IsValid()) {
      UsePosition* next_pos = range->NextRegisterPosition(current->Start());
      if (next_pos == nullptr) {
        SpillAfter(range, split_pos);
      } else {
        // Spill no further than the earlier of the intersection point and
        // the range's next register use.
        next_intersection = Min(next_intersection, next_pos->pos());
        SpillBetween(range, split_pos, next_intersection);
      }
      InactiveToHandled(range);
      // Same index-compensation as above: the vector shrank at index i.
      --i;
    }
  }
}
    3376             : 
    3377             : 
// Heuristic: if |range| is a phi whose inputs are mostly already spilled,
// try to merge their spill slots and park the phi itself in the merged slot,
// avoiding a fresh slot plus reload moves on every predecessor edge.
// Returns true if the phi was spilled to the merged slot, false if the
// heuristic did not apply (callers then allocate normally).
bool LinearScanAllocator::TryReuseSpillForPhi(TopLevelLiveRange* range) {
  if (!range->is_phi()) return false;

  DCHECK(!range->HasSpillOperand());
  RegisterAllocationData::PhiMapValue* phi_map_value =
      data()->GetPhiMapValueFor(range);
  const PhiInstruction* phi = phi_map_value->phi();
  const InstructionBlock* block = phi_map_value->block();
  // Count the number of spilled operands.
  size_t spilled_count = 0;
  LiveRange* first_op = nullptr;
  for (size_t i = 0; i < phi->operands().size(); i++) {
    int op = phi->operands()[i];
    LiveRange* op_range = data()->GetOrCreateLiveRangeFor(op);
    if (!op_range->TopLevel()->HasSpillRange()) continue;
    // What matters is the operand's state at the end of the corresponding
    // predecessor block, where the value flows into the phi.
    const InstructionBlock* pred =
        code()->InstructionBlockAt(block->predecessors()[i]);
    LifetimePosition pred_end =
        LifetimePosition::InstructionFromInstructionIndex(
            pred->last_instruction_index());
    // Walk the child chain to the range actually live at the block end.
    while (op_range != nullptr && !op_range->CanCover(pred_end)) {
      op_range = op_range->next();
    }
    if (op_range != nullptr && op_range->spilled()) {
      spilled_count++;
      if (first_op == nullptr) {
        first_op = op_range->TopLevel();
      }
    }
  }

  // Only continue if more than half of the operands are spilled.
  if (spilled_count * 2 <= phi->operands().size()) {
    return false;
  }

  // Try to merge the spilled operands and count the number of merged spilled
  // operands.
  DCHECK_NOT_NULL(first_op);
  SpillRange* first_op_spill = first_op->TopLevel()->GetSpillRange();
  size_t num_merged = 1;
  for (size_t i = 1; i < phi->operands().size(); i++) {
    int op = phi->operands()[i];
    TopLevelLiveRange* op_range = data()->live_ranges()[op];
    if (!op_range->HasSpillRange()) continue;
    SpillRange* op_spill = op_range->GetSpillRange();
    if (op_spill == first_op_spill || first_op_spill->TryMerge(op_spill)) {
      num_merged++;
    }
  }

  // Only continue if enough operands could be merged to the
  // same spill slot.
  if (num_merged * 2 <= phi->operands().size() ||
      AreUseIntervalsIntersecting(first_op_spill->interval(),
                                  range->first_interval())) {
    return false;
  }

  // If the range does not need register soon, spill it to the merged
  // spill range.
  LifetimePosition next_pos = range->Start();
  if (next_pos.IsGapPosition()) next_pos = next_pos.NextStart();
  UsePosition* pos = range->NextUsePositionRegisterIsBeneficial(next_pos);
  if (pos == nullptr) {
    // No register-beneficial use at all: spill the whole phi range.
    SpillRange* spill_range =
        range->TopLevel()->HasSpillRange()
            ? range->TopLevel()->GetSpillRange()
            : data()->AssignSpillRangeToLiveRange(range->TopLevel());
    bool merged = first_op_spill->TryMerge(spill_range);
    if (!merged) return false;
    Spill(range);
    return true;
  } else if (pos->pos() > range->Start().NextStart()) {
    // First beneficial use is not immediate: spill until that use.
    SpillRange* spill_range =
        range->TopLevel()->HasSpillRange()
            ? range->TopLevel()->GetSpillRange()
            : data()->AssignSpillRangeToLiveRange(range->TopLevel());
    bool merged = first_op_spill->TryMerge(spill_range);
    if (!merged) return false;
    SpillBetween(range, range->Start(), pos->pos());
    DCHECK(UnhandledIsSorted());
    return true;
  }
  return false;
}
    3464             : 
    3465             : 
    3466      184530 : void LinearScanAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
    3467      184530 :   LiveRange* second_part = SplitRangeAt(range, pos);
    3468      184530 :   Spill(second_part);
    3469      184530 : }
    3470             : 
    3471             : 
// Spills |range| over [start, end[. Thin wrapper over SpillBetweenUntil
// with until == start, i.e. the spill is allowed to begin right at |start|.
void LinearScanAllocator::SpillBetween(LiveRange* range, LifetimePosition start,
                                       LifetimePosition end) {
  SpillBetweenUntil(range, start, start, end);
}
    3476             : 
    3477             : 
// Splits |range| at |start| and, if the resulting tail intersects
// [start, end[, spills it from max(tail start, until) up to just before
// |end|; whatever survives past that point is re-queued as unhandled.
// |until| lets callers guarantee the spilled portion extends at least to a
// given position (see SplitAndSpillIntersecting's invariant comment).
void LinearScanAllocator::SpillBetweenUntil(LiveRange* range,
                                            LifetimePosition start,
                                            LifetimePosition until,
                                            LifetimePosition end) {
  CHECK(start < end);
  LiveRange* second_part = SplitRangeAt(range, start);

  if (second_part->Start() < end) {
    // The split result intersects with [start, end[.
    // Split it at position between ]start+1, end[, spill the middle part
    // and put the rest to unhandled.
    LifetimePosition third_part_end = end.PrevStart().End();
    if (data()->IsBlockBoundary(end.Start())) {
      // Don't let the third part begin mid-boundary; cut exactly at the
      // block boundary instead.
      third_part_end = end.Start();
    }
    LiveRange* third_part = SplitBetween(
        second_part, Max(second_part->Start().End(), until), third_part_end);

    DCHECK(third_part != second_part);

    Spill(second_part);
    AddToUnhandledSorted(third_part);
  } else {
    // The split result does not intersect with [start, end[.
    // Nothing to spill. Just put it to unhandled as whole.
    AddToUnhandledSorted(second_part);
  }
}
    3506             : 
    3507             : 
// Holds on to the allocation data whose spill moves LocateSpillSlots scans.
SpillSlotLocator::SpillSlotLocator(RegisterAllocationData* data)
    : data_(data) {}
    3510             : 
    3511             : 
    3512     1301620 : void SpillSlotLocator::LocateSpillSlots() {
    3513     1301620 :   const InstructionSequence* code = data()->code();
    3514    61895046 :   for (TopLevelLiveRange* range : data()->live_ranges()) {
    3515    81961948 :     if (range == nullptr || range->IsEmpty()) continue;
    3516             :     // We care only about ranges which spill in the frame.
    3517    28004529 :     if (!range->HasSpillRange() || range->IsSpilledOnlyInDeferredBlocks()) {
    3518             :       continue;
    3519             :     }
    3520             :     TopLevelLiveRange::SpillMoveInsertionList* spills =
    3521             :         range->GetSpillMoveInsertionLocations();
    3522             :     DCHECK_NOT_NULL(spills);
    3523     3545364 :     for (; spills != nullptr; spills = spills->next) {
    3524     1772693 :       code->GetInstructionBlock(spills->gap_index)->mark_needs_frame();
    3525             :     }
    3526             :   }
    3527     1301598 : }
    3528             : 
    3529             : 
    3530     2603152 : OperandAssigner::OperandAssigner(RegisterAllocationData* data) : data_(data) {}
    3531             : 
    3532             : 
    3533     1301537 : void OperandAssigner::AssignSpillSlots() {
    3534             :   ZoneVector<SpillRange*>& spill_ranges = data()->spill_ranges();
    3535             :   // Merge disjoint spill ranges
    3536    57525446 :   for (size_t i = 0; i < spill_ranges.size(); ++i) {
    3537   233492753 :     SpillRange* range = spill_ranges[i];
    3538    27461172 :     if (range == nullptr) continue;
    3539     2597117 :     if (range->IsEmpty()) continue;
    3540   354537716 :     for (size_t j = i + 1; j < spill_ranges.size(); ++j) {
    3541   175609966 :       SpillRange* other = spill_ranges[j];
    3542   200183626 :       if (other != nullptr && !other->IsEmpty()) {
    3543    21035255 :         range->TryMerge(other);
    3544             :       }
    3545             :     }
    3546             :   }
    3547             :   // Allocate slots for the merged spill ranges.
    3548    59047674 :   for (SpillRange* range : spill_ranges) {
    3549    30058279 :     if (range == nullptr || range->IsEmpty()) continue;
    3550             :     // Allocate a new operand referring to the spill slot.
    3551     1658882 :     if (!range->HasSlot()) {
    3552     1164888 :       int index = data()->frame()->AllocateSpillSlot(range->byte_width());
    3553             :       range->set_assigned_slot(index);
    3554             :     }
    3555             :   }
    3556     1301590 : }
    3557             : 
    3558             : 
// Finalizes allocation: rewrites every use position of every live range to
// its assigned operand (register or slot), records phi assignments, and
// emits the spill-at-definition moves where applicable.
void OperandAssigner::CommitAssignment() {
  for (TopLevelLiveRange* top_range : data()->live_ranges()) {
    if (top_range == nullptr || top_range->IsEmpty()) continue;
    // Determine the slot/constant operand the range spills to, if any.
    InstructionOperand spill_operand;
    if (top_range->HasSpillOperand()) {
      spill_operand = *top_range->TopLevel()->GetSpillOperand();
    } else if (top_range->TopLevel()->HasSpillRange()) {
      spill_operand = top_range->TopLevel()->GetSpillRangeOperand();
    }
    if (top_range->is_phi()) {
      // Let the phi bookkeeping know where this phi's value ended up.
      data()->GetPhiMapValueFor(top_range)->CommitAssignment(
          top_range->GetAssignedOperand());
    }
    // Rewrite the uses of the top range and of all its split children.
    for (LiveRange* range = top_range; range != nullptr;
         range = range->next()) {
      InstructionOperand assigned = range->GetAssignedOperand();
      DCHECK(!assigned.IsUnallocated());
      range->ConvertUsesToOperand(assigned, spill_operand);
    }

    if (!spill_operand.IsInvalid()) {
      // If this top level range has a child spilled in a deferred block, we use
      // the range and control flow connection mechanism instead of spilling at
      // definition. Refer to the ConnectLiveRanges and ResolveControlFlow
      // phases. Normally, when we spill at definition, we do not insert a
      // connecting move when a successor child range is spilled - because the
      // spilled range picks up its value from the slot which was assigned at
      // definition. For ranges that are determined to spill only in deferred
      // blocks, we let ConnectLiveRanges and ResolveControlFlow find the blocks
      // where a spill operand is expected, and then finalize by inserting the
      // spills in the deferred blocks dominators.
      if (!top_range->IsSpilledOnlyInDeferredBlocks()) {
        // Spill at definition if the range isn't spilled only in deferred
        // blocks.
        top_range->CommitSpillMoves(
            data()->code(), spill_operand,
            top_range->has_slot_use() || top_range->spilled());
      }
    }
  }
}
    3600             : 
    3601             : 
// Holds on to the allocation data whose safe points PopulateReferenceMaps
// will annotate with GC references.
ReferenceMapPopulator::ReferenceMapPopulator(RegisterAllocationData* data)
    : data_(data) {}
    3604             : 
    3605             : 
    3606           0 : bool ReferenceMapPopulator::SafePointsAreInOrder() const {
    3607             :   int safe_point = 0;
    3608           0 :   for (ReferenceMap* map : *data()->code()->reference_maps()) {
    3609           0 :     if (safe_point > map->instruction_position()) return false;
    3610             :     safe_point = map->instruction_position();
    3611             :   }
    3612           0 :   return true;
    3613             : }
    3614             : 
    3615             : 
// For every GC safe point, records which stack slots and registers hold
// tagged pointers at that point, by intersecting each reference-typed live
// range (and its split children) with the sorted list of reference maps.
void ReferenceMapPopulator::PopulateReferenceMaps() {
  DCHECK(SafePointsAreInOrder());
  // Map all delayed references.
  for (RegisterAllocationData::DelayedReference& delayed_reference :
       data()->delayed_references()) {
    delayed_reference.map->RecordReference(
        AllocatedOperand::cast(*delayed_reference.operand));
  }
  // Iterate over all safe point positions and record a pointer
  // for all spilled live ranges at this point.
  int last_range_start = 0;
  const ReferenceMapDeque* reference_maps = data()->code()->reference_maps();
  // first_it is a cached cursor into the sorted reference maps; it is only
  // advanced forward while ranges are visited in increasing start order.
  ReferenceMapDeque::const_iterator first_it = reference_maps->begin();
  for (TopLevelLiveRange* range : data()->live_ranges()) {
    if (range == nullptr) continue;
    // Skip non-reference values.
    if (!data()->IsReference(range)) continue;
    // Skip empty live ranges.
    if (range->IsEmpty()) continue;
    if (range->has_preassigned_slot()) continue;

    // Find the extent of the range and its children.
    int start = range->Start().ToInstructionIndex();
    int end = 0;
    for (LiveRange* cur = range; cur != nullptr; cur = cur->next()) {
      LifetimePosition this_end = cur->End();
      if (this_end.ToInstructionIndex() > end)
        end = this_end.ToInstructionIndex();
      DCHECK(cur->Start().ToInstructionIndex() >= start);
    }

    // Most of the ranges are in order, but not all.  Keep an eye on when they
    // step backwards and reset the first_it so we don't miss any safe points.
    if (start < last_range_start) first_it = reference_maps->begin();
    last_range_start = start;

    // Step across all the safe points that are before the start of this range,
    // recording how far we step in order to save doing this for the next range.
    for (; first_it != reference_maps->end(); ++first_it) {
      ReferenceMap* map = *first_it;
      if (map->instruction_position() >= start) break;
    }

    // Compute the spill slot this range may hold its value in. Constant
    // spill operands are not stack slots and so carry no GC reference.
    InstructionOperand spill_operand;
    if (((range->HasSpillOperand() &&
          !range->GetSpillOperand()->IsConstant()) ||
         range->HasSpillRange())) {
      if (range->HasSpillOperand()) {
        spill_operand = *range->GetSpillOperand();
      } else {
        spill_operand = range->GetSpillRangeOperand();
      }
      DCHECK(spill_operand.IsStackSlot());
      DCHECK(CanBeTaggedPointer(
          AllocatedOperand::cast(spill_operand).representation()));
    }

    LiveRange* cur = range;
    // Step through the safe points to see whether they are in the range.
    for (auto it = first_it; it != reference_maps->end(); ++it) {
      ReferenceMap* map = *it;
      int safe_point = map->instruction_position();

      // The safe points are sorted so we can stop searching here.
      if (safe_point - 1 > end) break;

      // Advance to the next active range that covers the current
      // safe point position.
      LifetimePosition safe_point_pos =
          LifetimePosition::InstructionFromInstructionIndex(safe_point);

      // Search for the child range (cur) that covers safe_point_pos. If we
      // don't find it before the children pass safe_point_pos, keep cur at
      // the last child, because the next safe_point_pos may be covered by cur.
      // This may happen if cur has more than one interval, and the current
      // safe_point_pos is in between intervals.
      // For that reason, cur may be at most the last child.
      DCHECK_NOT_NULL(cur);
      DCHECK(safe_point_pos >= cur->Start() || range == cur);
      bool found = false;
      while (!found) {
        if (cur->Covers(safe_point_pos)) {
          found = true;
        } else {
          LiveRange* next = cur->next();
          if (next == nullptr || next->Start() > safe_point_pos) {
            break;
          }
          cur = next;
        }
      }

      if (!found) {
        // No child is live at this safe point (gap between intervals).
        continue;
      }

      // Check if the live range is spilled and the safe point is after
      // the spill position.
      int spill_index = range->IsSpilledOnlyInDeferredBlocks()
                            ? cur->Start().ToInstructionIndex()
                            : range->spill_start_index();

      if (!spill_operand.IsInvalid() && safe_point >= spill_index) {
        TRACE("Pointer for range %d (spilled at %d) at safe point %d\n",
              range->vreg(), spill_index, safe_point);
        map->RecordReference(AllocatedOperand::cast(spill_operand));
      }

      if (!cur->spilled()) {
        // The value also lives in a register at this safe point; record
        // that location too.
        TRACE(
            "Pointer in register for range %d:%d (start at %d) "
            "at safe point %d\n",
            range->vreg(), cur->relative_id(), cur->Start().value(),
            safe_point);
        InstructionOperand operand = cur->GetAssignedOperand();
        DCHECK(!operand.IsStackSlot());
        DCHECK(CanBeTaggedPointer(
            AllocatedOperand::cast(operand).representation()));
        map->RecordReference(AllocatedOperand::cast(operand));
      }
    }
  }
}
    3739             : 
    3740             : 
// Holds on to the allocation data used when connecting split live ranges
// across control flow.
LiveRangeConnector::LiveRangeConnector(RegisterAllocationData* data)
    : data_(data) {}
    3743             : 
    3744             : 
    3745           0 : bool LiveRangeConnector::CanEagerlyResolveControlFlow(
    3746             :     const InstructionBlock* block) const {
    3747    13807531 :   if (block->PredecessorCount() != 1) return false;
    3748           0 :   return block->predecessors()[0].IsNext(block->rpo_number());
    3749             : }
    3750             : 
    3751             : 
// Walks every block whose control flow cannot be resolved eagerly and, for
// each virtual register live into the block, inserts gap moves so the value
// arrives from each predecessor in the operand the current block expects.
// Reloads into registers are elided when provably useless, and reloads of
// deferred-only-spilled ranges record which blocks must define the spill
// operand; those spills are committed at the end via
// CommitSpillsInDeferredBlocks.
void LiveRangeConnector::ResolveControlFlow(Zone* local_zone) {
  // Lazily linearize live ranges in memory for fast lookup.
  LiveRangeFinder finder(data(), local_zone);
  ZoneVector<BitVector*>& live_in_sets = data()->live_in_sets();
  for (const InstructionBlock* block : code()->instruction_blocks()) {
    // Fall-through edges from a single predecessor need no fixup moves.
    if (CanEagerlyResolveControlFlow(block)) continue;
    BitVector* live = live_in_sets[block->rpo_number().ToInt()];
    BitVector::Iterator iterator(live);
    while (!iterator.Done()) {
      // One virtual register live into this block.
      int vreg = iterator.Current();
      LiveRangeBoundArray* array = finder.ArrayFor(vreg);
      for (const RpoNumber& pred : block->predecessors()) {
        FindResult result;
        const InstructionBlock* pred_block = code()->InstructionBlockAt(pred);
        // No live-range pieces of |vreg| actually span this edge.
        if (!array->FindConnectableSubranges(block, pred_block, &result)) {
          continue;
        }
        InstructionOperand pred_op = result.pred_cover_->GetAssignedOperand();
        InstructionOperand cur_op = result.cur_cover_->GetAssignedOperand();
        // Value already lives in the right place on this edge.
        if (pred_op.Equals(cur_op)) continue;
        if (!pred_op.IsAnyRegister() && cur_op.IsAnyRegister()) {
          // We're doing a reload.
          // We don't need to, if:
          // 1) there's no register use in this block, and
          // 2) the range ends before the block does, and
          // 3) we don't have a successor, or the successor is spilled.
          LifetimePosition block_start =
              LifetimePosition::GapFromInstructionIndex(block->code_start());
          LifetimePosition block_end =
              LifetimePosition::GapFromInstructionIndex(block->code_end());
          const LiveRange* current = result.cur_cover_;
          const LiveRange* successor = current->next();
          if (current->End() < block_end &&
              (successor == nullptr || successor->spilled())) {
            // verify point 1: no register use. We can go to the end of the
            // range, since it's all within the block.

            bool uses_reg = false;
            for (const UsePosition* use = current->NextUsePosition(block_start);
                 use != nullptr; use = use->next()) {
              if (use->operand()->IsAnyRegister()) {
                uses_reg = true;
                break;
              }
            }
            // Reload is useless: skip this predecessor entirely (note this
            // continues the predecessor loop, not the outer vreg loop).
            if (!uses_reg) continue;
          }
          if (current->TopLevel()->IsSpilledOnlyInDeferredBlocks() &&
              pred_block->IsDeferred()) {
            // The spill location should be defined in pred_block, so add
            // pred_block to the list of blocks requiring a spill operand.
            current->TopLevel()->GetListOfBlocksRequiringSpillOperands()->Add(
                pred_block->rpo_number().ToInt());
          }
        }
        // Emit the actual connecting move for this edge.
        int move_loc = ResolveControlFlow(block, cur_op, pred_block, pred_op);
        USE(move_loc);
        DCHECK_IMPLIES(
            result.cur_cover_->TopLevel()->IsSpilledOnlyInDeferredBlocks() &&
                !(pred_op.IsAnyRegister() && cur_op.IsAnyRegister()),
            code()->GetInstructionBlock(move_loc)->IsDeferred());
      }
      iterator.Advance();
    }
  }

  // At this stage, we collected blocks needing a spill operand from
  // ConnectRanges and from ResolveControlFlow. Time to commit the spills for
  // deferred blocks.
  for (TopLevelLiveRange* top : data()->live_ranges()) {
    if (top == nullptr || top->IsEmpty() ||
        !top->IsSpilledOnlyInDeferredBlocks())
      continue;
    CommitSpillsInDeferredBlocks(top, finder.ArrayFor(top->vreg()), local_zone);
  }
}
    3828             : 
    3829             : 
    3830     2344043 : int LiveRangeConnector::ResolveControlFlow(const InstructionBlock* block,
    3831             :                                            const InstructionOperand& cur_op,
    3832      913583 :                                            const InstructionBlock* pred,
    3833             :                                            const InstructionOperand& pred_op) {
    3834             :   DCHECK(!pred_op.Equals(cur_op));
    3835             :   int gap_index;
    3836             :   Instruction::GapPosition position;
    3837     1628813 :   if (block->PredecessorCount() == 1) {
    3838             :     gap_index = block->first_instruction_index();
    3839             :     position = Instruction::START;
    3840             :   } else {
    3841             :     DCHECK_EQ(1, pred->SuccessorCount());
    3842             :     DCHECK(!code()
    3843             :                 ->InstructionAt(pred->last_instruction_index())
    3844             :                 ->HasReferenceMap());
    3845             :     gap_index = pred->last_instruction_index();
    3846             :     position = Instruction::END;
    3847             :   }
    3848     1628813 :   data()->AddGapMove(gap_index, position, pred_op, cur_op);
    3849     1628810 :   return gap_index;
    3850             : }
    3851             : 
// Connects adjacent pieces of each split live range with gap moves: whenever
// one child range ends exactly where the next begins (and the seam is not a
// block boundary handled by ResolveControlFlow), a move from the first
// piece's assigned operand to the second's is inserted at the seam. Moves
// that would land at a non-gap START position cannot be committed
// immediately (they must run after the moves already stored there), so they
// are collected in |delayed_insertion_map| and merged in a second pass.
void LiveRangeConnector::ConnectRanges(Zone* local_zone) {
  DelayedInsertionMap delayed_insertion_map(local_zone);
  for (TopLevelLiveRange* top_range : data()->live_ranges()) {
    if (top_range == nullptr) continue;
    bool connect_spilled = top_range->IsSpilledOnlyInDeferredBlocks();
    LiveRange* first_range = top_range;
    // Walk consecutive (first_range, second_range) pairs of children.
    for (LiveRange *second_range = first_range->next(); second_range != nullptr;
         first_range = second_range, second_range = second_range->next()) {
      LifetimePosition pos = second_range->Start();
      // Add gap move if the two live ranges touch and there is no block
      // boundary.
      if (second_range->spilled()) continue;
      if (first_range->End() != pos) continue;
      if (data()->IsBlockBoundary(pos) &&
          !CanEagerlyResolveControlFlow(GetInstructionBlock(code(), pos))) {
        continue;
      }
      InstructionOperand prev_operand = first_range->GetAssignedOperand();
      InstructionOperand cur_operand = second_range->GetAssignedOperand();
      // Identical locations need no move.
      if (prev_operand.Equals(cur_operand)) continue;
      bool delay_insertion = false;
      Instruction::GapPosition gap_pos;
      int gap_index = pos.ToInstructionIndex();
      if (connect_spilled && !prev_operand.IsAnyRegister() &&
          cur_operand.IsAnyRegister()) {
        const InstructionBlock* block = code()->GetInstructionBlock(gap_index);
        DCHECK(block->IsDeferred());
        // Performing a reload in this block, meaning the spill operand must
        // be defined here.
        top_range->GetListOfBlocksRequiringSpillOperands()->Add(
            block->rpo_number().ToInt());
      }

      if (pos.IsGapPosition()) {
        gap_pos = pos.IsStart() ? Instruction::START : Instruction::END;
      } else {
        // A seam at an instruction position: a START seam must be delayed
        // (committed after existing END moves), an END seam moves to the
        // next instruction's START.
        if (pos.IsStart()) {
          delay_insertion = true;
        } else {
          gap_index++;
        }
        gap_pos = delay_insertion ? Instruction::END : Instruction::START;
      }
      // Reloads or spills for spilled in deferred blocks ranges must happen
      // only in deferred blocks.
      DCHECK_IMPLIES(
          connect_spilled &&
              !(prev_operand.IsAnyRegister() && cur_operand.IsAnyRegister()),
          code()->GetInstructionBlock(gap_index)->IsDeferred());

      ParallelMove* move =
          code()->InstructionAt(gap_index)->GetOrCreateParallelMove(
              gap_pos, code_zone());
      if (!delay_insertion) {
        move->AddMove(prev_operand, cur_operand);
      } else {
        delayed_insertion_map.insert(
            std::make_pair(std::make_pair(move, prev_operand), cur_operand));
      }
    }
  }
  if (delayed_insertion_map.empty()) return;
  // Insert all the moves which should occur after the stored move.
  ZoneVector<MoveOperands*> to_insert(local_zone);
  ZoneVector<MoveOperands*> to_eliminate(local_zone);
  to_insert.reserve(4);
  to_eliminate.reserve(4);
  // The map is keyed by ParallelMove, so all delayed moves for one
  // ParallelMove are contiguous; commit them batch by batch. The loop runs
  // one extra iteration (it == end) to flush the final batch.
  ParallelMove* moves = delayed_insertion_map.begin()->first.first;
  for (auto it = delayed_insertion_map.begin();; ++it) {
    bool done = it == delayed_insertion_map.end();
    if (done || it->first.first != moves) {
      // Commit the MoveOperands for current ParallelMove.
      for (MoveOperands* move : to_eliminate) {
        move->Eliminate();
      }
      for (MoveOperands* move : to_insert) {
        moves->push_back(move);
      }
      if (done) break;
      // Reset state.
      to_eliminate.clear();
      to_insert.clear();
      moves = it->first.first;
    }
    // Gather all MoveOperands for a single ParallelMove.
    MoveOperands* move =
        new (code_zone()) MoveOperands(it->first.second, it->second);
    moves->PrepareInsertAfter(move, &to_eliminate);
    to_insert.push_back(move);
  }
}
    3943             : 
    3944             : 
// For a range spilled only in deferred code, inserts the actual spill moves.
// Starting from every block known to need the spill operand, it walks
// backwards through deferred predecessors until it reaches edges coming from
// non-deferred code, and emits register->spill-slot gap moves at the start
// of the deferred blocks on those edges, deduplicating per (block, vreg).
void LiveRangeConnector::CommitSpillsInDeferredBlocks(
    TopLevelLiveRange* range, LiveRangeBoundArray* array, Zone* temp_zone) {
  DCHECK(range->IsSpilledOnlyInDeferredBlocks());
  DCHECK(!range->spilled());

  InstructionSequence* code = data()->code();
  InstructionOperand spill_operand = range->GetSpillRangeOperand();

  TRACE("Live Range %d will be spilled only in deferred blocks.\n",
        range->vreg());
  // If we have ranges that aren't spilled but require the operand on the stack,
  // make sure we insert the spill.
  for (const LiveRange* child = range; child != nullptr;
       child = child->next()) {
    for (const UsePosition* pos = child->first_pos(); pos != nullptr;
         pos = pos->next()) {
      // Only slot-requiring uses (or uses on already-spilled children) force
      // a spill operand in their block.
      if (pos->type() != UsePositionType::kRequiresSlot && !child->spilled())
        continue;
      range->AddBlockRequiringSpillOperand(
          code->GetInstructionBlock(pos->pos().ToInstructionIndex())
              ->rpo_number());
    }
  }

  // Seed the backward worklist with all blocks that need the spill operand.
  ZoneQueue<int> worklist(temp_zone);

  for (BitVector::Iterator iterator(
           range->GetListOfBlocksRequiringSpillOperands());
       !iterator.Done(); iterator.Advance()) {
    worklist.push(iterator.Current());
  }

  ZoneSet<std::pair<RpoNumber, int>> done_moves(temp_zone);
  // Seek the deferred blocks that dominate locations requiring spill operands,
  // and spill there. We only need to spill at the start of such blocks.
  BitVector done_blocks(
      range->GetListOfBlocksRequiringSpillOperands()->length(), temp_zone);
  while (!worklist.empty()) {
    int block_id = worklist.front();
    worklist.pop();
    // Visit each block at most once.
    if (done_blocks.Contains(block_id)) continue;
    done_blocks.Add(block_id);
    InstructionBlock* spill_block =
        code->InstructionBlockAt(RpoNumber::FromInt(block_id));

    for (const RpoNumber& pred : spill_block->predecessors()) {
      const InstructionBlock* pred_block = code->InstructionBlockAt(pred);

      if (pred_block->IsDeferred()) {
        // Still inside deferred code: keep walking backwards.
        worklist.push(pred_block->rpo_number().ToInt());
      } else {
        // Edge from non-deferred code: the value is still in its assigned
        // (register) location at the end of the predecessor; spill it at the
        // start of the deferred block.
        LifetimePosition pred_end =
            LifetimePosition::InstructionFromInstructionIndex(
                pred_block->last_instruction_index());

        LiveRangeBound* bound = array->Find(pred_end);

        InstructionOperand pred_op = bound->range_->GetAssignedOperand();

        RpoNumber spill_block_number = spill_block->rpo_number();
        // Emit at most one spill move per (block, vreg) pair.
        if (done_moves.find(std::make_pair(
                spill_block_number, range->vreg())) == done_moves.end()) {
          data()->AddGapMove(spill_block->first_instruction_index(),
                             Instruction::GapPosition::START, pred_op,
                             spill_operand);
          done_moves.insert(std::make_pair(spill_block_number, range->vreg()));
          spill_block->mark_needs_frame();
        }
      }
    }
  }
}
    4017             : 
    4018             : #undef TRACE
    4019             : 
    4020             : }  // namespace compiler
    4021             : }  // namespace internal
    4022             : }  // namespace v8

Generated by: LCOV version 1.10