LCOV - code coverage report
Current view: top level - src/compiler/backend - register-allocator.cc (source / functions)
Test: app.info                    Date: 2019-04-17
                     Hit      Total    Coverage
Lines:              1220       1739      70.2 %
Functions:           113        194      58.2 %

          Line data    Source code
       1             : // Copyright 2014 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #include "src/compiler/backend/register-allocator.h"
       6             : 
       7             : #include <iomanip>
       8             : 
       9             : #include "src/assembler-inl.h"
      10             : #include "src/base/adapters.h"
      11             : #include "src/base/small-vector.h"
      12             : #include "src/compiler/linkage.h"
      13             : #include "src/string-stream.h"
      14             : #include "src/vector.h"
      15             : 
      16             : namespace v8 {
      17             : namespace internal {
      18             : namespace compiler {
      19             : 
      20             : #define TRACE(...)                             \
      21             :   do {                                         \
      22             :     if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \
      23             :   } while (false)
      24             : 
      25             : namespace {
      26             : 
      27             : static constexpr int kFloat32Bit =
      28             :     RepresentationBit(MachineRepresentation::kFloat32);
      29             : static constexpr int kSimd128Bit =
      30             :     RepresentationBit(MachineRepresentation::kSimd128);
      31             : 
      32             : int GetRegisterCount(const RegisterConfiguration* cfg, RegisterKind kind) {
      33             :   return kind == FP_REGISTERS ? cfg->num_double_registers()
      34     2936466 :                               : cfg->num_general_registers();
      35             : }
      36             : 
      37             : int GetAllocatableRegisterCount(const RegisterConfiguration* cfg,
      38             :                                 RegisterKind kind) {
      39             :   return kind == FP_REGISTERS ? cfg->num_allocatable_double_registers()
      40     2936466 :                               : cfg->num_allocatable_general_registers();
      41             : }
      42             : 
      43             : const int* GetAllocatableRegisterCodes(const RegisterConfiguration* cfg,
      44             :                                        RegisterKind kind) {
      45             :   return kind == FP_REGISTERS ? cfg->allocatable_double_codes()
      46     2936466 :                               : cfg->allocatable_general_codes();
      47             : }
      48             : 
      49             : const InstructionBlock* GetContainingLoop(const InstructionSequence* sequence,
      50             :                                           const InstructionBlock* block) {
      51             :   RpoNumber index = block->loop_header();
      52     4178992 :   if (!index.IsValid()) return nullptr;
      53      402305 :   return sequence->InstructionBlockAt(index);
      54             : }
      55             : 
      56             : const InstructionBlock* GetInstructionBlock(const InstructionSequence* code,
      57             :                                             LifetimePosition pos) {
      58    13634853 :   return code->GetInstructionBlock(pos.ToInstructionIndex());
      59             : }
      60             : 
      61             : Instruction* GetLastInstruction(InstructionSequence* code,
      62             :                                 const InstructionBlock* block) {
      63             :   return code->InstructionAt(block->last_instruction_index());
      64             : }
      65             : 
      66             : // TODO(dcarney): fix frame to allow frame accesses to half size location.
      67             : int GetByteWidth(MachineRepresentation rep) {
      68     5943344 :   switch (rep) {
      69             :     case MachineRepresentation::kBit:
      70             :     case MachineRepresentation::kWord8:
      71             :     case MachineRepresentation::kWord16:
      72             :     case MachineRepresentation::kWord32:
      73             :     case MachineRepresentation::kFloat32:
      74             :       return kSystemPointerSize;
      75             :     case MachineRepresentation::kTaggedSigned:
      76             :     case MachineRepresentation::kTaggedPointer:
      77             :     case MachineRepresentation::kTagged:
      78             :     case MachineRepresentation::kCompressedSigned:
      79             :     case MachineRepresentation::kCompressedPointer:
      80             :     case MachineRepresentation::kCompressed:
      81             :       // TODO(ishell): kTaggedSize once half size locations are supported.
      82             :       return kSystemPointerSize;
      83             :     case MachineRepresentation::kWord64:
      84             :     case MachineRepresentation::kFloat64:
      85             :       return kDoubleSize;
      86             :     case MachineRepresentation::kSimd128:
      87             :       return kSimd128Size;
      88             :     case MachineRepresentation::kNone:
      89             :       break;
      90             :   }
      91           0 :   UNREACHABLE();
      92             : }
      93             : 
      94             : }  // namespace
      95             : 
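                      : // Note: LiveRangeBound caches one child range's [start_, end_) so that
                      : // LiveRangeBoundArray below can binary-search a top-level range's
                      : // children instead of walking the linked child list on every query.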
      96             : class LiveRangeBound {
      97             :  public:
      98             :   explicit LiveRangeBound(LiveRange* range, bool skip)
      99    41094845 :       : range_(range), start_(range->Start()), end_(range->End()), skip_(skip) {
     100             :     DCHECK(!range->IsEmpty());
     101             :   }
     102             : 
     103             :   bool CanCover(LifetimePosition position) {
     104   246481557 :     return start_ <= position && position < end_;
     105             :   }
     106             : 
     107             :   LiveRange* const range_;
     108             :   const LifetimePosition start_;
     109             :   const LifetimePosition end_;
     110             :   const bool skip_;
     111             : 
     112             :  private:
     113             :   DISALLOW_COPY_AND_ASSIGN(LiveRangeBound);
     114             : };
     115             : 
     116             : struct FindResult {
     117             :   LiveRange* cur_cover_;
     118             :   LiveRange* pred_cover_;
     119             : };
     120             : 
     121             : class LiveRangeBoundArray {
     122             :  public:
     123    88878484 :   LiveRangeBoundArray() : length_(0), start_(nullptr) {}
     124             : 
     125             :   bool ShouldInitialize() { return start_ == nullptr; }
     126             : 
     127     6917486 :   void Initialize(Zone* zone, TopLevelLiveRange* range) {
     128     6917486 :     size_t max_child_count = range->GetMaxChildCount();
     129             : 
     130     6917530 :     start_ = zone->NewArray<LiveRangeBound>(max_child_count);
     131     6917530 :     length_ = 0;
     132             :     LiveRangeBound* curr = start_;
     133             :     // Normally, spilled ranges do not need connecting moves, because the spill
     134             :     // location has been assigned at definition. For ranges spilled in deferred
     135             :     // blocks, that is not the case, so we need to connect the spilled children.
     136    89107152 :     for (LiveRange *i = range; i != nullptr; i = i->next(), ++curr, ++length_) {
     137    41094811 :       new (curr) LiveRangeBound(i, i->spilled());
     138             :     }
     139     6917530 :   }
     140             : 
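                      : // Note: this binary search relies on callers only querying positions
                      : // that are covered by one of the bounds; there is no miss case, and
                      : // the DCHECKs below document that invariant.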
     141             :   LiveRangeBound* Find(const LifetimePosition position) const {
     142             :     size_t left_index = 0;
     143   168215963 :     size_t right_index = length_;
     144             :     while (true) {
     145   619915392 :       size_t current_index = left_index + (right_index - left_index) / 2;
     146             :       DCHECK(right_index > current_index);
     147   619915392 :       LiveRangeBound* bound = &start_[current_index];
     148   619915392 :       if (bound->start_ <= position) {
     149   417312571 :         if (position < bound->end_) return bound;
     150             :         DCHECK(left_index < current_index);
     151             :         left_index = current_index;
     152             :       } else {
     153             :         right_index = current_index;
     154             :       }
     155             :     }
     156             :   }
     157             : 
     158             :   LiveRangeBound* FindPred(const InstructionBlock* pred) {
     159             :     LifetimePosition pred_end =
     160             :         LifetimePosition::InstructionFromInstructionIndex(
     161             :             pred->last_instruction_index());
     162             :     return Find(pred_end);
     163             :   }
     164             : 
     165             :   LiveRangeBound* FindSucc(const InstructionBlock* succ) {
     166             :     LifetimePosition succ_start = LifetimePosition::GapFromInstructionIndex(
     167             :         succ->first_instruction_index());
     168             :     return Find(succ_start);
     169             :   }
     170             : 
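                      : // Note: returns true only when |pred| and |block| are covered by
                      : // different children of the same top-level range and the child covering
                      : // |block| is not spilled, i.e. when a connecting move may be needed.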
     171   123726986 :   bool FindConnectableSubranges(const InstructionBlock* block,
     172             :                                 const InstructionBlock* pred,
     173             :                                 FindResult* result) const {
     174             :     LifetimePosition pred_end =
     175             :         LifetimePosition::InstructionFromInstructionIndex(
     176             :             pred->last_instruction_index());
     177             :     LiveRangeBound* bound = Find(pred_end);
     178   123726986 :     result->pred_cover_ = bound->range_;
     179             :     LifetimePosition cur_start = LifetimePosition::GapFromInstructionIndex(
     180             :         block->first_instruction_index());
     181             : 
     182   123726986 :     if (bound->CanCover(cur_start)) {
     183             :       // Both blocks are covered by the same range, so there is nothing to
     184             :       // connect.
     185             :       return false;
     186             :     }
     187             :     bound = Find(cur_start);
     188    42797369 :     if (bound->skip_) {
     189             :       return false;
     190             :     }
     191     8203051 :     result->cur_cover_ = bound->range_;
     192             :     DCHECK(result->pred_cover_ != nullptr && result->cur_cover_ != nullptr);
     193     8203051 :     return (result->cur_cover_ != result->pred_cover_);
     194             :   }
     195             : 
     196             :  private:
     197             :   size_t length_;
     198             :   LiveRangeBound* start_;
     199             : 
     200             :   DISALLOW_COPY_AND_ASSIGN(LiveRangeBoundArray);
     201             : };
     202             : 
     203             : class LiveRangeFinder {
     204             :  public:
     205     2640558 :   explicit LiveRangeFinder(const RegisterAllocationData* data, Zone* zone)
     206             :       : data_(data),
     207             :         bounds_length_(static_cast<int>(data_->live_ranges().size())),
     208     2640558 :         bounds_(zone->NewArray<LiveRangeBoundArray>(bounds_length_)),
     209     7923971 :         zone_(zone) {
     210   180399179 :     for (int i = 0; i < bounds_length_; ++i) {
     211    88878162 :       new (&bounds_[i]) LiveRangeBoundArray();
     212             :     }
     213     2642855 :   }
     214             : 
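                      : // Note: bound arrays are built lazily; a live range's array is only
                      : // materialized (via Initialize) the first time ArrayFor() requests it.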
     215    81893912 :   LiveRangeBoundArray* ArrayFor(int operand_index) {
     216             :     DCHECK(operand_index < bounds_length_);
     217   163787824 :     TopLevelLiveRange* range = data_->live_ranges()[operand_index];
     218             :     DCHECK(range != nullptr && !range->IsEmpty());
     219    81893912 :     LiveRangeBoundArray* array = &bounds_[operand_index];
     220    81893912 :     if (array->ShouldInitialize()) {
     221     6917611 :       array->Initialize(zone_, range);
     222             :     }
     223    81893831 :     return array;
     224             :   }
     225             : 
     226             :  private:
     227             :   const RegisterAllocationData* const data_;
     228             :   const int bounds_length_;
     229             :   LiveRangeBoundArray* const bounds_;
     230             :   Zone* const zone_;
     231             : 
     232             :   DISALLOW_COPY_AND_ASSIGN(LiveRangeFinder);
     233             : };
     234             : 
     235             : using DelayedInsertionMapKey = std::pair<ParallelMove*, InstructionOperand>;
     236             : 
     237             : struct DelayedInsertionMapCompare {
     238             :   bool operator()(const DelayedInsertionMapKey& a,
     239             :                   const DelayedInsertionMapKey& b) const {
     240         464 :     if (a.first == b.first) {
     241             :       return a.second.Compare(b.second);
     242             :     }
     243         464 :     return a.first < b.first;
     244             :   }
     245             : };
     246             : 
     247             : using DelayedInsertionMap = ZoneMap<DelayedInsertionMapKey, InstructionOperand,
     248             :                                     DelayedInsertionMapCompare>;
     249             : 
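                      : // Note: the constructor derives the use's type and its
                      : // register-beneficial bit from the operand's allocation policy and
                      : // packs both, together with the hint type, into the flags_ bitfield.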
     250   111094692 : UsePosition::UsePosition(LifetimePosition pos, InstructionOperand* operand,
     251             :                          void* hint, UsePositionHintType hint_type)
     252   111094692 :     : operand_(operand), hint_(hint), next_(nullptr), pos_(pos), flags_(0) {
     253             :   DCHECK_IMPLIES(hint == nullptr, hint_type == UsePositionHintType::kNone);
     254             :   bool register_beneficial = true;
     255             :   UsePositionType type = UsePositionType::kRegisterOrSlot;
     256   219476653 :   if (operand_ != nullptr && operand_->IsUnallocated()) {
     257             :     const UnallocatedOperand* unalloc = UnallocatedOperand::cast(operand_);
     258   108382555 :     if (unalloc->HasRegisterPolicy()) {
     259             :       type = UsePositionType::kRequiresRegister;
     260    65223562 :     } else if (unalloc->HasSlotPolicy()) {
     261             :       type = UsePositionType::kRequiresSlot;
     262             :       register_beneficial = false;
     263    51385159 :     } else if (unalloc->HasRegisterOrSlotOrConstantPolicy()) {
     264             :       type = UsePositionType::kRegisterOrSlotOrConstant;
     265             :       register_beneficial = false;
     266             :     } else {
     267    51385589 :       register_beneficial = !unalloc->HasRegisterOrSlotPolicy();
     268             :     }
     269             :   }
     270   222189384 :   flags_ = TypeField::encode(type) | HintTypeField::encode(hint_type) |
     271   111094692 :            RegisterBeneficialField::encode(register_beneficial) |
     272   111094692 :            AssignedRegisterField::encode(kUnassignedRegister);
     273             :   DCHECK(pos_.IsValid());
     274   111094692 : }
     275             : 
     276           0 : bool UsePosition::HasHint() const {
     277             :   int hint_register;
     278   111233513 :   return HintRegister(&hint_register);
     279             : }
     280             : 
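                      : // Note: hint_ is a type-erased pointer; HintTypeField in flags_ records
                      : // whether it points at a UsePosition, an InstructionOperand, or a
                      : // PhiMapValue, and HintRegister() decodes it accordingly.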
     281   356254529 : bool UsePosition::HintRegister(int* register_code) const {
     282   356254529 :   if (hint_ == nullptr) return false;
     283   172030756 :   switch (HintTypeField::decode(flags_)) {
     284             :     case UsePositionHintType::kNone:
     285             :     case UsePositionHintType::kUnresolved:
     286             :       return false;
     287             :     case UsePositionHintType::kUsePos: {
     288             :       UsePosition* use_pos = reinterpret_cast<UsePosition*>(hint_);
     289    24311594 :       int assigned_register = AssignedRegisterField::decode(use_pos->flags_);
     290    24311594 :       if (assigned_register == kUnassignedRegister) return false;
     291    11316263 :       *register_code = assigned_register;
     292    11316263 :       return true;
     293             :     }
     294             :     case UsePositionHintType::kOperand: {
     295             :       InstructionOperand* operand =
     296             :           reinterpret_cast<InstructionOperand*>(hint_);
     297    51319613 :       *register_code = LocationOperand::cast(operand)->register_code();
     298    51319613 :       return true;
     299             :     }
     300             :     case UsePositionHintType::kPhi: {
     301             :       RegisterAllocationData::PhiMapValue* phi =
     302             :           reinterpret_cast<RegisterAllocationData::PhiMapValue*>(hint_);
     303             :       int assigned_register = phi->assigned_register();
     304     1429358 :       if (assigned_register == kUnassignedRegister) return false;
     305      230064 :       *register_code = assigned_register;
     306      230064 :       return true;
     307             :     }
     308             :   }
     309           0 :   UNREACHABLE();
     310             : }
     311             : 
     312    50434353 : UsePositionHintType UsePosition::HintTypeForOperand(
     313             :     const InstructionOperand& op) {
     314    50434353 :   switch (op.kind()) {
     315             :     case InstructionOperand::CONSTANT:
     316             :     case InstructionOperand::IMMEDIATE:
     317             :     case InstructionOperand::EXPLICIT:
     318             :       return UsePositionHintType::kNone;
     319             :     case InstructionOperand::UNALLOCATED:
     320    22608940 :       return UsePositionHintType::kUnresolved;
     321             :     case InstructionOperand::ALLOCATED:
     322    29477509 :       if (op.IsRegister() || op.IsFPRegister()) {
     323             :         return UsePositionHintType::kOperand;
     324             :       } else {
     325             :         DCHECK(op.IsStackSlot() || op.IsFPStackSlot());
     326     1246183 :         return UsePositionHintType::kNone;
     327             :       }
     328             :     case InstructionOperand::INVALID:
     329             :       break;
     330             :   }
     331           0 :   UNREACHABLE();
     332             : }
     333             : 
     334           0 : void UsePosition::SetHint(UsePosition* use_pos) {
     335             :   DCHECK_NOT_NULL(use_pos);
     336    13974678 :   hint_ = use_pos;
     337    13974678 :   flags_ = HintTypeField::update(flags_, UsePositionHintType::kUsePos);
     338           0 : }
     339             : 
     340           0 : void UsePosition::ResolveHint(UsePosition* use_pos) {
     341             :   DCHECK_NOT_NULL(use_pos);
     342     7539974 :   if (HintTypeField::decode(flags_) != UsePositionHintType::kUnresolved) return;
     343     7539978 :   hint_ = use_pos;
     344     7539978 :   flags_ = HintTypeField::update(flags_, UsePositionHintType::kUsePos);
     345             : }
     346             : 
     347           0 : void UsePosition::set_type(UsePositionType type, bool register_beneficial) {
     348             :   DCHECK_IMPLIES(type == UsePositionType::kRequiresSlot, !register_beneficial);
     349             :   DCHECK_EQ(kUnassignedRegister, AssignedRegisterField::decode(flags_));
     350    19980511 :   flags_ = TypeField::encode(type) |
     351    19980511 :            RegisterBeneficialField::encode(register_beneficial) |
     352    19980511 :            HintTypeField::encode(HintTypeField::decode(flags_)) |
     353    19980511 :            AssignedRegisterField::encode(kUnassignedRegister);
     354           0 : }
     355             : 
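                      : // Note: SplitAt() truncates this interval to [start, pos) and returns a
                      : // new interval [pos, end) that also takes over the rest of the chain;
                      : // this interval's next_ link is cleared until the caller relinks it.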
     356           0 : UseInterval* UseInterval::SplitAt(LifetimePosition pos, Zone* zone) {
     357             :   DCHECK(Contains(pos) && pos != start());
     358    49461299 :   UseInterval* after = new (zone) UseInterval(pos, end_);
     359    49461527 :   after->next_ = next_;
     360    49461527 :   next_ = nullptr;
     361    49461527 :   end_ = pos;
     362           0 :   return after;
     363             : }
     364             : 
     365           0 : void LifetimePosition::Print() const { StdoutStream{} << *this << std::endl; }
     366             : 
     367           0 : std::ostream& operator<<(std::ostream& os, const LifetimePosition pos) {
     368           0 :   os << '@' << pos.ToInstructionIndex();
     369           0 :   if (pos.IsGapPosition()) {
     370             :     os << 'g';
     371             :   } else {
     372             :     os << 'i';
     373             :   }
     374           0 :   if (pos.IsStart()) {
     375             :     os << 's';
     376             :   } else {
     377             :     os << 'e';
     378             :   }
     379           0 :   return os;
     380             : }
     381             : 
     382           0 : LiveRange::LiveRange(int relative_id, MachineRepresentation rep,
     383             :                      TopLevelLiveRange* top_level)
     384             :     : relative_id_(relative_id),
     385             :       bits_(0),
     386             :       last_interval_(nullptr),
     387             :       first_interval_(nullptr),
     388             :       first_pos_(nullptr),
     389             :       top_level_(top_level),
     390             :       next_(nullptr),
     391             :       current_interval_(nullptr),
     392             :       last_processed_use_(nullptr),
     393             :       current_hint_position_(nullptr),
     394   176873525 :       splitting_pointer_(nullptr) {
     395             :   DCHECK(AllocatedOperand::IsSupportedRepresentation(rep));
     396             :   bits_ = AssignedRegisterField::encode(kUnassignedRegister) |
     397   176873525 :           RepresentationField::encode(rep) |
     398   176873525 :           ControlFlowRegisterHint::encode(kUnassignedRegister);
     399           0 : }
     400             : 
     401           0 : void LiveRange::VerifyPositions() const {
     402             :   // Walk the positions, verifying that each is in an interval.
     403           0 :   UseInterval* interval = first_interval_;
     404           0 :   for (UsePosition* pos = first_pos_; pos != nullptr; pos = pos->next()) {
     405           0 :     CHECK(Start() <= pos->pos());
     406           0 :     CHECK(pos->pos() <= End());
     407           0 :     CHECK_NOT_NULL(interval);
     408           0 :     while (!interval->Contains(pos->pos()) && interval->end() != pos->pos()) {
     409             :       interval = interval->next();
     410           0 :       CHECK_NOT_NULL(interval);
     411             :     }
     412             :   }
     413           0 : }
     414             : 
     415           0 : void LiveRange::VerifyIntervals() const {
     416             :   DCHECK(first_interval()->start() == Start());
     417             :   LifetimePosition last_end = first_interval()->end();
     418           0 :   for (UseInterval* interval = first_interval()->next(); interval != nullptr;
     419             :        interval = interval->next()) {
     420             :     DCHECK(last_end <= interval->start());
     421             :     last_end = interval->end();
     422             :   }
     423             :   DCHECK(last_end == End());
     424           0 : }
     425             : 
     426           0 : void LiveRange::set_assigned_register(int reg) {
     427             :   DCHECK(!HasRegisterAssigned() && !spilled());
     428   195609319 :   bits_ = AssignedRegisterField::update(bits_, reg);
     429           0 : }
     430             : 
     431           0 : void LiveRange::UnsetAssignedRegister() {
     432             :   DCHECK(HasRegisterAssigned() && !spilled());
     433           0 :   bits_ = AssignedRegisterField::update(bits_, kUnassignedRegister);
     434           0 : }
     435             : 
     436           0 : void LiveRange::AttachToNext() {
     437             :   DCHECK_NOT_NULL(next_);
     438             :   DCHECK_NE(TopLevel()->last_child_covers_, next_);
     439           0 :   last_interval_->set_next(next_->first_interval());
     440           0 :   next_->first_interval_ = nullptr;
     441           0 :   last_interval_ = next_->last_interval_;
     442           0 :   next_->last_interval_ = nullptr;
     443           0 :   if (first_pos() == nullptr) {
     444           0 :     first_pos_ = next_->first_pos();
     445             :   } else {
     446             :     UsePosition* ptr = first_pos_;
     447           0 :     while (ptr->next() != nullptr) {
     448             :       ptr = ptr->next();
     449             :     }
     450           0 :     ptr->set_next(next_->first_pos());
     451             :   }
     452           0 :   next_->first_pos_ = nullptr;
     453           0 :   LiveRange* old_next = next_;
     454           0 :   next_ = next_->next_;
     455           0 :   old_next->next_ = nullptr;
     456           0 : }
     457             : 
     458           0 : void LiveRange::Unspill() {
     459             :   DCHECK(spilled());
     460             :   set_spilled(false);
     461           0 :   bits_ = AssignedRegisterField::update(bits_, kUnassignedRegister);
     462           0 : }
     463             : 
     464           0 : void LiveRange::Spill() {
     465             :   DCHECK(!spilled());
     466             :   DCHECK(!TopLevel()->HasNoSpillType());
     467             :   set_spilled(true);
     468    26614214 :   bits_ = AssignedRegisterField::update(bits_, kUnassignedRegister);
     469           0 : }
     470             : 
     471           0 : RegisterKind LiveRange::kind() const {
     472   130764321 :   return IsFloatingPoint(representation()) ? FP_REGISTERS : GENERAL_REGISTERS;
     473             : }
     474             : 
     475           0 : UsePosition* LiveRange::FirstHintPosition(int* register_index) const {
     476    84045352 :   for (UsePosition* pos = first_pos_; pos != nullptr; pos = pos->next()) {
     477   241144230 :     if (pos->HintRegister(register_index)) return pos;
     478             :   }
     479             :   return nullptr;
     480             : }
     481             : 
     482           0 : UsePosition* LiveRange::NextUsePosition(LifetimePosition start) const {
     483    58024323 :   UsePosition* use_pos = last_processed_use_;
     484    58024323 :   if (use_pos == nullptr || use_pos->pos() > start) {
     485             :     use_pos = first_pos();
     486             :   }
     487   767185449 :   while (use_pos != nullptr && use_pos->pos() < start) {
     488             :     use_pos = use_pos->next();
     489             :   }
     490    58024323 :   last_processed_use_ = use_pos;
     491           0 :   return use_pos;
     492             : }
     493             : 
     494    28993078 : UsePosition* LiveRange::NextUsePositionRegisterIsBeneficial(
     495             :     LifetimePosition start) const {
     496             :   UsePosition* pos = NextUsePosition(start);
     497    79331439 :   while (pos != nullptr && !pos->RegisterIsBeneficial()) {
     498             :     pos = pos->next();
     499             :   }
     500    28993078 :   return pos;
     501             : }
     502             : 
     503           0 : LifetimePosition LiveRange::NextLifetimePositionRegisterIsBeneficial(
     504             :     const LifetimePosition& start) const {
     505    12559597 :   UsePosition* next_use = NextUsePositionRegisterIsBeneficial(start);
     506    12559595 :   if (next_use == nullptr) return End();
     507             :   return next_use->pos();
     508             : }
     509             : 
     510           0 : UsePosition* LiveRange::PreviousUsePositionRegisterIsBeneficial(
     511             :     LifetimePosition start) const {
     512             :   UsePosition* pos = first_pos();
     513             :   UsePosition* prev = nullptr;
     514      313505 :   while (pos != nullptr && pos->pos() < start) {
     515      252507 :     if (pos->RegisterIsBeneficial()) prev = pos;
     516             :     pos = pos->next();
     517             :   }
     518           0 :   return prev;
     519             : }
     520             : 
     521    25928788 : UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) const {
     522             :   UsePosition* pos = NextUsePosition(start);
     523    72857607 :   while (pos != nullptr && pos->type() != UsePositionType::kRequiresRegister) {
     524             :     pos = pos->next();
     525             :   }
     526    25928788 :   return pos;
     527             : }
     528             : 
     529     2416899 : UsePosition* LiveRange::NextSlotPosition(LifetimePosition start) const {
     530    15108887 :   for (UsePosition* pos = NextUsePosition(start); pos != nullptr;
     531             :        pos = pos->next()) {
     532     6346743 :     if (pos->type() != UsePositionType::kRequiresSlot) continue;
     533             :     return pos;
     534             :   }
     535             :   return nullptr;
     536             : }
     537             : 
     538    13618147 : bool LiveRange::CanBeSpilled(LifetimePosition pos) const {
     539             :   // We cannot spill a live range that has a use requiring a register
     540             :   // at the current or the immediate next position.
     541    13618147 :   UsePosition* use_pos = NextRegisterPosition(pos);
     542    13618147 :   if (use_pos == nullptr) return true;
     543             :   return use_pos->pos() > pos.NextStart().End();
     544             : }
     545             : 
     546    91752494 : bool LiveRange::IsTopLevel() const { return top_level_ == this; }
     547             : 
     548   151677552 : InstructionOperand LiveRange::GetAssignedOperand() const {
     549             :   DCHECK(!IsEmpty());
     550   151677552 :   if (HasRegisterAssigned()) {
     551             :     DCHECK(!spilled());
     552             :     return AllocatedOperand(LocationOperand::REGISTER, representation(),
     553    84590041 :                             assigned_register());
     554             :   }
     555             :   DCHECK(spilled());
     556             :   DCHECK(!HasRegisterAssigned());
     557    67087511 :   if (TopLevel()->HasSpillOperand()) {
     558             :     InstructionOperand* op = TopLevel()->GetSpillOperand();
     559             :     DCHECK(!op->IsUnallocated());
     560    43137730 :     return *op;
     561             :   }
     562    23949781 :   return TopLevel()->GetSpillRangeOperand();
     563             : }
     564             : 
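                      : // Note: current_interval_ serves as a search cache. A lookup resumes
                      : // from the cached interval when the queried position is not behind it;
                      : // otherwise the cache is dropped and the search restarts at
                      : // first_interval_.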
     565           0 : UseInterval* LiveRange::FirstSearchIntervalForPosition(
     566             :     LifetimePosition position) const {
     567   968484210 :   if (current_interval_ == nullptr) return first_interval_;
     568   671750714 :   if (current_interval_->start() > position) {
     569     2879170 :     current_interval_ = nullptr;
     570     2333887 :     return first_interval_;
     571             :   }
     572             :   return current_interval_;
     573             : }
     574             : 
     575           0 : void LiveRange::AdvanceLastProcessedMarker(
     576             :     UseInterval* to_start_of, LifetimePosition but_not_past) const {
     577   557103911 :   if (to_start_of == nullptr) return;
     578   557123650 :   if (to_start_of->start() > but_not_past) return;
     579   277253970 :   LifetimePosition start = current_interval_ == nullptr
     580             :                                ? LifetimePosition::Invalid()
     581   277253970 :                                : current_interval_->start();
     582   277253970 :   if (to_start_of->start() > start) {
     583   113161769 :     current_interval_ = to_start_of;
     584             :   }
     585             : }
     586             : 
     587    45490010 : LiveRange* LiveRange::SplitAt(LifetimePosition position, Zone* zone) {
     588             :   int new_id = TopLevel()->GetNextChildId();
     589             :   LiveRange* child = new (zone) LiveRange(new_id, representation(), TopLevel());
     590    45489278 :   child->set_bundle(bundle_);
     591             :   // If we split, we do so because we're about to switch registers or move
     592             :   // to/from a slot, so there's no value in connecting hints.
     593    45489278 :   DetachAt(position, child, zone, DoNotConnectHints);
     594             : 
     595    45490725 :   child->top_level_ = TopLevel();
     596    45490725 :   child->next_ = next_;
     597    45490725 :   next_ = child;
     598    45490725 :   return child;
     599             : }
     600             : 
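                      : // Note: DetachAt() moves everything at or after |position| into
                      : // |result| and returns the last use position that stays behind in this
                      : // range, or nullptr if every use moved over to |result|.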
     601    75731030 : UsePosition* LiveRange::DetachAt(LifetimePosition position, LiveRange* result,
     602             :                                  Zone* zone,
     603             :                                  HintConnectionOption connect_hints) {
     604             :   DCHECK(Start() < position);
     605             :   DCHECK(End() > position);
     606             :   DCHECK(result->IsEmpty());
     607             :   // Find the last interval that ends before the position. If the
     608             :   // position is contained in one of the intervals in the chain, we
     609             :   // split that interval and use the first part.
     610             :   UseInterval* current = FirstSearchIntervalForPosition(position);
     611             : 
     612             :   // If the split position coincides with the beginning of a use interval
      613             :   // we need to split use positions in a special way.
     614             :   bool split_at_start = false;
     615             : 
     616    75731030 :   if (current->start() == position) {
     617             :     // When splitting at start we need to locate the previous use interval.
     618        1300 :     current = first_interval_;
     619             :   }
     620             : 
     621             :   UseInterval* after = nullptr;
     622    94162481 :   while (current != nullptr) {
     623    94162924 :     if (current->Contains(position)) {
     624             :       after = current->SplitAt(position, zone);
     625    49461527 :       break;
     626             :     }
     627             :     UseInterval* next = current->next();
     628    44701625 :     if (next->start() >= position) {
     629             :       split_at_start = (next->start() == position);
     630             :       after = next;
     631             :       current->set_next(nullptr);
     632             :       break;
     633             :     }
     634             :     current = next;
     635             :   }
     636             :   DCHECK_NOT_NULL(after);
     637             : 
     638             :   // Partition original use intervals to the two live ranges.
     639             :   UseInterval* before = current;
     640             :   result->last_interval_ =
     641    75731258 :       (last_interval_ == before)
     642             :           ? after            // Only interval in the range after split.
     643    75731258 :           : last_interval_;  // Last interval of the original range.
     644    75731258 :   result->first_interval_ = after;
     645    75731258 :   last_interval_ = before;
     646             : 
     647             :   // Find the last use position before the split and the first use
     648             :   // position after it.
     649             :   UsePosition* use_after =
     650    89767002 :       splitting_pointer_ == nullptr || splitting_pointer_->pos() > position
     651             :           ? first_pos()
     652   138775472 :           : splitting_pointer_;
     653             :   UsePosition* use_before = nullptr;
     654    75731258 :   if (split_at_start) {
     655             :     // The split position coincides with the beginning of a use interval (the
      656             :     // end of a lifetime hole). A use at this position belongs to the split
      657             :     // child, because the split child owns the use interval covering it.
     658     6440467 :     while (use_after != nullptr && use_after->pos() < position) {
     659             :       use_before = use_after;
     660             :       use_after = use_after->next();
     661             :     }
     662             :   } else {
     663   125306324 :     while (use_after != nullptr && use_after->pos() <= position) {
     664             :       use_before = use_after;
     665             :       use_after = use_after->next();
     666             :     }
     667             :   }
     668             : 
     669             :   // Partition original use positions to the two live ranges.
     670    75731258 :   if (use_before != nullptr) {
     671             :     use_before->set_next(nullptr);
     672             :   } else {
     673    46617603 :     first_pos_ = nullptr;
     674             :   }
     675    75731258 :   result->first_pos_ = use_after;
     676             : 
     677             :   // Discard cached iteration state. It might be pointing
      678             :   // to a use that no longer belongs to this live range.
     679    75731258 :   last_processed_use_ = nullptr;
     680    75731258 :   current_interval_ = nullptr;
     681             : 
     682    91705358 :   if (connect_hints == ConnectHints && use_before != nullptr &&
     683    15974100 :       use_after != nullptr) {
     684             :     use_after->SetHint(use_before);
     685             :   }
     686             : #ifdef DEBUG
     687             :   VerifyChildStructure();
     688             :   result->VerifyChildStructure();
     689             : #endif
     690    75731258 :   return use_before;
     691             : }
     692             : 
     693           0 : void LiveRange::UpdateParentForAllChildren(TopLevelLiveRange* new_top_level) {
     694             :   LiveRange* child = this;
     695    45291836 :   for (; child != nullptr; child = child->next()) {
     696    39628164 :     child->top_level_ = new_top_level;
     697             :   }
     698           0 : }
     699             : 
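                      : // Note: once allocation has picked |op| (the assigned location) and
                      : // |spill_op| (the spill slot), this rewrites every recorded use of the
                      : // value in place: slot-requiring uses get |spill_op|, all others |op|.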
     700    91007709 : void LiveRange::ConvertUsesToOperand(const InstructionOperand& op,
     701             :                                      const InstructionOperand& spill_op) {
     702   309075539 :   for (UsePosition* pos = first_pos(); pos != nullptr; pos = pos->next()) {
     703             :     DCHECK(Start() <= pos->pos() && pos->pos() <= End());
     704   109033915 :     if (!pos->HasOperand()) continue;
     705   108407748 :     switch (pos->type()) {
     706             :       case UsePositionType::kRequiresSlot:
     707             :         DCHECK(spill_op.IsStackSlot() || spill_op.IsFPStackSlot());
     708             :         InstructionOperand::ReplaceWith(pos->operand(), &spill_op);
     709             :         break;
     710             :       case UsePositionType::kRequiresRegister:
     711             :         DCHECK(op.IsRegister() || op.IsFPRegister());
     712             :         V8_FALLTHROUGH;
     713             :       case UsePositionType::kRegisterOrSlot:
     714             :       case UsePositionType::kRegisterOrSlotOrConstant:
     715             :         InstructionOperand::ReplaceWith(pos->operand(), &op);
     716             :         break;
     717             :     }
     718             :   }
     719    91007709 : }
     720             : 
     721             : // This implements an ordering on live ranges so that they are ordered by their
     722             : // start positions.  This is needed for the correctness of the register
     723             : // allocation algorithm.  If two live ranges start at the same offset then there
     724             : // is a tie breaker based on where the value is first used.  This part of the
     725             : // ordering is merely a heuristic.
     726   369080787 : bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
     727             :   LifetimePosition start = Start();
     728             :   LifetimePosition other_start = other->Start();
     729   369080787 :   if (start == other_start) {
      730             :     // Prefer the register that has a control-flow hint, to make sure it gets
      731             :     // allocated first. This allows the control-flow-aware allocation to
      732             :     // just put ranges back into the queue without other ranges interfering.
     733    25040952 :     if (controlflow_hint() < other->controlflow_hint()) {
     734             :       return true;
     735             :     }
     736             :     // The other has a smaller hint.
     737    25040952 :     if (controlflow_hint() > other->controlflow_hint()) {
     738             :       return false;
     739             :     }
     740             :     // Both have the same hint or no hint at all. Use first use position.
     741             :     UsePosition* pos = first_pos();
     742             :     UsePosition* other_pos = other->first_pos();
     743             :     // To make the order total, handle the case where both positions are null.
     744    25041025 :     if (pos == other_pos) return TopLevel()->vreg() < other->TopLevel()->vreg();
     745    14641078 :     if (pos == nullptr) return false;
     746    14320065 :     if (other_pos == nullptr) return true;
     747             :     // To make the order total, handle the case where both positions are equal.
     748    13945134 :     if (pos->pos() == other_pos->pos())
     749     8356183 :       return TopLevel()->vreg() < other->TopLevel()->vreg();
     750             :     return pos->pos() < other_pos->pos();
     751             :   }
     752   344039835 :   return start < other_start;
     753             : }
     754             : 
     755           0 : void LiveRange::SetUseHints(int register_index) {
     756   135152195 :   for (UsePosition* pos = first_pos(); pos != nullptr; pos = pos->next()) {
     757    92580002 :     if (!pos->HasOperand()) continue;
     758    91945349 :     switch (pos->type()) {
     759             :       case UsePositionType::kRequiresSlot:
     760             :         break;
     761             :       case UsePositionType::kRequiresRegister:
     762             :       case UsePositionType::kRegisterOrSlot:
     763             :       case UsePositionType::kRegisterOrSlotOrConstant:
     764             :         pos->set_assigned_register(register_index);
     765             :         break;
     766             :     }
     767             :   }
     768           0 : }
     769             : 
     770           0 : bool LiveRange::CanCover(LifetimePosition position) const {
     771   266209292 :   if (IsEmpty()) return false;
     772   266210675 :   return Start() <= position && position < End();
     773             : }
     774             : 
     775   265544275 : bool LiveRange::Covers(LifetimePosition position) const {
     776   265544275 :   if (!CanCover(position)) return false;
     777             :   UseInterval* start_search = FirstSearchIntervalForPosition(position);
     778   540538215 :   for (UseInterval* interval = start_search; interval != nullptr;
     779             :        interval = interval->next()) {
     780             :     DCHECK(interval->next() == nullptr ||
     781             :            interval->next()->start() >= interval->start());
     782             :     AdvanceLastProcessedMarker(interval, position);
     783   374349220 :     if (interval->Contains(position)) return true;
     784   263263895 :     if (interval->start() > position) return false;
     785             :   }
     786             :   return false;
     787             : }
     788             : 
     789           0 : LifetimePosition LiveRange::NextEndAfter(LifetimePosition position) const {
     790             :   UseInterval* start_search = FirstSearchIntervalForPosition(position);
     791   118168400 :   while (start_search->end() < position) {
     792             :     start_search = start_search->next();
     793             :   }
     794           0 :   return start_search->end();
     795             : }
     796             : 
     797           0 : LifetimePosition LiveRange::NextStartAfter(LifetimePosition position) const {
     798             :   UseInterval* start_search = FirstSearchIntervalForPosition(position);
     799   244011977 :   while (start_search->start() < position) {
     800             :     start_search = start_search->next();
     801             :   }
     802           0 :   return start_search->start();
     803             : }
     804             : 
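                      : // Note: a two-pointer sweep over the two sorted interval chains; it
                      : // returns the first position at which this range and |other| overlap,
                      : // or LifetimePosition::Invalid() if they are disjoint.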
     805   416670719 : LifetimePosition LiveRange::FirstIntersection(LiveRange* other) const {
     806             :   UseInterval* b = other->first_interval();
     807   416670719 :   if (b == nullptr) return LifetimePosition::Invalid();
     808             :   LifetimePosition advance_last_processed_up_to = b->start();
     809             :   UseInterval* a = FirstSearchIntervalForPosition(b->start());
     810  1090099971 :   while (a != nullptr && b != nullptr) {
     811   702976894 :     if (a->start() > other->End()) break;
     812   648860176 :     if (b->start() > End()) break;
     813   643894358 :     LifetimePosition cur_intersection = a->Intersect(b);
     814   643867624 :     if (cur_intersection.IsValid()) {
     815    76008335 :       return cur_intersection;
     816             :     }
     817   567859289 :     if (a->start() < b->start()) {
     818             :       a = a->next();
     819   413899354 :       if (a == nullptr || a->start() > other->End()) break;
     820             :       AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
     821             :     } else {
     822             :       b = b->next();
     823             :     }
     824             :   }
     825             :   return LifetimePosition::Invalid();
     826             : }
     827             : 
     828           0 : void LiveRange::Print(const RegisterConfiguration* config,
     829             :                       bool with_children) const {
     830           0 :   StdoutStream os;
     831             :   PrintableLiveRange wrapper;
     832           0 :   wrapper.register_configuration_ = config;
     833           0 :   for (const LiveRange* i = this; i != nullptr; i = i->next()) {
     834           0 :     wrapper.range_ = i;
     835           0 :     os << wrapper << std::endl;
     836           0 :     if (!with_children) break;
     837             :   }
     838           0 : }
     839             : 
     840           0 : void LiveRange::Print(bool with_children) const {
     841           0 :   Print(RegisterConfiguration::Default(), with_children);
     842           0 : }
     843             : 
     844           0 : bool LiveRange::RegisterFromBundle(int* hint) const {
     845    48595958 :   if (bundle_ == nullptr || bundle_->reg() == kUnassignedRegister) return false;
     846     2644643 :   *hint = bundle_->reg();
     847           0 :   return true;
     848             : }
     849             : 
     850           0 : void LiveRange::UpdateBundleRegister(int reg) const {
     851    42572193 :   if (bundle_ == nullptr || bundle_->reg() != kUnassignedRegister) return;
     852             :   bundle_->set_reg(reg);
     853             : }
     854             : 
     855             : struct TopLevelLiveRange::SpillMoveInsertionList : ZoneObject {
     856             :   SpillMoveInsertionList(int gap_index, InstructionOperand* operand,
     857             :                          SpillMoveInsertionList* next)
     858    25927272 :       : gap_index(gap_index), operand(operand), next(next) {}
     859             :   const int gap_index;
     860             :   InstructionOperand* const operand;
     861             :   SpillMoveInsertionList* const next;
     862             : };
     863             : 
     864          71 : TopLevelLiveRange::TopLevelLiveRange(int vreg, MachineRepresentation rep)
     865             :     : LiveRange(0, rep, this),
     866             :       vreg_(vreg),
     867             :       last_child_id_(0),
     868             :       splintered_from_(nullptr),
     869             :       spill_operand_(nullptr),
     870             :       spill_move_insertion_locations_(nullptr),
     871             :       spilled_in_deferred_blocks_(false),
     872             :       spill_start_index_(kMaxInt),
     873             :       last_pos_(nullptr),
     874             :       last_child_covers_(this),
     875             :       splinter_(nullptr),
     876   117116356 :       has_preassigned_slot_(false) {
     877             :   bits_ |= SpillTypeField::encode(SpillType::kNoSpillType);
     878          71 : }
     879             : 
     880             : #if DEBUG
     881             : int TopLevelLiveRange::debug_virt_reg() const {
     882             :   return IsSplinter() ? splintered_from()->vreg() : vreg();
     883             : }
     884             : #endif
     885             : 
     886           0 : void TopLevelLiveRange::RecordSpillLocation(Zone* zone, int gap_index,
     887             :                                             InstructionOperand* operand) {
     888             :   DCHECK(HasNoSpillType());
     889             :   spill_move_insertion_locations_ = new (zone) SpillMoveInsertionList(
     890    51854544 :       gap_index, operand, spill_move_insertion_locations_);
     891           0 : }
     892             : 
     893    19954511 : void TopLevelLiveRange::CommitSpillMoves(RegisterAllocationData* data,
     894             :                                          const InstructionOperand& op,
     895             :                                          bool might_be_duplicated) {
     896             :   DCHECK_IMPLIES(op.IsConstant(),
     897             :                  GetSpillMoveInsertionLocations(data) == nullptr);
     898             :   InstructionSequence* sequence = data->code();
     899             :   Zone* zone = sequence->zone();
     900             : 
     901     4504416 :   for (SpillMoveInsertionList* to_spill = GetSpillMoveInsertionLocations(data);
     902    24458927 :        to_spill != nullptr; to_spill = to_spill->next) {
     903     4504202 :     Instruction* instr = sequence->InstructionAt(to_spill->gap_index);
     904             :     ParallelMove* move =
     905             :         instr->GetOrCreateParallelMove(Instruction::START, zone);
     906             :     // Skip insertion if it's possible that the move exists already as a
     907             :     // constraint move from a fixed output register to a slot.
     908     4504192 :     if (might_be_duplicated || has_preassigned_slot()) {
     909             :       bool found = false;
     910     1931638 :       for (MoveOperands* move_op : *move) {
     911     1223420 :         if (move_op->IsEliminated()) continue;
     912     3661685 :         if (move_op->source().Equals(*to_spill->operand) &&
     913             :             move_op->destination().Equals(op)) {
     914             :           found = true;
     915      753938 :           if (has_preassigned_slot()) move_op->Eliminate();
     916             :           break;
     917             :         }
     918             :       }
     919     1462156 :       if (found) continue;
     920             :     }
     921     3750446 :     if (!has_preassigned_slot()) {
     922     3311533 :       move->AddMove(*to_spill->operand, op);
     923             :     }
     924             :   }
     925    19954725 : }
     926             : 
     927           0 : void TopLevelLiveRange::SetSpillOperand(InstructionOperand* operand) {
     928             :   DCHECK(HasNoSpillType());
     929             :   DCHECK(!operand->IsUnallocated() && !operand->IsImmediate());
     930             :   set_spill_type(SpillType::kSpillOperand);
     931    15451480 :   spill_operand_ = operand;
     932           0 : }
     933             : 
     934           0 : void TopLevelLiveRange::SetSpillRange(SpillRange* spill_range) {
     935             :   DCHECK(!HasSpillOperand());
     936             :   DCHECK(spill_range);
     937    11106587 :   spill_range_ = spill_range;
     938           0 : }
     939             : 
     940           0 : AllocatedOperand TopLevelLiveRange::GetSpillRangeOperand() const {
     941             :   SpillRange* spill_range = GetSpillRange();
     942             :   int index = spill_range->assigned_slot();
     943           0 :   return AllocatedOperand(LocationOperand::STACK_SLOT, representation(), index);
     944             : }
     945             : 
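                      : // Note: Splinter() carves [start, end) out of this range and appends it
                      : // to splinter(); per the comments below this is used for the portions of
                      : // a range on deferred code paths, and Merge() later folds the splinter
                      : // back into its parent.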
     946    15974106 : void TopLevelLiveRange::Splinter(LifetimePosition start, LifetimePosition end,
     947             :                                  Zone* zone) {
     948             :   DCHECK(start != Start() || end != End());
     949             :   DCHECK(start < end);
     950             : 
     951             :   TopLevelLiveRange splinter_temp(-1, representation());
     952             :   UsePosition* last_in_splinter = nullptr;
     953             :   // Live ranges defined in deferred blocks stay in deferred blocks, so we
     954             :   // don't need to splinter them. That means that start should always be
     955             :   // after the beginning of the range.
     956             :   DCHECK(start > Start());
     957             : 
     958    15974106 :   if (end >= End()) {
     959             :     DCHECK(start > Start());
     960     1706231 :     DetachAt(start, &splinter_temp, zone, ConnectHints);
     961     1706212 :     next_ = nullptr;
     962             :   } else {
     963             :     DCHECK(start < End() && Start() < end);
     964             : 
     965             :     const int kInvalidId = std::numeric_limits<int>::max();
     966             : 
     967    14267875 :     UsePosition* last = DetachAt(start, &splinter_temp, zone, ConnectHints);
     968             : 
     969             :     LiveRange end_part(kInvalidId, this->representation(), nullptr);
     970             :     // The last chunk exits the deferred region, and we don't want to connect
     971             :     // hints here, because the non-deferred region shouldn't be affected
     972             :     // by allocation decisions on the deferred path.
     973             :     last_in_splinter =
     974    14267891 :         splinter_temp.DetachAt(end, &end_part, zone, DoNotConnectHints);
     975             : 
     976    14268133 :     next_ = end_part.next_;
     977    14268133 :     last_interval_->set_next(end_part.first_interval_);
     978             :     // The next splinter will happen either at or after the current interval.
     979             :     // We can optimize DetachAt by setting current_interval_ accordingly,
     980             :     // which will then be picked up by FirstSearchIntervalForPosition.
     981    14268133 :     current_interval_ = last_interval_;
     982    14268133 :     last_interval_ = end_part.last_interval_;
     983             : 
     984    14268133 :     if (first_pos_ == nullptr) {
     985     1086952 :       first_pos_ = end_part.first_pos_;
     986             :     } else {
     987    13181181 :       splitting_pointer_ = last;
     988    13181181 :       if (last != nullptr) last->set_next(end_part.first_pos_);
     989             :     }
     990             :   }
     991             : 
     992    15974345 :   if (splinter()->IsEmpty()) {
     993     5663604 :     splinter()->first_interval_ = splinter_temp.first_interval_;
     994     5663604 :     splinter()->last_interval_ = splinter_temp.last_interval_;
     995             :   } else {
     996    10310741 :     splinter()->last_interval_->set_next(splinter_temp.first_interval_);
     997    10310741 :     splinter()->last_interval_ = splinter_temp.last_interval_;
     998             :   }
     999    15974345 :   if (splinter()->first_pos() == nullptr) {
    1000    12458230 :     splinter()->first_pos_ = splinter_temp.first_pos_;
    1001             :   } else {
    1002     3516115 :     splinter()->last_pos_->set_next(splinter_temp.first_pos_);
    1003             :   }
    1004    15974345 :   if (last_in_splinter != nullptr) {
    1005     2918444 :     splinter()->last_pos_ = last_in_splinter;
    1006             :   } else {
    1007    16863656 :     if (splinter()->first_pos() != nullptr &&
    1008     3807755 :         splinter()->last_pos_ == nullptr) {
    1009     1400461 :       splinter()->last_pos_ = splinter()->first_pos();
    1010     4472973 :       for (UsePosition* pos = splinter()->first_pos(); pos != nullptr;
    1011             :            pos = pos->next()) {
    1012     1536256 :         splinter()->last_pos_ = pos;
    1013             :       }
    1014             :     }
    1015             :   }
    1016             : #if DEBUG
    1017             :   Verify();
    1018             :   splinter()->Verify();
    1019             : #endif
    1020    15974345 : }
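                      : 
                      : // Editor's note: the splinter operation above is easier to follow on a
                      : // flat interval list. Below is a minimal, self-contained sketch under
                      : // hypothetical types (std::pair intervals, not the UseInterval/LiveRange
                      : // machinery of this file): carve [start, end) out of a sorted list of
                      : // half-open intervals and return the carved-out pieces.
                      : #include <algorithm>
                      : #include <utility>
                      : #include <vector>
                      : 
                      : using Intervals = std::vector<std::pair<int, int>>;  // sorted, half-open
                      : 
                      : inline Intervals SplinterSketch(Intervals* range, int start, int end) {
                      :   Intervals splinter, kept;
                      :   for (auto [s, e] : *range) {
                      :     if (e <= start || s >= end) {  // entirely outside [start, end)
                      :       kept.emplace_back(s, e);
                      :       continue;
                      :     }
                      :     if (s < start) kept.emplace_back(s, start);  // prefix stays behind
                      :     splinter.emplace_back(std::max(s, start), std::min(e, end));
                      :     if (e > end) kept.emplace_back(end, e);  // suffix stays behind
                      :   }
                      :   *range = std::move(kept);
                      :   return splinter;
                      : }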
    1021             : 
    1022     5663353 : void TopLevelLiveRange::SetSplinteredFrom(TopLevelLiveRange* splinter_parent) {
    1023     5663353 :   splintered_from_ = splinter_parent;
    1024     5663353 :   if (!HasSpillOperand() && splinter_parent->spill_range_ != nullptr) {
    1025     3269716 :     SetSpillRange(splinter_parent->spill_range_);
    1026             :   }
    1027     5663353 : }
    1028             : 
    1029     5663658 : void TopLevelLiveRange::UpdateSpillRangePostMerge(TopLevelLiveRange* merged) {
    1030             :   DCHECK(merged->TopLevel() == this);
    1031             : 
    1032     6729345 :   if (HasNoSpillType() && merged->HasSpillRange()) {
    1033             :     set_spill_type(merged->spill_type());
    1034             :     DCHECK_LT(0, GetSpillRange()->live_ranges().size());
    1035      698459 :     merged->spill_range_ = nullptr;
    1036             :     merged->bits_ =
    1037     1396918 :         SpillTypeField::update(merged->bits_, SpillType::kNoSpillType);
    1038             :   }
    1039     5663658 : }
    1040             : 
    1041     5663795 : void TopLevelLiveRange::Merge(TopLevelLiveRange* other, Zone* zone) {
    1042             :   DCHECK(Start() < other->Start());
    1043             :   DCHECK(other->splintered_from() == this);
    1044             : 
    1045     5663795 :   LiveRange* first = this;
    1046             :   LiveRange* second = other;
    1047             :   DCHECK(first->Start() < second->Start());
    1048    59220608 :   while (first != nullptr && second != nullptr) {
    1049             :     DCHECK(first != second);
    1050             :     // Make sure the ranges are in order each time we iterate.
    1051    53556936 :     if (second->Start() < first->Start()) {
    1052             :       LiveRange* tmp = second;
    1053             :       second = first;
    1054             :       first = tmp;
    1055             :       continue;
    1056             :     }
    1057             : 
    1058    31700298 :     if (first->End() <= second->Start()) {
    1059     9866319 :       if (first->next() == nullptr ||
    1060             :           first->next()->Start() > second->Start()) {
    1061             :         // First is in order before second.
    1062             :         LiveRange* temp = first->next();
    1063     5687308 :         first->next_ = second;
    1064             :         first = temp;
    1065             :       } else {
    1066             :         // First is in order before its successor (or second), so advance first.
    1067             :         first = first->next();
    1068             :       }
    1069             :       continue;
    1070             :     }
    1071             : 
    1072             :     DCHECK(first->Start() < second->Start());
    1073             :     // If first and second intersect, split first.
    1074    21833979 :     if (first->Start() < second->End() && second->Start() < first->End()) {
    1075    21834005 :       LiveRange* temp = first->SplitAt(second->Start(), zone);
    1076    21833882 :       CHECK(temp != first);
    1077             :       temp->set_spilled(first->spilled());
    1078    21833882 :       if (!temp->spilled())
    1079             :         temp->set_assigned_register(first->assigned_register());
    1080             : 
    1081    21833882 :       first->next_ = second;
    1082             :       first = temp;
    1083    21833882 :       continue;
    1084             :     }
    1085             :     DCHECK(first->End() <= second->Start());
    1086             :   }
    1087             : 
    1088     5663672 :   TopLevel()->UpdateParentForAllChildren(TopLevel());
    1089     5663672 :   TopLevel()->UpdateSpillRangePostMerge(other);
    1090             :   TopLevel()->register_slot_use(other->slot_use_kind());
    1091             : 
    1092             : #if DEBUG
    1093             :   Verify();
    1094             : #endif
    1095     5663668 : }
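                      : 
                      : // Editor's note: a hypothetical sketch of the interleaving Merge performs,
                      : // over sorted vectors instead of the LiveRange child chain. When a parent
                      : // piece overlaps a splinter piece, the parent piece is split at the
                      : // splinter's start, mirroring the SplitAt call above.
                      : #include <algorithm>
                      : #include <cstddef>
                      : #include <utility>
                      : #include <vector>
                      : 
                      : using Piece = std::pair<int, int>;  // half-open [start, end)
                      : 
                      : inline std::vector<Piece> MergeSketch(std::vector<Piece> parent,
                      :                                       const std::vector<Piece>& splinter) {
                      :   std::vector<Piece> out;
                      :   size_t i = 0, j = 0;
                      :   while (i < parent.size() && j < splinter.size()) {
                      :     Piece& p = parent[i];
                      :     const Piece& s = splinter[j];
                      :     if (p.second <= s.first) {  // parent piece lies fully before splinter
                      :       out.push_back(p);
                      :       ++i;
                      :     } else if (s.second <= p.first) {  // splinter piece lies fully before
                      :       out.push_back(s);
                      :       ++j;
                      :     } else {  // overlap: split the parent piece at the splinter's start
                      :       if (p.first < s.first) out.emplace_back(p.first, s.first);
                      :       out.push_back(s);
                      :       p.first = std::max(p.first, s.second);  // keep any remaining tail
                      :       ++j;
                      :       if (p.first >= p.second) ++i;  // tail became empty; drop it
                      :     }
                      :   }
                      :   while (i < parent.size()) out.push_back(parent[i++]);
                      :   while (j < splinter.size()) out.push_back(splinter[j++]);
                      :   return out;
                      : }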
    1096             : 
    1097           0 : void TopLevelLiveRange::VerifyChildrenInOrder() const {
    1098             :   LifetimePosition last_end = End();
    1099           0 :   for (const LiveRange* child = this->next(); child != nullptr;
    1100             :        child = child->next()) {
    1101             :     DCHECK(last_end <= child->Start());
    1102             :     last_end = child->End();
    1103             :   }
    1104           0 : }
    1105             : 
    1106           0 : LiveRange* TopLevelLiveRange::GetChildCovers(LifetimePosition pos) {
    1107           0 :   LiveRange* child = last_child_covers_;
    1108           0 :   while (child != nullptr && child->End() <= pos) {
    1109             :     child = child->next();
    1110             :   }
    1111           0 :   last_child_covers_ = child;
    1112           0 :   return !child || !child->Covers(pos) ? nullptr : child;
    1113             : }
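                      : 
                      : // Editor's note: GetChildCovers memoizes last_child_covers_ so that a
                      : // non-decreasing sequence of queries walks the child chain only once
                      : // overall. A hypothetical sketch of the same idea over a sorted vector:
                      : #include <cstddef>
                      : #include <utility>
                      : #include <vector>
                      : 
                      : struct ChildCoverSketch {
                      :   std::vector<std::pair<int, int>> children;  // sorted, half-open
                      :   size_t cursor = 0;                          // memoized last answer
                      : 
                      :   // Returns the index of the child covering pos, or -1. As in the caller
                      :   // above, queries must be non-decreasing for the cursor to stay valid.
                      :   int Covering(int pos) {
                      :     while (cursor < children.size() && children[cursor].second <= pos) {
                      :       ++cursor;
                      :     }
                      :     if (cursor == children.size()) return -1;
                      :     const auto& c = children[cursor];
                      :     return (c.first <= pos && pos < c.second) ? static_cast<int>(cursor)
                      :                                               : -1;
                      :   }
                      : };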
    1114             : 
    1115           0 : void TopLevelLiveRange::Verify() const {
    1116             :   VerifyChildrenInOrder();
    1117           0 :   for (const LiveRange* child = this; child != nullptr; child = child->next()) {
     1118             :     child->VerifyChildStructure();
    1119             :   }
    1120           0 : }
    1121             : 
    1122           0 : void TopLevelLiveRange::ShortenTo(LifetimePosition start) {
    1123    68566638 :   TRACE("Shorten live range %d to [%d\n", vreg(), start.value());
    1124             :   DCHECK_NOT_NULL(first_interval_);
    1125             :   DCHECK(first_interval_->start() <= start);
    1126             :   DCHECK(start < first_interval_->end());
    1127    68566729 :   first_interval_->set_start(start);
    1128           0 : }
    1129             : 
    1130     2121825 : void TopLevelLiveRange::EnsureInterval(LifetimePosition start,
    1131             :                                        LifetimePosition end, Zone* zone) {
    1132     2121825 :   TRACE("Ensure live range %d in interval [%d %d[\n", vreg(), start.value(),
    1133             :         end.value());
    1134             :   LifetimePosition new_end = end;
    1135    10154330 :   while (first_interval_ != nullptr && first_interval_->start() <= end) {
    1136     4016252 :     if (first_interval_->end() > end) {
    1137             :       new_end = first_interval_->end();
    1138             :     }
    1139     4016252 :     first_interval_ = first_interval_->next();
    1140             :   }
    1141             : 
    1142     2121826 :   UseInterval* new_interval = new (zone) UseInterval(start, new_end);
    1143     2121827 :   new_interval->set_next(first_interval_);
    1144     2121827 :   first_interval_ = new_interval;
    1145     2121827 :   if (new_interval->next() == nullptr) {
    1146      895652 :     last_interval_ = new_interval;
    1147             :   }
    1148     2121827 : }
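                      : 
                      : // Editor's note: a hypothetical flat-list sketch of EnsureInterval: drop
                      : // every existing interval that starts at or before 'end', remembering the
                      : // furthest endpoint absorbed, then prepend one covering interval.
                      : #include <algorithm>
                      : #include <utility>
                      : #include <vector>
                      : 
                      : inline void EnsureIntervalSketch(
                      :     std::vector<std::pair<int, int>>* intervals, int start, int end) {
                      :   int new_end = end;
                      :   auto it = intervals->begin();
                      :   while (it != intervals->end() && it->first <= end) {
                      :     new_end = std::max(new_end, it->second);  // absorb overlapping head
                      :     it = intervals->erase(it);
                      :   }
                      :   intervals->insert(intervals->begin(), {start, new_end});
                      : }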
    1149             : 
    1150   396156594 : void TopLevelLiveRange::AddUseInterval(LifetimePosition start,
    1151             :                                        LifetimePosition end, Zone* zone) {
    1152   396156594 :   TRACE("Add to live range %d interval [%d %d[\n", vreg(), start.value(),
    1153             :         end.value());
    1154   396228022 :   if (first_interval_ == nullptr) {
    1155    93454384 :     UseInterval* interval = new (zone) UseInterval(start, end);
    1156    93449599 :     first_interval_ = interval;
    1157    93449599 :     last_interval_ = interval;
    1158             :   } else {
    1159   302773638 :     if (end == first_interval_->start()) {
    1160             :       first_interval_->set_start(start);
    1161   187998182 :     } else if (end < first_interval_->start()) {
    1162   126475819 :       UseInterval* interval = new (zone) UseInterval(start, end);
    1163   126475700 :       interval->set_next(first_interval_);
    1164   126475700 :       first_interval_ = interval;
    1165             :     } else {
     1166             :       // The order of instruction processing (see ProcessInstructions)
     1167             :       // guarantees that each new use interval either precedes, intersects
     1168             :       // with, or touches the previously added interval.
    1169             :       DCHECK(start <= first_interval_->end());
    1170             :       first_interval_->set_start(Min(start, first_interval_->start()));
    1171    61522363 :       first_interval_->set_end(Max(end, first_interval_->end()));
    1172             :     }
    1173             :   }
    1174   396223118 : }
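                      : 
                      : // Editor's note: the invariant documented above means AddUseInterval only
                      : // ever prepends to, or extends, the head of the list. A hypothetical
                      : // flat-list sketch of that fast path:
                      : #include <algorithm>
                      : #include <utility>
                      : #include <vector>
                      : 
                      : // 'intervals' is sorted; new intervals arrive in reverse program order.
                      : inline void AddUseIntervalSketch(
                      :     std::vector<std::pair<int, int>>* intervals, int start, int end) {
                      :   if (intervals->empty() || end < intervals->front().first) {
                      :     intervals->insert(intervals->begin(), {start, end});  // plain prepend
                      :   } else {
                      :     // New interval touches or overlaps the head: grow the head instead.
                      :     auto& head = intervals->front();
                      :     head.first = std::min(head.first, start);
                      :     head.second = std::max(head.second, end);
                      :   }
                      : }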
    1175             : 
    1176   111095075 : void TopLevelLiveRange::AddUsePosition(UsePosition* use_pos) {
    1177             :   LifetimePosition pos = use_pos->pos();
    1178   111095075 :   TRACE("Add to live range %d use position %d\n", vreg(), pos.value());
    1179             :   UsePosition* prev_hint = nullptr;
    1180             :   UsePosition* prev = nullptr;
    1181   111098431 :   UsePosition* current = first_pos_;
    1182   111369425 :   while (current != nullptr && current->pos() < pos) {
    1183      135497 :     prev_hint = current->HasHint() ? current : prev_hint;
    1184             :     prev = current;
    1185             :     current = current->next();
    1186             :   }
    1187             : 
    1188   111098431 :   if (prev == nullptr) {
    1189   110962934 :     use_pos->set_next(first_pos_);
    1190   110962934 :     first_pos_ = use_pos;
    1191             :   } else {
    1192             :     use_pos->set_next(prev->next());
    1193             :     prev->set_next(use_pos);
    1194             :   }
    1195             : 
    1196   222195506 :   if (prev_hint == nullptr && use_pos->HasHint()) {
    1197    26582374 :     current_hint_position_ = use_pos;
    1198             :   }
    1199   111097490 : }
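                      : 
                      : // Editor's note: AddUsePosition is a sorted insert into a singly linked
                      : // list; because uses are mostly introduced in reverse order, the common
                      : // case is an O(1) prepend. A hypothetical vector-based equivalent:
                      : #include <algorithm>
                      : #include <vector>
                      : 
                      : inline void AddUsePositionSketch(std::vector<int>* positions, int pos) {
                      :   // The walk above advances only while current->pos() < pos, so a new use
                      :   // lands before any existing use at the same position: lower_bound.
                      :   auto it = std::lower_bound(positions->begin(), positions->end(), pos);
                      :   positions->insert(it, pos);
                      : }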
    1200             : 
    1201             : static bool AreUseIntervalsIntersecting(UseInterval* interval1,
    1202             :                                         UseInterval* interval2) {
    1203   324090104 :   while (interval1 != nullptr && interval2 != nullptr) {
    1204   323857297 :     if (interval1->start() < interval2->start()) {
    1205   101764299 :       if (interval1->end() > interval2->start()) {
    1206             :         return true;
    1207             :       }
    1208             :       interval1 = interval1->next();
    1209             :     } else {
    1210   222092998 :       if (interval2->end() > interval1->start()) {
    1211             :         return true;
    1212             :       }
    1213             :       interval2 = interval2->next();
    1214             :     }
    1215             :   }
    1216             :   return false;
    1217             : }
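                      : 
                      : // Editor's note: a worked example of the sweep above, using vectors and a
                      : // hypothetical helper (not part of this file). Each step checks the list
                      : // whose head interval starts first against the other, then advances it.
                      : #include <cassert>
                      : #include <cstddef>
                      : #include <utility>
                      : #include <vector>
                      : 
                      : using Ivs = std::vector<std::pair<int, int>>;  // sorted, half-open
                      : 
                      : inline bool IntersectSketch(const Ivs& a, const Ivs& b) {
                      :   size_t i = 0, j = 0;
                      :   while (i < a.size() && j < b.size()) {
                      :     if (a[i].first < b[j].first) {
                      :       if (a[i].second > b[j].first) return true;  // a's head overlaps b's
                      :       ++i;
                      :     } else {
                      :       if (b[j].second > a[i].first) return true;  // b's head overlaps a's
                      :       ++j;
                      :     }
                      :   }
                      :   return false;
                      : }
                      : 
                      : inline void IntersectExample() {
                      :   assert(!IntersectSketch({{0, 2}, {5, 7}}, {{2, 4}}));  // touch, no overlap
                      :   assert(IntersectSketch({{3, 6}}, {{2, 4}}));           // [3,4) is shared
                      : }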
    1218             : 
    1219           0 : std::ostream& operator<<(std::ostream& os,
    1220             :                          const PrintableLiveRange& printable_range) {
    1221           0 :   const LiveRange* range = printable_range.range_;
    1222           0 :   os << "Range: " << range->TopLevel()->vreg() << ":" << range->relative_id()
    1223           0 :      << " ";
    1224           0 :   if (range->TopLevel()->is_phi()) os << "phi ";
    1225           0 :   if (range->TopLevel()->is_non_loop_phi()) os << "nlphi ";
    1226             : 
    1227             :   os << "{" << std::endl;
    1228             :   UseInterval* interval = range->first_interval();
    1229             :   UsePosition* use_pos = range->first_pos();
    1230           0 :   while (use_pos != nullptr) {
    1231           0 :     if (use_pos->HasOperand()) {
    1232           0 :       os << *use_pos->operand() << use_pos->pos() << " ";
    1233             :     }
    1234             :     use_pos = use_pos->next();
    1235             :   }
    1236             :   os << std::endl;
    1237             : 
    1238           0 :   while (interval != nullptr) {
    1239           0 :     os << '[' << interval->start() << ", " << interval->end() << ')'
    1240             :        << std::endl;
    1241             :     interval = interval->next();
    1242             :   }
    1243           0 :   os << "}";
    1244           0 :   return os;
    1245             : }
    1246             : 
    1247             : namespace {
    1248           0 : void PrintBlockRow(std::ostream& os, const InstructionBlocks& blocks) {
    1249           0 :   os << "     ";
    1250           0 :   for (auto block : blocks) {
    1251             :     LifetimePosition start_pos = LifetimePosition::GapFromInstructionIndex(
    1252             :         block->first_instruction_index());
    1253             :     LifetimePosition end_pos = LifetimePosition::GapFromInstructionIndex(
    1254             :                                    block->last_instruction_index())
    1255             :                                    .NextFullStart();
    1256           0 :     int length = end_pos.value() - start_pos.value();
    1257           0 :     constexpr int kMaxPrefixLength = 32;
    1258             :     char buffer[kMaxPrefixLength];
    1259             :     int rpo_number = block->rpo_number().ToInt();
    1260           0 :     const char* deferred_marker = block->IsDeferred() ? "(deferred)" : "";
    1261           0 :     int max_prefix_length = std::min(length, kMaxPrefixLength);
    1262           0 :     int prefix = snprintf(buffer, max_prefix_length, "[-B%d-%s", rpo_number,
    1263           0 :                           deferred_marker);
    1264           0 :     os << buffer;
    1265           0 :     int remaining = length - std::min(prefix, max_prefix_length) - 1;
    1266           0 :     for (int i = 0; i < remaining; ++i) os << '-';
    1267             :     os << ']';
    1268             :   }
    1269             :   os << '\n';
    1270           0 : }
    1271             : }  // namespace
    1272             : 
    1273           0 : void LinearScanAllocator::PrintRangeRow(std::ostream& os,
    1274             :                                         const TopLevelLiveRange* toplevel) {
    1275             :   int position = 0;
    1276           0 :   os << std::setw(3) << toplevel->vreg()
    1277           0 :      << (toplevel->IsSplinter() ? "s:" : ": ");
    1278             : 
    1279             :   const char* kind_string;
    1280           0 :   switch (toplevel->spill_type()) {
    1281             :     case TopLevelLiveRange::SpillType::kSpillRange:
    1282             :       kind_string = "ss";
    1283             :       break;
    1284             :     case TopLevelLiveRange::SpillType::kDeferredSpillRange:
    1285             :       kind_string = "sd";
    1286           0 :       break;
    1287             :     case TopLevelLiveRange::SpillType::kSpillOperand:
    1288             :       kind_string = "so";
    1289           0 :       break;
    1290             :     default:
    1291             :       kind_string = "s?";
    1292             :   }
    1293             : 
    1294           0 :   for (const LiveRange* range = toplevel; range != nullptr;
    1295             :        range = range->next()) {
    1296           0 :     for (UseInterval* interval = range->first_interval(); interval != nullptr;
    1297             :          interval = interval->next()) {
    1298             :       LifetimePosition start = interval->start();
    1299             :       LifetimePosition end = interval->end();
    1300           0 :       CHECK_GE(start.value(), position);
    1301           0 :       for (; start.value() > position; position++) {
    1302             :         os << ' ';
    1303             :       }
    1304           0 :       int length = end.value() - start.value();
    1305           0 :       constexpr int kMaxPrefixLength = 32;
    1306             :       char buffer[kMaxPrefixLength];
    1307           0 :       int max_prefix_length = std::min(length + 1, kMaxPrefixLength);
    1308             :       int prefix;
    1309           0 :       if (range->spilled()) {
    1310           0 :         prefix = snprintf(buffer, max_prefix_length, "|%s", kind_string);
    1311             :       } else {
    1312             :         const char* reg_name;
    1313           0 :         if (range->assigned_register() == kUnassignedRegister) {
    1314             :           reg_name = "???";
    1315             :         } else {
    1316             :           reg_name = RegisterName(range->assigned_register());
    1317             :         }
    1318           0 :         prefix = snprintf(buffer, max_prefix_length, "|%s", reg_name);
    1319             :       }
    1320           0 :       os << buffer;
    1321           0 :       position += std::min(prefix, max_prefix_length - 1);
    1322           0 :       CHECK_GE(end.value(), position);
    1323           0 :       const char line_style = range->spilled() ? '-' : '=';
    1324           0 :       for (; end.value() > position; position++) {
    1325             :         os << line_style;
    1326             :       }
    1327             :     }
    1328             :   }
    1329             :   os << '\n';
    1330           0 : }
    1331             : 
    1332           0 : void LinearScanAllocator::PrintRangeOverview(std::ostream& os) {
    1333           0 :   PrintBlockRow(os, code()->instruction_blocks());
    1334           0 :   for (auto const toplevel : data()->fixed_live_ranges()) {
    1335           0 :     if (toplevel == nullptr) continue;
    1336           0 :     PrintRangeRow(os, toplevel);
    1337             :   }
    1338             :   int rowcount = 0;
    1339           0 :   for (auto toplevel : data()->live_ranges()) {
    1340           0 :     if (!CanProcessRange(toplevel)) continue;
    1341           0 :     if (rowcount++ % 10 == 0) PrintBlockRow(os, code()->instruction_blocks());
    1342           0 :     PrintRangeRow(os, toplevel);
    1343             :   }
    1344           0 : }
    1345             : 
    1346     5943344 : SpillRange::SpillRange(TopLevelLiveRange* parent, Zone* zone)
    1347             :     : live_ranges_(zone),
    1348             :       assigned_slot_(kUnassignedSlot),
    1349    11886688 :       byte_width_(GetByteWidth(parent->representation())) {
     1350             :   // Spill ranges are created for top-level, non-splintered ranges, so that
     1351             :   // merging decisions consider the full extent of the virtual register and
     1352             :   // avoid clobbering it.
    1353             :   DCHECK(!parent->IsSplinter());
    1354             :   UseInterval* result = nullptr;
    1355             :   UseInterval* node = nullptr;
    1356             :   // Copy the intervals for all ranges.
    1357    24466088 :   for (LiveRange* range = parent; range != nullptr; range = range->next()) {
    1358             :     UseInterval* src = range->first_interval();
    1359    35363129 :     while (src != nullptr) {
    1360             :       UseInterval* new_node = new (zone) UseInterval(src->start(), src->end());
    1361    13050906 :       if (result == nullptr) {
    1362             :         result = new_node;
    1363             :       } else {
    1364             :         node->set_next(new_node);
    1365             :       }
    1366             :       node = new_node;
    1367             :       src = src->next();
    1368             :     }
    1369             :   }
    1370     5943399 :   use_interval_ = result;
    1371     5943399 :   live_ranges().push_back(parent);
    1372     5943242 :   end_position_ = node->end();
    1373     5943242 :   parent->SetSpillRange(this);
    1374     5943242 : }
    1375             : 
    1376   259250748 : bool SpillRange::IsIntersectingWith(SpillRange* other) const {
    1377   777752248 :   if (this->use_interval_ == nullptr || other->use_interval_ == nullptr ||
    1378   518434158 :       this->End() <= other->use_interval_->start() ||
    1379             :       other->End() <= this->use_interval_->start()) {
    1380             :     return false;
    1381             :   }
    1382   515228422 :   return AreUseIntervalsIntersecting(use_interval_, other->use_interval_);
    1383             : }
    1384             : 
    1385   259911937 : bool SpillRange::TryMerge(SpillRange* other) {
    1386   259911937 :   if (HasSlot() || other->HasSlot()) return false;
    1387   259401834 :   if (byte_width() != other->byte_width() || IsIntersectingWith(other))
    1388             :     return false;
    1389             : 
    1390             :   LifetimePosition max = LifetimePosition::MaxPosition();
    1391     1869464 :   if (End() < other->End() && other->End() != max) {
    1392       70840 :     end_position_ = other->End();
    1393             :   }
    1394     1869464 :   other->end_position_ = max;
    1395             : 
    1396     1869464 :   MergeDisjointIntervals(other->use_interval_);
    1397     1869464 :   other->use_interval_ = nullptr;
    1398             : 
    1399     3763093 :   for (TopLevelLiveRange* range : other->live_ranges()) {
    1400             :     DCHECK(range->GetSpillRange() == other);
    1401             :     range->SetSpillRange(this);
    1402             :   }
    1403             : 
    1404             :   live_ranges().insert(live_ranges().end(), other->live_ranges().begin(),
    1405     1869464 :                        other->live_ranges().end());
    1406             :   other->live_ranges().clear();
    1407             : 
    1408     1869464 :   return true;
    1409             : }
    1410             : 
    1411           0 : void SpillRange::MergeDisjointIntervals(UseInterval* other) {
    1412             :   UseInterval* tail = nullptr;
    1413     1869464 :   UseInterval* current = use_interval_;
    1414     8826622 :   while (other != nullptr) {
     1415             :     // Make sure the 'current' list starts first.
    1416     6957158 :     if (current == nullptr || current->start() > other->start()) {
    1417             :       std::swap(current, other);
    1418             :     }
     1419             :     // Check disjointness.
    1420             :     DCHECK(other == nullptr || current->end() <= other->start());
     1421             :     // Append the 'current' node to the result accumulator and move forward.
    1422     6957158 :     if (tail == nullptr) {
    1423     1869457 :       use_interval_ = current;
    1424             :     } else {
    1425             :       tail->set_next(current);
    1426             :     }
    1427             :     tail = current;
    1428             :     current = current->next();
    1429             :   }
     1430             :   // The other list is empty, so we are done.
    1431           0 : }
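                      : 
                      : // Editor's note: because the two interval lists are disjoint, the in-place
                      : // pointer splice above is exactly the merge step of mergesort. A
                      : // hypothetical vector-based equivalent:
                      : #include <algorithm>
                      : #include <iterator>
                      : #include <utility>
                      : #include <vector>
                      : 
                      : using Iv = std::pair<int, int>;  // half-open, sorted by start
                      : 
                      : inline std::vector<Iv> MergeDisjointSketch(const std::vector<Iv>& a,
                      :                                            const std::vector<Iv>& b) {
                      :   std::vector<Iv> out;
                      :   out.reserve(a.size() + b.size());
                      :   std::merge(a.begin(), a.end(), b.begin(), b.end(),
                      :              std::back_inserter(out));  // pairs compare by start first
                      :   return out;
                      : }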
    1432             : 
    1433           0 : void SpillRange::Print() const {
    1434           0 :   StdoutStream os;
    1435             :   os << "{" << std::endl;
    1436           0 :   for (TopLevelLiveRange* range : live_ranges()) {
    1437           0 :     os << range->vreg() << " ";
    1438             :   }
    1439             :   os << std::endl;
    1440             : 
    1441           0 :   for (UseInterval* i = interval(); i != nullptr; i = i->next()) {
    1442           0 :     os << '[' << i->start() << ", " << i->end() << ')' << std::endl;
    1443             :   }
    1444             :   os << "}" << std::endl;
    1445           0 : }
    1446             : 
    1447           0 : RegisterAllocationData::PhiMapValue::PhiMapValue(PhiInstruction* phi,
    1448             :                                                  const InstructionBlock* block,
    1449             :                                                  Zone* zone)
    1450             :     : phi_(phi),
    1451             :       block_(block),
    1452             :       incoming_operands_(zone),
    1453     4176478 :       assigned_register_(kUnassignedRegister) {
    1454     2088239 :   incoming_operands_.reserve(phi->operands().size());
    1455           0 : }
    1456             : 
    1457           0 : void RegisterAllocationData::PhiMapValue::AddOperand(
    1458             :     InstructionOperand* operand) {
    1459     5054094 :   incoming_operands_.push_back(operand);
    1460           0 : }
    1461             : 
    1462           0 : void RegisterAllocationData::PhiMapValue::CommitAssignment(
    1463             :     const InstructionOperand& assigned) {
    1464     7142314 :   for (InstructionOperand* operand : incoming_operands_) {
    1465             :     InstructionOperand::ReplaceWith(operand, &assigned);
    1466             :   }
    1467           0 : }
    1468             : 
    1469     2640139 : RegisterAllocationData::RegisterAllocationData(
    1470             :     const RegisterConfiguration* config, Zone* zone, Frame* frame,
    1471             :     InstructionSequence* code, RegisterAllocationFlags flags,
    1472             :     const char* debug_name)
    1473             :     : allocation_zone_(zone),
    1474             :       frame_(frame),
    1475             :       code_(code),
    1476             :       debug_name_(debug_name),
    1477             :       config_(config),
    1478             :       phi_map_(allocation_zone()),
    1479             :       live_in_sets_(code->InstructionBlockCount(), nullptr, allocation_zone()),
    1480             :       live_out_sets_(code->InstructionBlockCount(), nullptr, allocation_zone()),
    1481     2641367 :       live_ranges_(code->VirtualRegisterCount() * 2, nullptr,
    1482             :                    allocation_zone()),
    1483     2642215 :       fixed_live_ranges_(kNumberOfFixedRangesPerRegister *
    1484             :                              this->config()->num_general_registers(),
    1485             :                          nullptr, allocation_zone()),
    1486             :       fixed_float_live_ranges_(allocation_zone()),
    1487     2642015 :       fixed_double_live_ranges_(kNumberOfFixedRangesPerRegister *
    1488             :                                     this->config()->num_double_registers(),
    1489             :                                 nullptr, allocation_zone()),
    1490             :       fixed_simd128_live_ranges_(allocation_zone()),
    1491             :       spill_ranges_(code->VirtualRegisterCount(), nullptr, allocation_zone()),
    1492             :       delayed_references_(allocation_zone()),
    1493             :       assigned_registers_(nullptr),
    1494             :       assigned_double_registers_(nullptr),
    1495             :       virtual_register_count_(code->VirtualRegisterCount()),
    1496             :       preassigned_slot_ranges_(zone),
    1497             :       spill_state_(code->InstructionBlockCount(), ZoneVector<LiveRange*>(zone),
    1498             :                    zone),
    1499    26415398 :       flags_(flags) {
    1500             :   if (!kSimpleFPAliasing) {
    1501             :     fixed_float_live_ranges_.resize(
    1502             :         kNumberOfFixedRangesPerRegister * this->config()->num_float_registers(),
    1503             :         nullptr);
    1504             :     fixed_simd128_live_ranges_.resize(
    1505             :         kNumberOfFixedRangesPerRegister *
    1506             :             this->config()->num_simd128_registers(),
    1507             :         nullptr);
    1508             :   }
    1509             : 
    1510             :   assigned_registers_ = new (code_zone())
    1511     2640920 :       BitVector(this->config()->num_general_registers(), code_zone());
    1512             :   assigned_double_registers_ = new (code_zone())
    1513     2641139 :       BitVector(this->config()->num_double_registers(), code_zone());
    1514             :   fixed_register_use_ = new (code_zone())
    1515     2641538 :       BitVector(this->config()->num_general_registers(), code_zone());
    1516             :   fixed_fp_register_use_ = new (code_zone())
    1517     2642023 :       BitVector(this->config()->num_double_registers(), code_zone());
    1518             : 
    1519     2642233 :   this->frame()->SetAllocatedRegisters(assigned_registers_);
    1520     2642233 :   this->frame()->SetAllocatedDoubleRegisters(assigned_double_registers_);
    1521     2642233 : }
    1522             : 
    1523    41977780 : MoveOperands* RegisterAllocationData::AddGapMove(
    1524             :     int index, Instruction::GapPosition position,
    1525             :     const InstructionOperand& from, const InstructionOperand& to) {
    1526             :   Instruction* instr = code()->InstructionAt(index);
    1527             :   ParallelMove* moves = instr->GetOrCreateParallelMove(position, code_zone());
    1528    41978773 :   return moves->AddMove(from, to);
    1529             : }
    1530             : 
    1531           0 : MachineRepresentation RegisterAllocationData::RepresentationFor(
    1532             :     int virtual_register) {
    1533             :   DCHECK_LT(virtual_register, code()->VirtualRegisterCount());
    1534    71765087 :   return code()->GetRepresentation(virtual_register);
    1535             : }
    1536             : 
    1537   332562226 : TopLevelLiveRange* RegisterAllocationData::GetOrCreateLiveRangeFor(int index) {
    1538   332562226 :   if (index >= static_cast<int>(live_ranges().size())) {
    1539           0 :     live_ranges().resize(index + 1, nullptr);
    1540             :   }
    1541   665124452 :   TopLevelLiveRange* result = live_ranges()[index];
    1542   332562226 :   if (result == nullptr) {
    1543    41898433 :     result = NewLiveRange(index, RepresentationFor(index));
    1544    41895612 :     live_ranges()[index] = result;
    1545             :   }
    1546   332558660 :   return result;
    1547             : }
    1548             : 
    1549   101158746 : TopLevelLiveRange* RegisterAllocationData::NewLiveRange(
    1550             :     int index, MachineRepresentation rep) {
    1551   101142179 :   return new (allocation_zone()) TopLevelLiveRange(index, rep);
    1552             : }
    1553             : 
    1554     5663603 : int RegisterAllocationData::GetNextLiveRangeId() {
    1555     5663603 :   int vreg = virtual_register_count_++;
    1556     5663603 :   if (vreg >= static_cast<int>(live_ranges().size())) {
    1557           0 :     live_ranges().resize(vreg + 1, nullptr);
    1558             :   }
    1559     5663603 :   return vreg;
    1560             : }
    1561             : 
    1562     5663575 : TopLevelLiveRange* RegisterAllocationData::NextLiveRange(
    1563             :     MachineRepresentation rep) {
    1564     5663575 :   int vreg = GetNextLiveRangeId();
    1565     5663591 :   TopLevelLiveRange* ret = NewLiveRange(vreg, rep);
    1566     5663500 :   return ret;
    1567             : }
    1568             : 
    1569     2088240 : RegisterAllocationData::PhiMapValue* RegisterAllocationData::InitializePhiMap(
    1570             :     const InstructionBlock* block, PhiInstruction* phi) {
    1571             :   RegisterAllocationData::PhiMapValue* map_value = new (allocation_zone())
    1572             :       RegisterAllocationData::PhiMapValue(phi, block, allocation_zone());
    1573             :   auto res =
    1574     4176477 :       phi_map_.insert(std::make_pair(phi->virtual_register(), map_value));
    1575             :   DCHECK(res.second);
    1576             :   USE(res);
    1577     2088237 :   return map_value;
    1578             : }
    1579             : 
    1580           0 : RegisterAllocationData::PhiMapValue* RegisterAllocationData::GetPhiMapValueFor(
    1581             :     int virtual_register) {
    1582             :   auto it = phi_map_.find(virtual_register);
    1583             :   DCHECK(it != phi_map_.end());
    1584     6785517 :   return it->second;
    1585             : }
    1586             : 
    1587           0 : RegisterAllocationData::PhiMapValue* RegisterAllocationData::GetPhiMapValueFor(
    1588             :     TopLevelLiveRange* top_range) {
    1589           0 :   return GetPhiMapValueFor(top_range->vreg());
    1590             : }
    1591             : 
    1592          42 : bool RegisterAllocationData::ExistsUseWithoutDefinition() {
    1593             :   bool found = false;
    1594          42 :   BitVector::Iterator iterator(live_in_sets()[0]);
    1595          42 :   while (!iterator.Done()) {
    1596             :     found = true;
    1597             :     int operand_index = iterator.Current();
    1598             :     PrintF("Register allocator error: live v%d reached first block.\n",
    1599           0 :            operand_index);
    1600           0 :     LiveRange* range = GetOrCreateLiveRangeFor(operand_index);
    1601           0 :     PrintF("  (first use is at %d)\n", range->first_pos()->pos().value());
    1602           0 :     if (debug_name() == nullptr) {
    1603           0 :       PrintF("\n");
    1604             :     } else {
    1605           0 :       PrintF("  (function: %s)\n", debug_name());
    1606             :     }
    1607           0 :     iterator.Advance();
    1608             :   }
    1609          42 :   return found;
    1610             : }
    1611             : 
     1612             : // If a range is defined in a deferred block, we expect the entire range to
     1613             : // cover only positions in deferred blocks. Otherwise, a block on the hot
     1614             : // path would be dominated by a deferred block, meaning it would be
     1615             : // unreachable without passing through the deferred block, which is a
     1616             : // contradiction. In particular, when such a range contributes a result
     1617             : // back to the hot path, it does so as one of the inputs of a phi. In that
     1618             : // case, the value is transferred via a move in the Gap::END of the last
     1619             : // instruction of a deferred block.
    1620          42 : bool RegisterAllocationData::RangesDefinedInDeferredStayInDeferred() {
    1621             :   const size_t live_ranges_size = live_ranges().size();
    1622         730 :   for (const TopLevelLiveRange* range : live_ranges()) {
    1623         688 :     CHECK_EQ(live_ranges_size,
    1624             :              live_ranges().size());  // TODO(neis): crbug.com/831822
    1625        1009 :     if (range == nullptr || range->IsEmpty() ||
    1626             :         !code()
    1627             :              ->GetInstructionBlock(range->Start().ToInstructionIndex())
    1628         321 :              ->IsDeferred()) {
    1629             :       continue;
    1630             :     }
    1631           0 :     for (const UseInterval* i = range->first_interval(); i != nullptr;
    1632             :          i = i->next()) {
    1633             :       int first = i->FirstGapIndex();
    1634             :       int last = i->LastGapIndex();
    1635           0 :       for (int instr = first; instr <= last;) {
    1636           0 :         const InstructionBlock* block = code()->GetInstructionBlock(instr);
    1637           0 :         if (!block->IsDeferred()) return false;
    1638             :         instr = block->last_instruction_index() + 1;
    1639             :       }
    1640             :     }
    1641             :   }
    1642             :   return true;
    1643             : }
    1644             : 
    1645     6782909 : SpillRange* RegisterAllocationData::AssignSpillRangeToLiveRange(
    1646             :     TopLevelLiveRange* range, SpillMode spill_mode) {
    1647             :   using SpillType = TopLevelLiveRange::SpillType;
    1648             :   DCHECK(!range->HasSpillOperand());
    1649             : 
    1650             :   SpillRange* spill_range = range->GetAllocatedSpillRange();
    1651     6782909 :   if (spill_range == nullptr) {
    1652             :     DCHECK(!range->IsSplinter());
    1653     3579274 :     spill_range = new (allocation_zone()) SpillRange(range, allocation_zone());
    1654             :   }
    1655     6782997 :   if (spill_mode == SpillMode::kSpillDeferred &&
    1656             :       (range->spill_type() != SpillType::kSpillRange)) {
    1657             :     DCHECK(is_turbo_control_flow_aware_allocation());
    1658             :     range->set_spill_type(SpillType::kDeferredSpillRange);
    1659             :   } else {
    1660             :     range->set_spill_type(SpillType::kSpillRange);
    1661             :   }
    1662             : 
    1663             :   int spill_range_index =
    1664     6782997 :       range->IsSplinter() ? range->splintered_from()->vreg() : range->vreg();
    1665             : 
    1666    13565994 :   spill_ranges()[spill_range_index] = spill_range;
    1667             : 
    1668     6782997 :   return spill_range;
    1669             : }
    1670             : 
    1671     2364136 : SpillRange* RegisterAllocationData::CreateSpillRangeForLiveRange(
    1672             :     TopLevelLiveRange* range) {
    1673             :   DCHECK(is_turbo_preprocess_ranges());
    1674             :   DCHECK(!range->HasSpillOperand());
    1675             :   DCHECK(!range->IsSplinter());
    1676             :   SpillRange* spill_range =
    1677     2364122 :       new (allocation_zone()) SpillRange(range, allocation_zone());
    1678     2364138 :   return spill_range;
    1679             : }
    1680             : 
    1681    19756106 : void RegisterAllocationData::MarkFixedUse(MachineRepresentation rep,
    1682             :                                           int index) {
    1683    19756106 :   switch (rep) {
    1684             :     case MachineRepresentation::kFloat32:
    1685             :     case MachineRepresentation::kSimd128:
    1686             :       if (kSimpleFPAliasing) {
    1687      115241 :         fixed_fp_register_use_->Add(index);
    1688             :       } else {
    1689             :         int alias_base_index = -1;
    1690             :         int aliases = config()->GetAliases(
    1691             :             rep, index, MachineRepresentation::kFloat64, &alias_base_index);
    1692             :         DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
    1693             :         while (aliases--) {
    1694             :           int aliased_reg = alias_base_index + aliases;
    1695             :           fixed_fp_register_use_->Add(aliased_reg);
    1696             :         }
    1697             :       }
    1698             :       break;
    1699             :     case MachineRepresentation::kFloat64:
    1700      158116 :       fixed_fp_register_use_->Add(index);
    1701             :       break;
    1702             :     default:
    1703             :       DCHECK(!IsFloatingPoint(rep));
    1704    19482749 :       fixed_register_use_->Add(index);
    1705             :       break;
    1706             :   }
    1707    19756106 : }
    1708             : 
    1709   345584697 : bool RegisterAllocationData::HasFixedUse(MachineRepresentation rep, int index) {
    1710   345584697 :   switch (rep) {
    1711             :     case MachineRepresentation::kFloat32:
    1712             :     case MachineRepresentation::kSimd128:
    1713             :       if (kSimpleFPAliasing) {
    1714     3421836 :         return fixed_fp_register_use_->Contains(index);
    1715             :       } else {
    1716             :         int alias_base_index = -1;
    1717             :         int aliases = config()->GetAliases(
    1718             :             rep, index, MachineRepresentation::kFloat64, &alias_base_index);
    1719             :         DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
    1720             :         bool result = false;
    1721             :         while (aliases-- && !result) {
    1722             :           int aliased_reg = alias_base_index + aliases;
    1723             :           result |= fixed_fp_register_use_->Contains(aliased_reg);
    1724             :         }
    1725             :         return result;
    1726             :       }
    1727             :       break;
    1728             :     case MachineRepresentation::kFloat64:
    1729    33502994 :       return fixed_fp_register_use_->Contains(index);
    1730             :       break;
    1731             :     default:
    1732             :       DCHECK(!IsFloatingPoint(rep));
    1733   654244564 :       return fixed_register_use_->Contains(index);
    1734             :       break;
    1735             :   }
    1736             : }
    1737             : 
    1738    96181578 : void RegisterAllocationData::MarkAllocated(MachineRepresentation rep,
    1739             :                                            int index) {
    1740    96181578 :   switch (rep) {
    1741             :     case MachineRepresentation::kFloat32:
    1742             :     case MachineRepresentation::kSimd128:
    1743             :       if (kSimpleFPAliasing) {
    1744      283918 :         assigned_double_registers_->Add(index);
    1745             :       } else {
    1746             :         int alias_base_index = -1;
    1747             :         int aliases = config()->GetAliases(
    1748             :             rep, index, MachineRepresentation::kFloat64, &alias_base_index);
    1749             :         DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
    1750             :         while (aliases--) {
    1751             :           int aliased_reg = alias_base_index + aliases;
    1752             :           assigned_double_registers_->Add(aliased_reg);
    1753             :         }
    1754             :       }
    1755             :       break;
    1756             :     case MachineRepresentation::kFloat64:
    1757    31175010 :       assigned_double_registers_->Add(index);
    1758             :       break;
    1759             :     default:
    1760             :       DCHECK(!IsFloatingPoint(rep));
    1761    64722650 :       assigned_registers_->Add(index);
    1762             :       break;
    1763             :   }
    1764    96181578 : }
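                      : 
                      : // Editor's note: on targets without simple FP aliasing, the loops above fan
                      : // one float32/simd128 register out to every float64 register it overlaps.
                      : // A hypothetical model of an ARM-style layout, where float32 registers
                      : // s2k and s2k+1 overlap double register dk (an assumption for illustration):
                      : inline void MarkFloat32AliasSketch(bool* double_reg_used,
                      :                                    int float32_index) {
                      :   double_reg_used[float32_index / 2] = true;  // dk covers s2k and s2k+1
                      : }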
    1765             : 
    1766    24781756 : bool RegisterAllocationData::IsBlockBoundary(LifetimePosition pos) const {
    1767    41691212 :   return pos.IsFullStart() &&
    1768    16909578 :          code()->GetInstructionBlock(pos.ToInstructionIndex())->code_start() ==
    1769    24781634 :              pos.ToInstructionIndex();
    1770             : }
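                      : 
                      : // Editor's note: a sketch of the LifetimePosition encoding this predicate
                      : // relies on, reconstructed from the calls above (an assumption, not the V8
                      : // header): each instruction owns four positions, two for the preceding gap
                      : // and two for the instruction itself, so a "full start" is a multiple of 4.
                      : #include <cassert>
                      : 
                      : struct LifetimePositionSketch {
                      :   static constexpr int kStep = 4;  // positions per instruction (assumed)
                      :   int value;
                      :   static LifetimePositionSketch GapFromInstructionIndex(int index) {
                      :     return {index * kStep};
                      :   }
                      :   bool IsFullStart() const { return value % kStep == 0; }
                      :   int ToInstructionIndex() const { return value / kStep; }
                      : };
                      : 
                      : inline void LifetimePositionExample() {
                      :   auto pos = LifetimePositionSketch::GapFromInstructionIndex(3);
                      :   assert(pos.value == 12 && pos.IsFullStart());
                      :   assert(pos.ToInstructionIndex() == 3);
                      : }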
    1771             : 
    1772     5282417 : ConstraintBuilder::ConstraintBuilder(RegisterAllocationData* data)
    1773     5282417 :     : data_(data) {}
    1774             : 
    1775    29938721 : InstructionOperand* ConstraintBuilder::AllocateFixed(
    1776             :     UnallocatedOperand* operand, int pos, bool is_tagged, bool is_input) {
    1777    29938721 :   TRACE("Allocating fixed reg for op %d\n", operand->virtual_register());
    1778             :   DCHECK(operand->HasFixedPolicy());
    1779             :   InstructionOperand allocated;
    1780             :   MachineRepresentation rep = InstructionSequence::DefaultRepresentation();
    1781             :   int virtual_register = operand->virtual_register();
    1782    29938821 :   if (virtual_register != InstructionOperand::kInvalidVirtualRegister) {
    1783             :     rep = data()->RepresentationFor(virtual_register);
    1784             :   }
    1785    29938877 :   if (operand->HasFixedSlotPolicy()) {
    1786             :     allocated = AllocatedOperand(AllocatedOperand::STACK_SLOT, rep,
    1787             :                                  operand->fixed_slot_index());
    1788    28692685 :   } else if (operand->HasFixedRegisterPolicy()) {
    1789             :     DCHECK(!IsFloatingPoint(rep));
    1790             :     DCHECK(data()->config()->IsAllocatableGeneralCode(
    1791             :         operand->fixed_register_index()));
    1792             :     allocated = AllocatedOperand(AllocatedOperand::REGISTER, rep,
    1793             :                                  operand->fixed_register_index());
    1794      404946 :   } else if (operand->HasFixedFPRegisterPolicy()) {
    1795             :     DCHECK(IsFloatingPoint(rep));
    1796             :     DCHECK_NE(InstructionOperand::kInvalidVirtualRegister, virtual_register);
    1797             :     allocated = AllocatedOperand(AllocatedOperand::REGISTER, rep,
    1798             :                                  operand->fixed_register_index());
    1799             :   } else {
    1800           0 :     UNREACHABLE();
    1801             :   }
    1802    49789689 :   if (is_input && allocated.IsAnyRegister()) {
    1803    19757851 :     data()->MarkFixedUse(rep, operand->fixed_register_index());
    1804             :   }
    1805             :   InstructionOperand::ReplaceWith(operand, &allocated);
    1806    29937056 :   if (is_tagged) {
    1807    18613948 :     TRACE("Fixed reg is tagged at %d\n", pos);
    1808             :     Instruction* instr = code()->InstructionAt(pos);
    1809    18614036 :     if (instr->HasReferenceMap()) {
    1810    14800346 :       instr->reference_map()->RecordReference(*AllocatedOperand::cast(operand));
    1811             :     }
    1812             :   }
    1813    29937323 :   return operand;
    1814             : }
    1815             : 
    1816     2640291 : void ConstraintBuilder::MeetRegisterConstraints() {
    1817    23116805 :   for (InstructionBlock* block : code()->instruction_blocks()) {
    1818    20473968 :     MeetRegisterConstraints(block);
    1819             :   }
    1820     2642837 : }
    1821             : 
    1822    20473897 : void ConstraintBuilder::MeetRegisterConstraints(const InstructionBlock* block) {
    1823             :   int start = block->first_instruction_index();
    1824             :   int end = block->last_instruction_index();
    1825             :   DCHECK_NE(-1, start);
    1826   158235695 :   for (int i = start; i <= end; ++i) {
    1827    68878161 :     MeetConstraintsBefore(i);
    1828    68880585 :     if (i != end) MeetConstraintsAfter(i);
    1829             :   }
     1830             :   // Meet register constraints for the instruction at the end of the block.
    1831    20476635 :   MeetRegisterConstraintsForLastInstructionInBlock(block);
    1832    20476252 : }
    1833             : 
    1834    20476457 : void ConstraintBuilder::MeetRegisterConstraintsForLastInstructionInBlock(
    1835             :     const InstructionBlock* block) {
    1836             :   int end = block->last_instruction_index();
    1837             :   Instruction* last_instruction = code()->InstructionAt(end);
    1838    20779663 :   for (size_t i = 0; i < last_instruction->OutputCount(); i++) {
    1839             :     InstructionOperand* output_operand = last_instruction->OutputAt(i);
    1840             :     DCHECK(!output_operand->IsConstant());
    1841             :     UnallocatedOperand* output = UnallocatedOperand::cast(output_operand);
    1842             :     int output_vreg = output->virtual_register();
    1843      151604 :     TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(output_vreg);
    1844             :     bool assigned = false;
    1845      151605 :     if (output->HasFixedPolicy()) {
    1846           2 :       AllocateFixed(output, -1, false, false);
     1847             :       // This value is produced on the stack; we never need to spill it.
    1848           2 :       if (output->IsStackSlot()) {
    1849             :         DCHECK(LocationOperand::cast(output)->index() <
    1850             :                data()->frame()->GetSpillSlotCount());
    1851             :         range->SetSpillOperand(LocationOperand::cast(output));
    1852             :         range->SetSpillStartIndex(end);
    1853             :         assigned = true;
    1854             :       }
    1855             : 
    1856           4 :       for (const RpoNumber& succ : block->successors()) {
    1857           2 :         const InstructionBlock* successor = code()->InstructionBlockAt(succ);
    1858             :         DCHECK_EQ(1, successor->PredecessorCount());
    1859             :         int gap_index = successor->first_instruction_index();
    1860             :         // Create an unconstrained operand for the same virtual register
    1861             :         // and insert a gap move from the fixed output to the operand.
    1862             :         UnallocatedOperand output_copy(UnallocatedOperand::REGISTER_OR_SLOT,
    1863             :                                        output_vreg);
    1864           2 :         data()->AddGapMove(gap_index, Instruction::START, *output, output_copy);
    1865             :       }
    1866             :     }
    1867             : 
    1868      151605 :     if (!assigned) {
    1869      454798 :       for (const RpoNumber& succ : block->successors()) {
    1870      303200 :         const InstructionBlock* successor = code()->InstructionBlockAt(succ);
    1871             :         DCHECK_EQ(1, successor->PredecessorCount());
    1872             :         int gap_index = successor->first_instruction_index();
    1873             :         range->RecordSpillLocation(allocation_zone(), gap_index, output);
    1874             :         range->SetSpillStartIndex(gap_index);
    1875             :       }
    1876             :     }
    1877             :   }
    1878    20476455 : }
    1879             : 
    1880    48410173 : void ConstraintBuilder::MeetConstraintsAfter(int instr_index) {
    1881             :   Instruction* first = code()->InstructionAt(instr_index);
    1882             :   // Handle fixed temporaries.
    1883    49893187 :   for (size_t i = 0; i < first->TempCount(); i++) {
    1884             :     UnallocatedOperand* temp = UnallocatedOperand::cast(first->TempAt(i));
    1885      740469 :     if (temp->HasFixedPolicy()) AllocateFixed(temp, instr_index, false, false);
    1886             :   }
    1887             :   // Handle constant/fixed output operands.
    1888   126385094 :   for (size_t i = 0; i < first->OutputCount(); i++) {
    1889             :     InstructionOperand* output = first->OutputAt(i);
    1890    38988918 :     if (output->IsConstant()) {
    1891             :       int output_vreg = ConstantOperand::cast(output)->virtual_register();
    1892    14336877 :       TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(output_vreg);
    1893    14335906 :       range->SetSpillStartIndex(instr_index + 1);
    1894             :       range->SetSpillOperand(output);
    1895             :       continue;
    1896             :     }
    1897             :     UnallocatedOperand* first_output = UnallocatedOperand::cast(output);
    1898             :     TopLevelLiveRange* range =
    1899    24652041 :         data()->GetOrCreateLiveRangeFor(first_output->virtual_register());
    1900             :     bool assigned = false;
    1901    24649973 :     if (first_output->HasFixedPolicy()) {
    1902             :       int output_vreg = first_output->virtual_register();
    1903             :       UnallocatedOperand output_copy(UnallocatedOperand::REGISTER_OR_SLOT,
    1904             :                                      output_vreg);
    1905             :       bool is_tagged = code()->IsReference(output_vreg);
    1906    10013363 :       if (first_output->HasSecondaryStorage()) {
    1907             :         range->MarkHasPreassignedSlot();
    1908      554416 :         data()->preassigned_slot_ranges().push_back(
    1909     1109018 :             std::make_pair(range, first_output->GetSecondaryStorage()));
    1910             :       }
    1911    10013549 :       AllocateFixed(first_output, instr_index, is_tagged, false);
    1912             : 
     1913             :       // This value is produced on the stack; we never need to spill it.
    1914    10014701 :       if (first_output->IsStackSlot()) {
    1915             :         DCHECK(LocationOperand::cast(first_output)->index() <
    1916             :                data()->frame()->GetTotalFrameSlotCount());
    1917             :         range->SetSpillOperand(LocationOperand::cast(first_output));
    1918     1115572 :         range->SetSpillStartIndex(instr_index + 1);
    1919             :         assigned = true;
    1920             :       }
    1921    10014701 :       data()->AddGapMove(instr_index + 1, Instruction::START, *first_output,
    1922    10014701 :                          output_copy);
    1923             :     }
    1924             :     // Make sure we add a gap move for spilling (if we have not done
    1925             :     // so already).
    1926    24650883 :     if (!assigned) {
    1927    23536198 :       range->RecordSpillLocation(allocation_zone(), instr_index + 1,
    1928             :                                  first_output);
    1929             :       range->SetSpillStartIndex(instr_index + 1);
    1930             :     }
    1931             :   }
    1932    48409752 : }
    1933             : 
    1934    68878722 : void ConstraintBuilder::MeetConstraintsBefore(int instr_index) {
    1935             :   Instruction* second = code()->InstructionAt(instr_index);
     1936             :   // Handle the fixed input operands of the second instruction.
    1937   341365350 :   for (size_t i = 0; i < second->InputCount(); i++) {
    1938             :     InstructionOperand* input = second->InputAt(i);
    1939   136242736 :     if (input->IsImmediate() || input->IsExplicit()) {
    1940             :       continue;  // Ignore immediates and explicitly reserved registers.
    1941             :     }
    1942             :     UnallocatedOperand* cur_input = UnallocatedOperand::cast(input);
    1943    71698944 :     if (cur_input->HasFixedPolicy()) {
    1944             :       int input_vreg = cur_input->virtual_register();
    1945             :       UnallocatedOperand input_copy(UnallocatedOperand::REGISTER_OR_SLOT,
    1946             :                                     input_vreg);
    1947             :       bool is_tagged = code()->IsReference(input_vreg);
    1948    19851395 :       AllocateFixed(cur_input, instr_index, is_tagged, true);
    1949    39699198 :       data()->AddGapMove(instr_index, Instruction::END, input_copy, *cur_input);
    1950             :     }
    1951             :   }
     1952             :   // Handle "output same as input" for the second instruction.
    1953   147158277 :   for (size_t i = 0; i < second->OutputCount(); i++) {
    1954             :     InstructionOperand* output = second->OutputAt(i);
    1955    75552432 :     if (!output->IsUnallocated()) continue;
    1956             :     UnallocatedOperand* second_output = UnallocatedOperand::cast(output);
    1957    24803714 :     if (!second_output->HasSameAsInputPolicy()) continue;
    1958             :     DCHECK_EQ(0, i);  // Only valid for first output.
    1959             :     UnallocatedOperand* cur_input =
    1960             :         UnallocatedOperand::cast(second->InputAt(0));
    1961             :     int output_vreg = second_output->virtual_register();
    1962             :     int input_vreg = cur_input->virtual_register();
    1963             :     UnallocatedOperand input_copy(UnallocatedOperand::REGISTER_OR_SLOT,
    1964             :                                   input_vreg);
    1965             :     *cur_input =
    1966     2725896 :         UnallocatedOperand(*cur_input, second_output->virtual_register());
    1967     2725896 :     MoveOperands* gap_move = data()->AddGapMove(instr_index, Instruction::END,
    1968     2725896 :                                                 input_copy, *cur_input);
    1969             :     DCHECK_NOT_NULL(gap_move);
    1970     3283010 :     if (code()->IsReference(input_vreg) && !code()->IsReference(output_vreg)) {
    1971      557119 :       if (second->HasReferenceMap()) {
    1972             :         RegisterAllocationData::DelayedReference delayed_reference = {
    1973           0 :             second->reference_map(), &gap_move->source()};
    1974           0 :         data()->delayed_references().push_back(delayed_reference);
    1975             :       }
    1976     2168440 :     } else if (!code()->IsReference(input_vreg) &&
    1977             :                code()->IsReference(output_vreg)) {
    1978             :       // The input is assumed to take on a tagged representation
    1979             :       // immediately, before the pointer map can be used. That is, the
    1980             :       // pointer map at the instruction will include the output operand
    1981             :       // (whose value at the beginning of the instruction equals the input
    1982             :       // operand). If this is not desired, the pointer map at this
    1983             :       // instruction needs to be adjusted manually.
    1984             :     }
    1985             :   }
    1986    68880289 : }
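// Editorial sketch, not part of register-allocator.cc: the "output same as
// input" handling above models two-address instructions (e.g. x86 "add dst,
// src", where dst is both read and written). The first input is renamed to
// the output's virtual register, and a gap move re-materializes the original
// input value. With hypothetical numbers:
//
//   before:  i7: v3 = Add v1, v2    (v3 must reuse the operand of v1)
//   after:   gap(i7, END): v3 = v1
//            i7: v3 = Add v3, v2
//
// The constraint then collapses to "output and first input share a virtual
// register", while the move keeps v1 available for any later uses.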
    1987             : 
    1988     2642759 : void ConstraintBuilder::ResolvePhis() {
    1989             :   // Process the blocks in reverse order.
    1990    23118738 :   for (InstructionBlock* block : base::Reversed(code()->instruction_blocks())) {
    1991    20476184 :     ResolvePhis(block);
    1992             :   }
    1993     2642554 : }
    1994             : 
    1995    20475729 : void ConstraintBuilder::ResolvePhis(const InstructionBlock* block) {
    1996    22563969 :   for (PhiInstruction* phi : block->phis()) {
    1997             :     int phi_vreg = phi->virtual_register();
    1998             :     RegisterAllocationData::PhiMapValue* map_value =
    1999     2088252 :         data()->InitializePhiMap(block, phi);
    2000             :     InstructionOperand& output = phi->output();
    2001             :     // Map the destination operands, so the commitment phase can find them.
    2002    12196407 :     for (size_t i = 0; i < phi->operands().size(); ++i) {
    2003             :       InstructionBlock* cur_block =
    2004     5054066 :           code()->InstructionBlockAt(block->predecessors()[i]);
    2005             :       UnallocatedOperand input(UnallocatedOperand::REGISTER_OR_SLOT,
    2006     5054074 :                                phi->operands()[i]);
    2007             :       MoveOperands* move = data()->AddGapMove(
    2008     5054074 :           cur_block->last_instruction_index(), Instruction::END, input, output);
    2009             :       map_value->AddOperand(&move->destination());
    2010             :       DCHECK(!code()
    2011             :                   ->InstructionAt(cur_block->last_instruction_index())
    2012             :                   ->HasReferenceMap());
    2013             :     }
    2014     2088250 :     TopLevelLiveRange* live_range = data()->GetOrCreateLiveRangeFor(phi_vreg);
    2015             :     int gap_index = block->first_instruction_index();
    2016             :     live_range->RecordSpillLocation(allocation_zone(), gap_index, &output);
    2017             :     live_range->SetSpillStartIndex(gap_index);
    2018             :     // Record which nodes are phis; later heuristics rely on this.
    2019             :     live_range->set_is_phi(true);
    2020     2088240 :     live_range->set_is_non_loop_phi(!block->IsLoopHeader());
    2021             :   }
    2022    20475717 : }
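// Editorial sketch, not part of register-allocator.cc: ResolvePhis performs
// the standard out-of-SSA translation, lowering each phi into parallel moves
// at the end of its predecessor blocks. Schematically:
//
//   B3: v9 = phi(v5, v6)    with predecessors B1, B2
//
// becomes
//
//   B1: ...; gap(last_instruction, END): v9 = v5
//   B2: ...; gap(last_instruction, END): v9 = v6
//
// The phi itself emits no code; the spill location recorded at the head of
// B3 covers the case where v9 later ends up spilled.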
    2023             : 
    2024     2640434 : LiveRangeBuilder::LiveRangeBuilder(RegisterAllocationData* data,
    2025             :                                    Zone* local_zone)
    2026     5280868 :     : data_(data), phi_hints_(local_zone) {}
    2027             : 
    2028    20474091 : BitVector* LiveRangeBuilder::ComputeLiveOut(const InstructionBlock* block,
    2029             :                                             RegisterAllocationData* data) {
    2030             :   size_t block_index = block->rpo_number().ToSize();
    2031    20474091 :   BitVector* live_out = data->live_out_sets()[block_index];
    2032    20474091 :   if (live_out == nullptr) {
    2033             :     // Compute live out for the given block, excluding contributions from
    2034             :     // backward successor edges.
    2035             :     Zone* zone = data->allocation_zone();
    2036             :     const InstructionSequence* code = data->code();
    2037             : 
    2038    20474052 :     live_out = new (zone) BitVector(code->VirtualRegisterCount(), zone);
    2039             : 
    2040             :     // Process all successor blocks.
    2041    44295817 :     for (const RpoNumber& succ : block->successors()) {
    2042             :       // Add values live on entry to the successor.
    2043    23823628 :       if (succ <= block->rpo_number()) continue;
    2044    23591292 :       BitVector* live_in = data->live_in_sets()[succ.ToSize()];
    2045    23591292 :       if (live_in != nullptr) live_out->Union(*live_in);
    2046             : 
    2047             :       // All phi input operands corresponding to this successor edge are live
    2048             :       // out from this block.
    2049    23591292 :       const InstructionBlock* successor = code->InstructionBlockAt(succ);
    2050    23591455 :       size_t index = successor->PredecessorIndexOf(block->rpo_number());
    2051             :       DCHECK(index < successor->PredecessorCount());
    2052    28285304 :       for (PhiInstruction* phi : successor->phis()) {
    2053     4695601 :         live_out->Add(phi->operands()[index]);
    2054             :       }
    2055             :     }
    2056    20472189 :     data->live_out_sets()[block_index] = live_out;
    2057             :   }
    2058    20472075 :   return live_out;
    2059             : }
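// Editorial sketch, not part of register-allocator.cc: ComputeLiveOut above
// is one step of a classic backward dataflow computation. A stand-alone
// version of the same rule, using hypothetical simplified types (SketchBlock,
// SketchPhi) and std::set<int> in place of BitVector:

#include <cstddef>
#include <set>
#include <vector>

struct SketchPhi {
  int virtual_register;
  std::vector<int> operands;  // one input vreg per predecessor
};

struct SketchBlock {
  int rpo;  // reverse post-order number
  std::vector<const SketchBlock*> successors;
  std::vector<const SketchBlock*> predecessors;
  std::vector<SketchPhi> phis;
  std::set<int> live_in;
};

// live_out(B) = union of live_in(S) over forward successors S, plus the phi
// inputs in S that flow along the edge B -> S.
std::set<int> SketchComputeLiveOut(const SketchBlock& block) {
  std::set<int> live_out;
  for (const SketchBlock* succ : block.successors) {
    if (succ->rpo <= block.rpo) continue;  // skip backedges, as above
    live_out.insert(succ->live_in.begin(), succ->live_in.end());
    // Find which predecessor slot of succ corresponds to `block`; assumes
    // `block` really is a predecessor of each forward successor.
    std::size_t index = 0;
    while (succ->predecessors[index] != &block) ++index;
    for (const SketchPhi& phi : succ->phis) {
      live_out.insert(phi.operands[index]);
    }
  }
  return live_out;
}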
    2060             : 
    2061    20472297 : void LiveRangeBuilder::AddInitialIntervals(const InstructionBlock* block,
    2062             :                                            BitVector* live_out) {
    2063             :   // Add an interval that includes the entire block to the live range for
    2064             :   // each live_out value.
    2065             :   LifetimePosition start = LifetimePosition::GapFromInstructionIndex(
    2066    20472297 :       block->first_instruction_index());
    2067             :   LifetimePosition end = LifetimePosition::InstructionFromInstructionIndex(
    2068             :                              block->last_instruction_index())
    2069    20472297 :                              .NextStart();
    2070             :   BitVector::Iterator iterator(live_out);
    2071   266389136 :   while (!iterator.Done()) {
    2072             :     int operand_index = iterator.Current();
    2073   122958556 :     TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(operand_index);
    2074   122958571 :     range->AddUseInterval(start, end, allocation_zone());
    2075   122958092 :     iterator.Advance();
    2076             :   }
    2077    20472206 : }
    2078             : 
    2079    29224218 : int LiveRangeBuilder::FixedFPLiveRangeID(int index, MachineRepresentation rep) {
    2080    29224218 :   int result = -index - 1;
    2081    29224218 :   switch (rep) {
    2082             :     case MachineRepresentation::kSimd128:
    2083             :       result -=
    2084          48 :           kNumberOfFixedRangesPerRegister * config()->num_float_registers();
    2085             :       V8_FALLTHROUGH;
    2086             :     case MachineRepresentation::kFloat32:
    2087             :       result -=
    2088       39662 :           kNumberOfFixedRangesPerRegister * config()->num_double_registers();
    2089             :       V8_FALLTHROUGH;
    2090             :     case MachineRepresentation::kFloat64:
    2091             :       result -=
    2092    29224218 :           kNumberOfFixedRangesPerRegister * config()->num_general_registers();
    2093             :       break;
    2094             :     default:
    2095           0 :       UNREACHABLE();
    2096             :       break;
    2097             :   }
    2098    29224218 :   return result;
    2099             : }
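// Editorial note, not part of register-allocator.cc: fixed ranges use
// negative IDs so they can never collide with virtual registers, whose IDs
// are non-negative. The fallthrough switch above stacks the representations
// into disjoint bands: a kFloat64 index is shifted past all fixed
// general-register IDs, a kFloat32 index additionally past all kFloat64 IDs,
// and a kSimd128 index past all three. For illustration, assuming 16 general
// registers and kNumberOfFixedRangesPerRegister == 2, the kFloat64 range for
// index 3 gets ID -3 - 1 - 2 * 16 = -36.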
    2100             : 
    2101   127315593 : TopLevelLiveRange* LiveRangeBuilder::FixedLiveRangeFor(int index,
    2102             :                                                        SpillMode spill_mode) {
    2103             :   int offset = spill_mode == SpillMode::kSpillAtDefinition
    2104             :                    ? 0
    2105   127315593 :                    : config()->num_general_registers();
    2106             :   DCHECK(index < config()->num_general_registers());
    2107   254631186 :   TopLevelLiveRange* result = data()->fixed_live_ranges()[offset + index];
    2108   127315593 :   if (result == nullptr) {
    2109             :     MachineRepresentation rep = InstructionSequence::DefaultRepresentation();
    2110    24400047 :     result = data()->NewLiveRange(FixedLiveRangeID(offset + index), rep);
    2111             :     DCHECK(result->IsFixed());
    2112             :     result->set_assigned_register(index);
    2113    24396784 :     data()->MarkAllocated(rep, index);
    2114    24396686 :     if (spill_mode == SpillMode::kSpillDeferred) {
    2115             :       result->set_deferred_fixed();
    2116             :     }
    2117    24396686 :     data()->fixed_live_ranges()[offset + index] = result;
    2118             :   }
    2119   127312232 :   return result;
    2120             : }
    2121             : 
    2122    91795847 : TopLevelLiveRange* LiveRangeBuilder::FixedFPLiveRangeFor(
    2123             :     int index, MachineRepresentation rep, SpillMode spill_mode) {
    2124             :   int num_regs = config()->num_double_registers();
    2125             :   ZoneVector<TopLevelLiveRange*>* live_ranges =
    2126             :       &data()->fixed_double_live_ranges();
    2127             :   if (!kSimpleFPAliasing) {
    2128             :     switch (rep) {
    2129             :       case MachineRepresentation::kFloat32:
    2130             :         num_regs = config()->num_float_registers();
    2131             :         live_ranges = &data()->fixed_float_live_ranges();
    2132             :         break;
    2133             :       case MachineRepresentation::kSimd128:
    2134             :         num_regs = config()->num_simd128_registers();
    2135             :         live_ranges = &data()->fixed_simd128_live_ranges();
    2136             :         break;
    2137             :       default:
    2138             :         break;
    2139             :     }
    2140             :   }
    2141             : 
    2142    91795847 :   int offset = spill_mode == SpillMode::kSpillAtDefinition ? 0 : num_regs;
    2143             : 
    2144             :   DCHECK(index < num_regs);
    2145             :   USE(num_regs);
    2146   183591694 :   TopLevelLiveRange* result = (*live_ranges)[offset + index];
    2147    91795847 :   if (result == nullptr) {
    2148    29224269 :     result = data()->NewLiveRange(FixedFPLiveRangeID(offset + index, rep), rep);
    2149             :     DCHECK(result->IsFixed());
    2150             :     result->set_assigned_register(index);
    2151    29220025 :     data()->MarkAllocated(rep, index);
    2152    29220606 :     if (spill_mode == SpillMode::kSpillDeferred) {
    2153             :       result->set_deferred_fixed();
    2154             :     }
    2155    29220606 :     (*live_ranges)[offset + index] = result;
    2156             :   }
    2157    91792184 :   return result;
    2158             : }
    2159             : 
    2160   180562066 : TopLevelLiveRange* LiveRangeBuilder::LiveRangeFor(InstructionOperand* operand,
    2161             :                                                   SpillMode spill_mode) {
    2162   180562066 :   if (operand->IsUnallocated()) {
    2163             :     return data()->GetOrCreateLiveRangeFor(
    2164   108387404 :         UnallocatedOperand::cast(operand)->virtual_register());
    2165    72174662 :   } else if (operand->IsConstant()) {
    2166             :     return data()->GetOrCreateLiveRangeFor(
    2167    14337532 :         ConstantOperand::cast(operand)->virtual_register());
    2168    57837130 :   } else if (operand->IsRegister()) {
    2169             :     return FixedLiveRangeFor(
    2170    54535751 :         LocationOperand::cast(operand)->GetRegister().code(), spill_mode);
    2171     3301379 :   } else if (operand->IsFPRegister()) {
    2172             :     LocationOperand* op = LocationOperand::cast(operand);
    2173             :     return FixedFPLiveRangeFor(op->register_code(), op->representation(),
    2174      809043 :                                spill_mode);
    2175             :   } else {
    2176             :     return nullptr;
    2177             :   }
    2178             : }
    2179             : 
    2180   111096217 : UsePosition* LiveRangeBuilder::NewUsePosition(LifetimePosition pos,
    2181             :                                               InstructionOperand* operand,
    2182             :                                               void* hint,
    2183             :                                               UsePositionHintType hint_type) {
    2184   222192090 :   return new (allocation_zone()) UsePosition(pos, operand, hint, hint_type);
    2185             : }
    2186             : 
    2187    72522029 : UsePosition* LiveRangeBuilder::Define(LifetimePosition position,
    2188             :                                       InstructionOperand* operand, void* hint,
    2189             :                                       UsePositionHintType hint_type,
    2190             :                                       SpillMode spill_mode) {
    2191    72522029 :   TopLevelLiveRange* range = LiveRangeFor(operand, spill_mode);
    2192    72522242 :   if (range == nullptr) return nullptr;
    2193             : 
    2194    71277840 :   if (range->IsEmpty() || range->Start() > position) {
    2195             :     // Can happen if there is a definition without use.
    2196     2711202 :     range->AddUseInterval(position, position.NextStart(), allocation_zone());
    2197     2711159 :     range->AddUsePosition(NewUsePosition(position.NextStart()));
    2198             :   } else {
    2199             :     range->ShortenTo(position);
    2200             :   }
    2201    71282609 :   if (!operand->IsUnallocated()) return nullptr;
    2202             :   UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand);
    2203             :   UsePosition* use_pos =
    2204    28254436 :       NewUsePosition(position, unalloc_operand, hint, hint_type);
    2205    28253872 :   range->AddUsePosition(use_pos);
    2206    28254327 :   return use_pos;
    2207             : }
    2208             : 
    2209   108044062 : UsePosition* LiveRangeBuilder::Use(LifetimePosition block_start,
    2210             :                                    LifetimePosition position,
    2211             :                                    InstructionOperand* operand, void* hint,
    2212             :                                    UsePositionHintType hint_type,
    2213             :                                    SpillMode spill_mode) {
    2214   108044062 :   TopLevelLiveRange* range = LiveRangeFor(operand, spill_mode);
    2215   108041276 :   if (range == nullptr) return nullptr;
    2216             :   UsePosition* use_pos = nullptr;
    2217   106795656 :   if (operand->IsUnallocated()) {
    2218             :     UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand);
    2219    80147094 :     use_pos = NewUsePosition(position, unalloc_operand, hint, hint_type);
    2220    80146695 :     range->AddUsePosition(use_pos);
    2221             :   }
    2222   106796235 :   range->AddUseInterval(block_start, position, allocation_zone());
    2223   106794397 :   return use_pos;
    2224             : }
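// Editorial note, not part of register-allocator.cc: Define() and Use() are
// the two primitive events of the backward scan in ProcessInstructions()
// below. Because instructions are visited bottom-up, a range is recorded
// pessimistically first and trimmed once its definition is seen:
//
//   Use(block_start, pos, op):  add the interval [block_start, pos) --
//                               conservatively live from block entry.
//   Define(pos, op):            ShortenTo(pos) -- the value cannot be live
//                               before its definition.
//
// For example, with "i4: v2 = Add v1, v1" followed by "i5: Store v2",
// scanning i5 first records v2 as live on [block_start, i5.end), and
// scanning i4 then shortens the interval to start at v2's definition.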
    2225             : 
    2226    20472084 : void LiveRangeBuilder::ProcessInstructions(const InstructionBlock* block,
    2227             :                                            BitVector* live) {
    2228             :   int block_start = block->first_instruction_index();
    2229             :   LifetimePosition block_start_position =
    2230             :       LifetimePosition::GapFromInstructionIndex(block_start);
    2231             :   bool fixed_float_live_ranges = false;
    2232             :   bool fixed_simd128_live_ranges = false;
    2233             :   if (!kSimpleFPAliasing) {
    2234             :     int mask = data()->code()->representation_mask();
    2235             :     fixed_float_live_ranges = (mask & kFloat32Bit) != 0;
    2236             :     fixed_simd128_live_ranges = (mask & kSimd128Bit) != 0;
    2237             :   }
    2238             :   SpillMode spill_mode = SpillModeForBlock(block);
    2239             : 
    2240   158235470 :   for (int index = block->last_instruction_index(); index >= block_start;
    2241             :        index--) {
    2242             :     LifetimePosition curr_position =
    2243             :         LifetimePosition::InstructionFromInstructionIndex(index);
    2244             :     Instruction* instr = code()->InstructionAt(index);
    2245             :     DCHECK_NOT_NULL(instr);
    2246             :     DCHECK(curr_position.IsInstructionPosition());
    2247             :     // Process output, inputs, and temps of this instruction.
    2248   147157467 :     for (size_t i = 0; i < instr->OutputCount(); i++) {
    2249             :       InstructionOperand* output = instr->OutputAt(i);
    2250    39140721 :       if (output->IsUnallocated()) {
    2251             :         // Unsupported.
    2252             :         DCHECK(!UnallocatedOperand::cast(output)->HasSlotPolicy());
    2253             :         int out_vreg = UnallocatedOperand::cast(output)->virtual_register();
    2254             :         live->Remove(out_vreg);
    2255    24349146 :       } else if (output->IsConstant()) {
    2256             :         int out_vreg = ConstantOperand::cast(output)->virtual_register();
    2257             :         live->Remove(out_vreg);
    2258             :       }
    2259    39797568 :       if (block->IsHandler() && index == block_start && output->IsAllocated() &&
    2260    39344935 :           output->IsRegister() &&
    2261             :           AllocatedOperand::cast(output)->GetRegister() ==
    2262             :               v8::internal::kReturnRegister0) {
    2263             :         // The register defined here is blocked from gap start - it is the
    2264             :         // exception value.
    2265             :         // TODO(mtrofin): should we explore an explicit opcode for
    2266             :         // the first instruction in the handler?
    2267             :         Define(LifetimePosition::GapFromInstructionIndex(index), output,
    2268             :                spill_mode);
    2269             :       } else {
    2270             :         Define(curr_position, output, spill_mode);
    2271             :       }
    2272             :     }
    2273             : 
    2274    68874538 :     if (instr->ClobbersRegisters()) {
    2275   151642448 :       for (int i = 0; i < config()->num_allocatable_general_registers(); ++i) {
    2276             :         // Create a UseInterval at this instruction for all fixed registers
    2277             :         // (including the instruction outputs). Adding another UseInterval here
    2278             :         // is OK because AddUseInterval will just merge it with the existing
    2279             :         // one at the end of the range.
    2280             :         int code = config()->GetAllocatableGeneralCode(i);
    2281    72787984 :         TopLevelLiveRange* range = FixedLiveRangeFor(code, spill_mode);
    2282             :         range->AddUseInterval(curr_position, curr_position.End(),
    2283    72786126 :                               allocation_zone());
    2284             :       }
    2285             :     }
    2286             : 
    2287    68874280 :     if (instr->ClobbersDoubleRegisters()) {
    2288   188045782 :       for (int i = 0; i < config()->num_allocatable_double_registers(); ++i) {
    2289             :         // Add a UseInterval for all DoubleRegisters. See comment above for
    2290             :         // general registers.
    2291             :         int code = config()->GetAllocatableDoubleCode(i);
    2292             :         TopLevelLiveRange* range = FixedFPLiveRangeFor(
    2293    90990053 :             code, MachineRepresentation::kFloat64, spill_mode);
    2294             :         range->AddUseInterval(curr_position, curr_position.End(),
    2295    90986900 :                               allocation_zone());
    2296             :       }
    2297             :       // Clobber fixed float registers on archs with non-simple aliasing.
    2298             :       if (!kSimpleFPAliasing) {
    2299             :         if (fixed_float_live_ranges) {
    2300             :           for (int i = 0; i < config()->num_allocatable_float_registers();
    2301             :                ++i) {
    2302             :             // Add a UseInterval for all FloatRegisters. See comment above for
    2303             :             // general registers.
    2304             :             int code = config()->GetAllocatableFloatCode(i);
    2305             :             TopLevelLiveRange* range = FixedFPLiveRangeFor(
    2306             :                 code, MachineRepresentation::kFloat32, spill_mode);
    2307             :             range->AddUseInterval(curr_position, curr_position.End(),
    2308             :                                   allocation_zone());
    2309             :           }
    2310             :         }
    2311             :         if (fixed_simd128_live_ranges) {
    2312             :           for (int i = 0; i < config()->num_allocatable_simd128_registers();
    2313             :                ++i) {
    2314             :             int code = config()->GetAllocatableSimd128Code(i);
    2315             :             TopLevelLiveRange* range = FixedFPLiveRangeFor(
    2316             :                 code, MachineRepresentation::kSimd128, spill_mode);
    2317             :             range->AddUseInterval(curr_position, curr_position.End(),
    2318             :                                   allocation_zone());
    2319             :           }
    2320             :         }
    2321             :       }
    2322             :     }
    2323             : 
    2324   341344776 :     for (size_t i = 0; i < instr->InputCount(); i++) {
    2325             :       InstructionOperand* input = instr->InputAt(i);
    2326   136228383 :       if (input->IsImmediate() || input->IsExplicit()) {
    2327             :         continue;  // Ignore immediates and explicitly reserved registers.
    2328             :       }
    2329             :       LifetimePosition use_pos;
    2330   123547867 :       if (input->IsUnallocated() &&
    2331             :           UnallocatedOperand::cast(input)->IsUsedAtStart()) {
    2332             :         use_pos = curr_position;
    2333             :       } else {
    2334             :         use_pos = curr_position.End();
    2335             :       }
    2336             : 
    2337    71697766 :       if (input->IsUnallocated()) {
    2338             :         UnallocatedOperand* unalloc = UnallocatedOperand::cast(input);
    2339             :         int vreg = unalloc->virtual_register();
    2340             :         live->Add(vreg);
    2341    51850385 :         if (unalloc->HasSlotPolicy()) {
    2342    13839323 :           if (data()->is_turbo_control_flow_aware_allocation()) {
    2343           0 :             data()->GetOrCreateLiveRangeFor(vreg)->register_slot_use(
    2344             :                 block->IsDeferred()
    2345             :                     ? TopLevelLiveRange::SlotUseKind::kDeferredSlotUse
    2346             :                     : TopLevelLiveRange::SlotUseKind::kGeneralSlotUse);
    2347             :           } else {
    2348    13839323 :             data()->GetOrCreateLiveRangeFor(vreg)->register_slot_use(
    2349             :                 TopLevelLiveRange::SlotUseKind::kGeneralSlotUse);
    2350             :           }
    2351             :         }
    2352             :       }
    2353             :       Use(block_start_position, use_pos, input, spill_mode);
    2354             :     }
    2355             : 
    2356    70369437 :     for (size_t i = 0; i < instr->TempCount(); i++) {
    2357             :       InstructionOperand* temp = instr->TempAt(i);
    2358             :       // Unsupported.
    2359             :       DCHECK_IMPLIES(temp->IsUnallocated(),
    2360             :                      !UnallocatedOperand::cast(temp)->HasSlotPolicy());
    2361      744303 :       if (instr->ClobbersTemps()) {
    2362           0 :         if (temp->IsRegister()) continue;
    2363           0 :         if (temp->IsUnallocated()) {
    2364             :           UnallocatedOperand* temp_unalloc = UnallocatedOperand::cast(temp);
    2365           0 :           if (temp_unalloc->HasFixedPolicy()) {
    2366             :             continue;
    2367             :           }
    2368             :         }
    2369             :       }
    2370             :       Use(block_start_position, curr_position.End(), temp, spill_mode);
    2371             :       Define(curr_position, temp, spill_mode);
    2372             :     }
    2373             : 
    2374             :     // Process the moves of the instruction's gaps, making their sources live.
    2375             :     const Instruction::GapPosition kPositions[] = {Instruction::END,
    2376    68880812 :                                                    Instruction::START};
    2377             :     curr_position = curr_position.PrevStart();
    2378             :     DCHECK(curr_position.IsGapPosition());
    2379   344391020 :     for (const Instruction::GapPosition& position : kPositions) {
    2380   137754223 :       ParallelMove* move = instr->GetParallelMove(position);
    2381   137754223 :       if (move == nullptr) continue;
    2382    25508243 :       if (position == Instruction::END) {
    2383             :         curr_position = curr_position.End();
    2384             :       } else {
    2385             :         curr_position = curr_position.Start();
    2386             :       }
    2387    63152433 :       for (MoveOperands* cur : *move) {
    2388             :         InstructionOperand& from = cur->source();
    2389             :         InstructionOperand& to = cur->destination();
    2390             :         void* hint = &to;
    2391    37643309 :         UsePositionHintType hint_type = UsePosition::HintTypeForOperand(to);
    2392             :         UsePosition* to_use = nullptr;
    2393             :         int phi_vreg = -1;
    2394    37644048 :         if (to.IsUnallocated()) {
    2395             :           int to_vreg = UnallocatedOperand::cast(to).virtual_register();
    2396             :           TopLevelLiveRange* to_range =
    2397    17795062 :               data()->GetOrCreateLiveRangeFor(to_vreg);
    2398    17795039 :           if (to_range->is_phi()) {
    2399             :             phi_vreg = to_vreg;
    2400     5054136 :             if (to_range->is_non_loop_phi()) {
    2401             :               hint = to_range->current_hint_position();
    2402             :               hint_type = hint == nullptr ? UsePositionHintType::kNone
    2403     4339852 :                                           : UsePositionHintType::kUsePos;
    2404             :             } else {
    2405             :               hint_type = UsePositionHintType::kPhi;
    2406             :               hint = data()->GetPhiMapValueFor(to_vreg);
    2407             :             }
    2408             :           } else {
    2409    12740903 :             if (live->Contains(to_vreg)) {
    2410             :               to_use =
    2411    10704354 :                   Define(curr_position, &to, &from,
    2412    10704414 :                          UsePosition::HintTypeForOperand(from), spill_mode);
    2413             :               live->Remove(to_vreg);
    2414             :             } else {
    2415             :               cur->Eliminate();
    2416             :               continue;
    2417             :             }
    2418             :           }
    2419             :         } else {
    2420             :           Define(curr_position, &to, spill_mode);
    2421             :         }
    2422             :         UsePosition* from_use = Use(block_start_position, curr_position, &from,
    2423    35607587 :                                     hint, hint_type, spill_mode);
    2424             :         // Mark range live.
    2425    35607654 :         if (from.IsUnallocated()) {
    2426             :           live->Add(UnallocatedOperand::cast(from).virtual_register());
    2427             :         }
    2428             :         // Resolve use position hints just created.
    2429    35607654 :         if (to_use != nullptr && from_use != nullptr) {
    2430             :           to_use->ResolveHint(from_use);
    2431             :           from_use->ResolveHint(to_use);
    2432             :         }
    2433             :         DCHECK_IMPLIES(to_use != nullptr, to_use->IsResolved());
    2434             :         DCHECK_IMPLIES(from_use != nullptr, from_use->IsResolved());
    2435             :         // Potentially resolve phi hint.
    2436    35607654 :         if (phi_vreg != -1) ResolvePhiHint(&from, from_use);
    2437             :       }
    2438             :     }
    2439             :   }
    2440    20475108 : }
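// Editorial note, not part of register-allocator.cc: observe how calls are
// modelled in the loop above. An instruction that ClobbersRegisters() gives
// every allocatable general register a one-position fixed use interval at
// the instruction, so no ordinary live range can hold a register across it;
// a value live from, say, i7 to i12 with a call at i9 must be split or
// spilled around i9. ClobbersDoubleRegisters() does the same for the FP
// register file.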
    2441             : 
    2442    20475045 : void LiveRangeBuilder::ProcessPhis(const InstructionBlock* block,
    2443             :                                    BitVector* live) {
    2444    22563307 :   for (PhiInstruction* phi : block->phis()) {
    2445             :     // The live range interval already ends at the first instruction of the
    2446             :     // block.
    2447             :     int phi_vreg = phi->virtual_register();
    2448             :     live->Remove(phi_vreg);
    2449             :     // Select a hint from a predecessor block that precedes this block in the
    2450             :     // rpo order. In order of priority:
    2451             :     // - Avoid hints from deferred blocks.
    2452             :     // - Prefer hints from allocated (or explicit) operands.
    2453             :     // - Prefer hints from empty blocks (containing just parallel moves and a
    2454             :     //   jump). In these cases, if we can elide the moves, the jump threader
    2455             :     //   is likely to be able to elide the jump.
    2456             :     // The enforcement of hinting in rpo order is required because hint
    2457             :     // resolution that happens later in the compiler pipeline visits
    2458             :     // instructions in reverse rpo order, relying on the fact that phis are
    2459             :     // encountered before their hints.
    2460             :     InstructionOperand* hint = nullptr;
    2461             :     int hint_preference = 0;
    2462             : 
    2463             :     // The cost of hinting increases with the number of predecessors. At the
    2464             :     // same time, the typical benefit decreases, since this hinting only
    2465             :     // optimises the execution path through one predecessor. A limit of 2 is
    2466             :     // sufficient to hit the common if/else pattern.
    2467             :     int predecessor_limit = 2;
    2468             : 
    2469     4535031 :     for (RpoNumber predecessor : block->predecessors()) {
    2470             :       const InstructionBlock* predecessor_block =
    2471     4179140 :           code()->InstructionBlockAt(predecessor);
    2472             :       DCHECK_EQ(predecessor_block->rpo_number(), predecessor);
    2473             : 
    2474             :       // Only take hints from earlier rpo numbers.
    2475     4179141 :       if (predecessor >= block->rpo_number()) continue;
    2476             : 
    2477             :       // Look up the predecessor instruction.
    2478             :       const Instruction* predecessor_instr =
    2479             :           GetLastInstruction(code(), predecessor_block);
    2480             :       InstructionOperand* predecessor_hint = nullptr;
    2481             :       // Phis are assigned in the END position of the last instruction in each
    2482             :       // predecessor block.
    2483     4829446 :       for (MoveOperands* move :
    2484     4829452 :            *predecessor_instr->GetParallelMove(Instruction::END)) {
    2485             :         InstructionOperand& to = move->destination();
    2486     9658860 :         if (to.IsUnallocated() &&
    2487             :             UnallocatedOperand::cast(to).virtual_register() == phi_vreg) {
    2488             :           predecessor_hint = &move->source();
    2489     3820692 :           break;
    2490             :         }
    2491             :       }
    2492             :       DCHECK_NOT_NULL(predecessor_hint);
    2493             : 
    2494             :       // For each predecessor, generate a score according to the priorities
    2495             :       // described above, and pick the best one. Flags in higher-order bits have
    2496             :       // a higher priority than those in lower-order bits.
    2497             :       int predecessor_hint_preference = 0;
    2498             :       const int kNotDeferredBlockPreference = (1 << 2);
    2499             :       const int kMoveIsAllocatedPreference = (1 << 1);
    2500             :       const int kBlockIsEmptyPreference = (1 << 0);
    2501             : 
    2502             :       // - Avoid hints from deferred blocks.
    2503     3820686 :       if (!predecessor_block->IsDeferred()) {
    2504             :         predecessor_hint_preference |= kNotDeferredBlockPreference;
    2505             :       }
    2506             : 
    2507             :       // - Prefer hints from allocated (or explicit) operands.
    2508             :       //
    2509             :       // Already-allocated or explicit operands are typically assigned using
    2510             :       // the parallel moves on the last instruction. For example:
    2511             :       //
    2512             :       //      gap (v101 = [x0|R|w32]) (v100 = v101)
    2513             :       //      ArchJmp
    2514             :       //    ...
    2515             :       //    phi: v100 = v101 v102
    2516             :       //
    2517             :       // We have already found the END move, so look for a matching START move
    2518             :       // from an allocated (or explicit) operand.
    2519             :       //
    2520             :       // Note that we cannot simply look up data()->live_ranges()[vreg] here
    2521             :       // because the live ranges are still being built when this function is
    2522             :       // called.
    2523             :       // TODO(v8): Find a way to separate hinting from live range analysis in
    2524             :       // BuildLiveRanges so that we can use the O(1) live-range look-up.
    2525             :       auto moves = predecessor_instr->GetParallelMove(Instruction::START);
    2526     3820686 :       if (moves != nullptr) {
    2527      430190 :         for (MoveOperands* move : *moves) {
    2528             :           InstructionOperand& to = move->destination();
    2529      363720 :           if (predecessor_hint->Equals(to)) {
    2530      297249 :             if (move->source().IsAllocated() || move->source().IsExplicit()) {
    2531      297249 :               predecessor_hint_preference |= kMoveIsAllocatedPreference;
    2532             :             }
    2533             :             break;
    2534             :           }
    2535             :         }
    2536             :       }
    2537             : 
    2538             :       // - Prefer hints from empty blocks.
    2539     3820686 :       if (predecessor_block->last_instruction_index() ==
    2540             :           predecessor_block->first_instruction_index()) {
    2541     1410199 :         predecessor_hint_preference |= kBlockIsEmptyPreference;
    2542             :       }
    2543             : 
    2544     7641372 :       if ((hint == nullptr) ||
    2545     3820686 :           (predecessor_hint_preference > hint_preference)) {
    2546             :         // Take the hint from this predecessor.
    2547             :         hint = predecessor_hint;
    2548             :         hint_preference = predecessor_hint_preference;
    2549             :       }
    2550             : 
    2551     3820686 :       if (--predecessor_limit <= 0) break;
    2552             :     }
    2553             :     DCHECK_NOT_NULL(hint);
    2554             : 
    2555             :     LifetimePosition block_start = LifetimePosition::GapFromInstructionIndex(
    2556     2088272 :         block->first_instruction_index());
    2557     2088272 :     UsePosition* use_pos = Define(block_start, &phi->output(), hint,
    2558             :                                   UsePosition::HintTypeForOperand(*hint),
    2559     2088265 :                                   SpillModeForBlock(block));
    2560             :     MapPhiHint(hint, use_pos);
    2561             :   }
    2562    20475035 : }
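// Editorial sketch, not part of register-allocator.cc: the predecessor-hint
// scoring above packs three boolean criteria into an integer so that
// higher-order bits dominate lower-order ones. A stand-alone version of the
// same scoring (SketchHintScore is a hypothetical name):

int SketchHintScore(bool not_deferred, bool move_is_allocated,
                    bool block_is_empty) {
  // Mirrors kNotDeferredBlockPreference, kMoveIsAllocatedPreference and
  // kBlockIsEmptyPreference above.
  return (not_deferred ? 1 << 2 : 0) | (move_is_allocated ? 1 << 1 : 0) |
         (block_is_empty ? 1 << 0 : 0);
}

// E.g. a hint from a non-deferred block whose move source is already
// allocated scores 0b110 = 6 and beats one from a deferred empty block,
// which scores 0b001 = 1.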
    2563             : 
    2564      231636 : void LiveRangeBuilder::ProcessLoopHeader(const InstructionBlock* block,
    2565             :                                          BitVector* live) {
    2566             :   DCHECK(block->IsLoopHeader());
    2567             :   // Add a live range stretching from the first loop instruction to the last
    2568             :   // for each value live on entry to the header.
    2569             :   BitVector::Iterator iterator(live);
    2570             :   LifetimePosition start = LifetimePosition::GapFromInstructionIndex(
    2571      231638 :       block->first_instruction_index());
    2572             :   LifetimePosition end = LifetimePosition::GapFromInstructionIndex(
    2573      231638 :                              code()->LastLoopInstructionIndex(block))
    2574      231639 :                              .NextFullStart();
    2575     4475289 :   while (!iterator.Done()) {
    2576             :     int operand_index = iterator.Current();
    2577     2121826 :     TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(operand_index);
    2578     2121826 :     range->EnsureInterval(start, end, allocation_zone());
    2579     2121825 :     iterator.Advance();
    2580             :   }
    2581             :   // Insert all values into the live in sets of all blocks in the loop.
    2582     3544108 :   for (int i = block->rpo_number().ToInt() + 1; i < block->loop_end().ToInt();
    2583             :        ++i) {
    2584     6624940 :     live_in_sets()[i]->Union(*live);
    2585             :   }
    2586      231638 : }
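// Editorial note, not part of register-allocator.cc: this implements the
// usual loop-liveness rule that anything live into a loop header stays live
// through the entire loop, since a backedge may return control after any
// iteration. EnsureInterval stretches each such range to the loop end, and
// every block inside the loop absorbs the header's live set, so the
// per-block pass never shortens those ranges again. A value defined before
// the loop and used only after it therefore still occupies a range across
// the whole loop body.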
    2587             : 
    2588     2640932 : void LiveRangeBuilder::BuildLiveRanges() {
    2589             :   // Process the blocks in reverse order.
    2590    23116939 :   for (int block_id = code()->InstructionBlockCount() - 1; block_id >= 0;
    2591             :        --block_id) {
    2592             :     InstructionBlock* block =
    2593    20473514 :         code()->InstructionBlockAt(RpoNumber::FromInt(block_id));
    2594    20474081 :     BitVector* live = ComputeLiveOut(block, data());
    2595             :     // Initially consider all live_out values live for the entire block. We
    2596             :     // will shorten these intervals if necessary.
    2597    20472224 :     AddInitialIntervals(block, live);
    2598             :     // Process the instructions in reverse order, generating and killing
    2599             :     // live values.
    2600    20472314 :     ProcessInstructions(block, live);
    2601             :     // All phi output operands are killed by this block.
    2602    20475222 :     ProcessPhis(block, live);
    2603             :     // Now live is live_in for this block, excluding values live out on
    2604             :     // backward successor edges.
    2605    20475777 :     if (block->IsLoopHeader()) ProcessLoopHeader(block, live);
    2606    40952014 :     live_in_sets()[block_id] = live;
    2607             :   }
    2608             :   // Postprocess the ranges.
    2609             :   const size_t live_ranges_size = data()->live_ranges().size();
    2610    91534911 :   for (TopLevelLiveRange* range : data()->live_ranges()) {
    2611    88892163 :     CHECK_EQ(live_ranges_size,
    2612             :              data()->live_ranges().size());  // TODO(neis): crbug.com/831822
    2613    88892163 :     if (range == nullptr) continue;
    2614             :     // Give slots to all ranges with a non fixed slot use.
    2615    44072524 :     if (range->has_slot_use() && range->HasNoSpillType()) {
    2616             :       SpillMode spill_mode =
    2617             :           range->slot_use_kind() ==
    2618             :                   TopLevelLiveRange::SlotUseKind::kDeferredSlotUse
    2619             :               ? SpillMode::kSpillDeferred
    2620     1246670 :               : SpillMode::kSpillAtDefinition;
    2621     1246670 :       data()->AssignSpillRangeToLiveRange(range, spill_mode);
    2622             :     }
    2623             :     // TODO(bmeurer): This is a horrible hack to make sure that for constant
    2624             :     // live ranges, every use requires the constant to be in a register.
    2625             :     // Without this hack, all uses with "any" policy would get the constant
    2626             :     // operand assigned.
    2627    57357946 :     if (range->HasSpillOperand() && range->GetSpillOperand()->IsConstant()) {
    2628    54297816 :       for (UsePosition* pos = range->first_pos(); pos != nullptr;
    2629             :            pos = pos->next()) {
    2630    19980054 :         if (pos->type() == UsePositionType::kRequiresSlot ||
    2631             :             pos->type() == UsePositionType::kRegisterOrSlotOrConstant) {
    2632             :           continue;
    2633             :         }
    2634             :         UsePositionType new_type = UsePositionType::kRegisterOrSlot;
    2635             :         // Can't mark phis as needing a register.
    2636    19980511 :         if (!pos->pos().IsGapPosition()) {
    2637             :           new_type = UsePositionType::kRequiresRegister;
    2638             :         }
    2639             :         pos->set_type(new_type, true);
    2640             :       }
    2641             :     }
    2642             :   }
    2643     3197210 :   for (auto preassigned : data()->preassigned_slot_ranges()) {
    2644             :     TopLevelLiveRange* range = preassigned.first;
    2645             :     int slot_id = preassigned.second;
    2646             :     SpillRange* spill = range->HasSpillRange()
    2647             :                             ? range->GetSpillRange()
    2648             :                             : data()->AssignSpillRangeToLiveRange(
    2649      554789 :                                   range, SpillMode::kSpillAtDefinition);
    2650             :     spill->set_assigned_slot(slot_id);
    2651             :   }
    2652             : #ifdef DEBUG
    2653             :   Verify();
    2654             : #endif
    2655     2642421 : }
    2656             : 
    2657           0 : void LiveRangeBuilder::MapPhiHint(InstructionOperand* operand,
    2658             :                                   UsePosition* use_pos) {
    2659             :   DCHECK(!use_pos->IsResolved());
    2660     4176524 :   auto res = phi_hints_.insert(std::make_pair(operand, use_pos));
    2661             :   DCHECK(res.second);
    2662             :   USE(res);
    2663           0 : }
    2664             : 
    2665     5054104 : void LiveRangeBuilder::ResolvePhiHint(InstructionOperand* operand,
    2666             :                                       UsePosition* use_pos) {
    2667             :   auto it = phi_hints_.find(operand);
    2668     5054104 :   if (it == phi_hints_.end()) return;
    2669             :   DCHECK(!it->second->IsResolved());
    2670     2088258 :   it->second->ResolveHint(use_pos);
    2671             : }
    2672             : 
    2673           0 : void LiveRangeBuilder::Verify() const {
    2674           0 :   for (auto& hint : phi_hints_) {
    2675           0 :     CHECK(hint.second->IsResolved());
    2676             :   }
    2677           0 :   for (const TopLevelLiveRange* current : data()->live_ranges()) {
    2678           0 :     if (current != nullptr && !current->IsEmpty()) {
    2679             :       // New LiveRanges should not be split.
    2680           0 :       CHECK_NULL(current->next());
    2681             :       // General integrity check.
    2682           0 :       current->Verify();
    2683             :       const UseInterval* first = current->first_interval();
    2684           0 :       if (first->next() == nullptr) continue;
    2685             : 
    2686             :       // Consecutive intervals should not end and start in the same block;
    2687             :       // otherwise the intervals should have been joined, because the
    2688             :       // variable is live throughout that block.
    2689           0 :       CHECK(NextIntervalStartsInDifferentBlocks(first));
    2690             : 
    2691           0 :       for (const UseInterval* i = first->next(); i != nullptr; i = i->next()) {
    2692             :         // Except for the first interval, the other intervals must start at
    2693             :         // a block boundary; otherwise data wouldn't flow to them.
    2694           0 :         CHECK(IntervalStartsAtBlockBoundary(i));
    2695             :         // The last instruction of the predecessors of the block the interval
    2696             :         // starts must be covered by the range.
    2697           0 :         CHECK(IntervalPredecessorsCoveredByRange(i, current));
    2698           0 :         if (i->next() != nullptr) {
    2699             :           // Check the consecutive intervals property, except for the last
    2700             :           // interval, where it doesn't apply.
    2701           0 :           CHECK(NextIntervalStartsInDifferentBlocks(i));
    2702             :         }
    2703             :       }
    2704             :     }
    2705             :   }
    2706           0 : }
    2707             : 
    2708           0 : bool LiveRangeBuilder::IntervalStartsAtBlockBoundary(
    2709             :     const UseInterval* interval) const {
    2710             :   LifetimePosition start = interval->start();
    2711           0 :   if (!start.IsFullStart()) return false;
    2712             :   int instruction_index = start.ToInstructionIndex();
    2713             :   const InstructionBlock* block =
    2714           0 :       data()->code()->GetInstructionBlock(instruction_index);
    2715           0 :   return block->first_instruction_index() == instruction_index;
    2716             : }
    2717             : 
    2718           0 : bool LiveRangeBuilder::IntervalPredecessorsCoveredByRange(
    2719             :     const UseInterval* interval, const TopLevelLiveRange* range) const {
    2720             :   LifetimePosition start = interval->start();
    2721             :   int instruction_index = start.ToInstructionIndex();
    2722             :   const InstructionBlock* block =
    2723           0 :       data()->code()->GetInstructionBlock(instruction_index);
    2724           0 :   for (RpoNumber pred_index : block->predecessors()) {
    2725             :     const InstructionBlock* predecessor =
    2726           0 :         data()->code()->InstructionBlockAt(pred_index);
    2727             :     LifetimePosition last_pos = LifetimePosition::GapFromInstructionIndex(
    2728             :         predecessor->last_instruction_index());
    2729             :     last_pos = last_pos.NextStart().End();
    2730           0 :     if (!range->Covers(last_pos)) return false;
    2731             :   }
    2732           0 :   return true;
    2733             : }
    2734             : 
    2735           0 : bool LiveRangeBuilder::NextIntervalStartsInDifferentBlocks(
    2736             :     const UseInterval* interval) const {
    2737             :   DCHECK_NOT_NULL(interval->next());
    2738             :   LifetimePosition end = interval->end();
    2739             :   LifetimePosition next_start = interval->next()->start();
    2740             :   // Since end is not covered, but the previous position is, move back a
    2741             :   // position.
    2742           0 :   end = end.IsStart() ? end.PrevStart().End() : end.Start();
    2743             :   int last_covered_index = end.ToInstructionIndex();
    2744             :   const InstructionBlock* block =
    2745           0 :       data()->code()->GetInstructionBlock(last_covered_index);
    2746             :   const InstructionBlock* next_block =
    2747           0 :       data()->code()->GetInstructionBlock(next_start.ToInstructionIndex());
    2748           0 :   return block->rpo_number() < next_block->rpo_number();
    2749             : }
    2750             : 
    2751     2641628 : void BundleBuilder::BuildBundles() {
    2752     2641628 :   TRACE("Build bundles\n");
    2753             :   // Process the blocks in reverse order.
    2754    23118391 :   for (int block_id = code()->InstructionBlockCount() - 1; block_id >= 0;
    2755             :        --block_id) {
    2756             :     InstructionBlock* block =
    2757    20475882 :         code()->InstructionBlockAt(RpoNumber::FromInt(block_id));
    2758    20476016 :     TRACE("Block B%d\n", block_id);
    2759    22564520 :     for (auto phi : block->phis()) {
    2760             :       LiveRange* out_range =
    2761     2088254 :           data()->GetOrCreateLiveRangeFor(phi->virtual_register());
    2762             :       LiveRangeBundle* out = out_range->get_bundle();
    2763     2088255 :       if (out == nullptr) {
    2764             :         out = new (data()->allocation_zone())
    2765     1586915 :             LiveRangeBundle(data()->allocation_zone(), next_bundle_id_++);
    2766     1586910 :         out->TryAddRange(out_range);
    2767             :       }
    2768     2088258 :       TRACE("Processing phi for v%d with %d:%d\n", phi->virtual_register(),
    2769             :             out_range->TopLevel()->vreg(), out_range->relative_id());
    2770     7142390 :       for (auto input : phi->operands()) {
    2771     5054125 :         LiveRange* input_range = data()->GetOrCreateLiveRangeFor(input);
    2772     5054128 :         TRACE("Input value v%d with range %d:%d\n", input,
    2773             :               input_range->TopLevel()->vreg(), input_range->relative_id());
    2774             :         LiveRangeBundle* input_bundle = input_range->get_bundle();
    2775     5054136 :         if (input_bundle != nullptr) {
    2776      646073 :           TRACE("Merge\n");
    2777      646073 :           if (out->TryMerge(input_bundle))
    2778      386140 :             TRACE("Merged %d and %d to %d\n", phi->virtual_register(), input,
    2779             :                   out->id());
    2780             :         } else {
    2781     4408063 :           TRACE("Add\n");
    2782     4408063 :           if (out->TryAddRange(input_range))
    2783     4012801 :             TRACE("Added %d and %d to %d\n", phi->virtual_register(), input,
    2784             :                   out->id());
    2785             :         }
    2786             :       }
    2787             :     }
    2788    20476266 :     TRACE("Done block B%d\n", block_id);
    2789             :   }
    2790     2642509 : }
    2791             : 
    2792     5994939 : bool LiveRangeBundle::TryAddRange(LiveRange* range) {
    2793             :   DCHECK_NULL(range->get_bundle());
    2794             :   // We may only add a new live range if its use intervals do not
    2795             :   // overlap with existing intervals in the bundle.
    2796     5994939 :   if (UsesOverlap(range->first_interval())) return false;
    2797             :   ranges_.insert(range);
    2798     5599705 :   range->set_bundle(this);
    2799     5599705 :   InsertUses(range->first_interval());
    2800     5599701 :   return true;
    2801             : }
    2802      646072 : bool LiveRangeBundle::TryMerge(LiveRangeBundle* other) {
    2803      646072 :   if (other == this) return true;
    2804             : 
    2805             :   auto iter1 = uses_.begin();
    2806             :   auto iter2 = other->uses_.begin();
    2807             : 
    2808     2103566 :   while (iter1 != uses_.end() && iter2 != other->uses_.end()) {
    2809     1029659 :     if (iter1->start > iter2->end) {
    2810             :       ++iter2;
    2811      439244 :     } else if (iter2->start > iter1->end) {
    2812             :       ++iter1;
    2813             :     } else {
    2814      259933 :       TRACE("No merge %d:%d %d:%d\n", iter1->start, iter1->end, iter2->start,
    2815             :             iter2->end);
    2816             :       return false;
    2817             :     }
    2818             :   }
    2819             :   // Uses are disjoint; merging is possible.
    2820      164688 :   for (auto it = other->ranges_.begin(); it != other->ranges_.end(); ++it) {
    2821      138286 :     (*it)->set_bundle(this);
    2822      138286 :     InsertUses((*it)->first_interval());
    2823             :   }
    2824             :   ranges_.insert(other->ranges_.begin(), other->ranges_.end());
    2825             :   other->ranges_.clear();
    2826             : 
    2827       26402 :   return true;
    2828             : }
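// Editorial sketch, not part of register-allocator.cc: TryMerge above
// decides mergeability with a linear sweep over two interval lists ordered
// by start position; bundles may merge only if no interval of one overlaps
// an interval of the other. A stand-alone version of the same test, with a
// hypothetical type (SketchUse mirrors the {start, end} pairs in uses_):

#include <cstddef>
#include <vector>

struct SketchUse {
  int start;
  int end;
};

// Returns true iff no interval in `a` overlaps an interval in `b`; both
// vectors are assumed sorted by start, mirroring the ordered uses_ sets.
// Like the loop above, touching intervals count as overlapping.
bool SketchDisjoint(const std::vector<SketchUse>& a,
                    const std::vector<SketchUse>& b) {
  std::size_t i = 0, j = 0;
  while (i < a.size() && j < b.size()) {
    if (a[i].start > b[j].end) {
      ++j;  // b[j] lies entirely before a[i]; advance b
    } else if (b[j].start > a[i].end) {
      ++i;  // a[i] lies entirely before b[j]; advance a
    } else {
      return false;  // overlap; the bundles cannot merge
    }
  }
  return true;
}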
    2829             : 
    2830     5599681 : void LiveRangeBundle::MergeSpillRanges() {
    2831             :   SpillRange* target = nullptr;
    2832    52742676 :   for (auto range : ranges_) {
    2833    47142969 :     if (range->TopLevel()->HasSpillRange()) {
    2834             :       SpillRange* current = range->TopLevel()->GetSpillRange();
    2835     3650867 :       if (target == nullptr) {
    2836             :         target = current;
    2837     1458587 :       } else if (target != current) {
    2838       98604 :         target->TryMerge(current);
    2839             :       }
    2840             :     }
    2841             :   }
    2842     5599707 : }
    2843             : 
    2844           0 : RegisterAllocator::RegisterAllocator(RegisterAllocationData* data,
    2845             :                                      RegisterKind kind)
    2846             :     : data_(data),
    2847             :       mode_(kind),
    2848             :       num_registers_(GetRegisterCount(data->config(), kind)),
    2849             :       num_allocatable_registers_(
    2850             :           GetAllocatableRegisterCount(data->config(), kind)),
    2851             :       allocatable_register_codes_(
    2852             :           GetAllocatableRegisterCodes(data->config(), kind)),
    2853    11745864 :       check_fp_aliasing_(false) {
    2854             :   if (!kSimpleFPAliasing && kind == FP_REGISTERS) {
    2855             :     check_fp_aliasing_ = (data->code()->representation_mask() &
    2856             :                           (kFloat32Bit | kSimd128Bit)) != 0;
    2857             :   }
    2858           0 : }
    2859             : 
    2860           0 : LifetimePosition RegisterAllocator::GetSplitPositionForInstruction(
    2861             :     const LiveRange* range, int instruction_index) {
    2862             :   LifetimePosition ret = LifetimePosition::Invalid();
    2863             : 
    2864             :   ret = LifetimePosition::GapFromInstructionIndex(instruction_index);
    2865    13625508 :   if (range->Start() >= ret || ret >= range->End()) {
    2866             :     return LifetimePosition::Invalid();
    2867             :   }
    2868           0 :   return ret;
    2869             : }
    2870             : 
    2871     2936124 : void RegisterAllocator::SplitAndSpillRangesDefinedByMemoryOperand() {
    2872             :   size_t initial_range_count = data()->live_ranges().size();
    2873   257025224 :   for (size_t i = 0; i < initial_range_count; ++i) {
    2874   127043491 :     CHECK_EQ(initial_range_count,
    2875             :              data()->live_ranges().size());  // TODO(neis): crbug.com/831822
    2876   127043491 :     TopLevelLiveRange* range = data()->live_ranges()[i];
    2877   127043491 :     if (!CanProcessRange(range)) continue;
    2878             :     // Only assume a range is defined by a memory operand if we are
    2879             :     // guaranteed to spill it or it has a spill operand.
    2880    91059814 :     if (range->HasNoSpillType() ||
    2881     2707222 :         (range->HasSpillRange() && !range->has_non_deferred_slot_use())) {
    2882             :       continue;
    2883             :     }
    2884             :     LifetimePosition start = range->Start();
    2885    19529939 :     TRACE("Live range %d:%d is defined by a spill operand.\n",
    2886             :           range->TopLevel()->vreg(), range->relative_id());
    2887             :     LifetimePosition next_pos = start;
    2888    19529933 :     if (next_pos.IsGapPosition()) {
    2889             :       next_pos = next_pos.NextStart();
    2890             :     }
    2891             : 
    2892             :     // With splinters, we can be more strict and skip over positions
    2893             :     // not strictly needing registers.
    2894             :     UsePosition* pos =
    2895             :         range->IsSplinter()
    2896     3110133 :             ? range->NextRegisterPosition(next_pos)
    2897    22640066 :             : range->NextUsePositionRegisterIsBeneficial(next_pos);
    2898             :     // If the range already has a spill operand and it doesn't need a
    2899             :     // register immediately, split it and spill the first part of the range.
    2900    19529872 :     if (pos == nullptr) {
    2901     5269865 :       Spill(range, SpillMode::kSpillAtDefinition);
    2902    14260007 :     } else if (pos->pos() > range->Start().NextStart()) {
    2903             :       // Do not spill the live range eagerly if the use position that can
    2904             :       // benefit from the register is too close to the start of the live range.
    2905             :       LifetimePosition split_pos = GetSplitPositionForInstruction(
    2906             :           range, pos->pos().ToInstructionIndex());
    2907             :       // There is no place to split, so we can't split and spill.
    2908    13625508 :       if (!split_pos.IsValid()) continue;
    2909             : 
    2910             :       split_pos =
    2911    13624731 :           FindOptimalSplitPos(range->Start().NextFullStart(), split_pos);
    2912             : 
    2913    13624712 :       SplitRangeAt(range, split_pos);
    2914    13625151 :       Spill(range, SpillMode::kSpillAtDefinition);
    2915             :     }
    2916             :   }
    2917     2937183 : }
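
Condensed, the loop above makes a three-way decision for every range defined by a memory operand. A hedged sketch of just that decision, with plain integers standing in for LifetimePosition and -1 for a missing use position (both assumptions for illustration, not the allocator's types):

    enum class Action { kSpillWholeRange, kKeepAsIs, kSplitThenSpillHead };

    // next_use is the position of the first use that benefits from (or, for
    // splinters, requires) a register, or -1 if there is none.
    Action DecideForMemoryDefinedRange(int range_start, int next_use) {
      if (next_use < 0) return Action::kSpillWholeRange;
      // A use right after the definition leaves no room to split and spill.
      if (next_use <= range_start + 1) return Action::kKeepAsIs;
      return Action::kSplitThenSpillHead;  // spill only [start, split_pos)
    }
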
    2918             : 
    2919    26485553 : LiveRange* RegisterAllocator::SplitRangeAt(LiveRange* range,
    2920             :                                            LifetimePosition pos) {
    2921             :   DCHECK(!range->TopLevel()->IsFixed());
    2922    26485553 :   TRACE("Splitting live range %d:%d at %d\n", range->TopLevel()->vreg(),
    2923             :         range->relative_id(), pos.value());
    2924             : 
    2925    26485842 :   if (pos <= range->Start()) return range;
    2926             : 
    2927             :   // We can't properly connect live ranges if splitting occurred at the end
    2928             :   // of a block.
    2929             :   DCHECK(pos.IsStart() || pos.IsGapPosition() ||
    2930             :          (GetInstructionBlock(code(), pos)->last_instruction_index() !=
    2931             :           pos.ToInstructionIndex()));
    2932             : 
    2933    23656730 :   LiveRange* result = range->SplitAt(pos, allocation_zone());
    2934    23656958 :   return result;
    2935             : }
    2936             : 
    2937     3745929 : LiveRange* RegisterAllocator::SplitBetween(LiveRange* range,
    2938             :                                            LifetimePosition start,
    2939             :                                            LifetimePosition end) {
    2940             :   DCHECK(!range->TopLevel()->IsFixed());
    2941     3745929 :   TRACE("Splitting live range %d:%d in position between [%d, %d]\n",
    2942             :         range->TopLevel()->vreg(), range->relative_id(), start.value(),
    2943             :         end.value());
    2944             : 
    2945     3745929 :   LifetimePosition split_pos = FindOptimalSplitPos(start, end);
    2946             :   DCHECK(split_pos >= start);
    2947     3745942 :   return SplitRangeAt(range, split_pos);
    2948             : }
    2949             : 
    2950    17370484 : LifetimePosition RegisterAllocator::FindOptimalSplitPos(LifetimePosition start,
    2951             :                                                         LifetimePosition end) {
    2952             :   int start_instr = start.ToInstructionIndex();
    2953             :   int end_instr = end.ToInstructionIndex();
    2954             :   DCHECK_LE(start_instr, end_instr);
    2955             : 
    2956             :   // We have no choice.
    2957    17370484 :   if (start_instr == end_instr) return end;
    2958             : 
    2959             :   const InstructionBlock* start_block = GetInstructionBlock(code(), start);
    2960             :   const InstructionBlock* end_block = GetInstructionBlock(code(), end);
    2961             : 
    2962    11017638 :   if (end_block == start_block) {
    2963             :     // The interval is split in the same basic block. Split at the latest
    2964             :     // possible position.
    2965     8110698 :     return end;
    2966             :   }
    2967             : 
    2968             :   const InstructionBlock* block = end_block;
    2969             :   // Find header of outermost loop.
    2970             :   do {
    2971             :     const InstructionBlock* loop = GetContainingLoop(code(), block);
    2972     3006727 :     if (loop == nullptr ||
    2973             :         loop->rpo_number().ToInt() <= start_block->rpo_number().ToInt()) {
    2974             :       // No more loops or loop starts before the lifetime start.
    2975             :       break;
    2976             :     }
    2977             :     block = loop;
    2978             :   } while (true);
    2979             : 
    2980             :   // We did not find any suitable outer loop. Split at the latest possible
    2981             :   // position unless end_block is a loop header itself.
    2982     5717524 :   if (block == end_block && !end_block->IsLoopHeader()) return end;
    2983             : 
    2984             :   return LifetimePosition::GapFromInstructionIndex(
    2985             :       block->first_instruction_index());
    2986             : }
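
The hoisting step above can be read on its own: starting from the block containing `end`, climb the loop-header chain until the next header would precede `start`, then split in front of the block reached. A standalone sketch over a toy CFG (the `Block` struct is an assumption for illustration, not InstructionBlock):

    #include <vector>

    // Toy stand-in for InstructionBlock: rpo number and the rpo index of the
    // containing loop header, or -1 when the block is not inside a loop.
    struct Block { int rpo; int loop_header; };

    // Mirrors the do/while walk in FindOptimalSplitPos: returns the block
    // whose first gap position the split should move to.
    const Block* HoistToOutermostLoop(const std::vector<Block>& by_rpo,
                                      const Block* start_block,
                                      const Block* end_block) {
      const Block* block = end_block;
      while (true) {
        if (block->loop_header < 0) break;  // no enclosing loop left
        const Block& loop = by_rpo[block->loop_header];
        if (loop.rpo <= start_block->rpo) break;  // loop begins before start
        block = &loop;  // hoist one loop level outwards
      }
      return block;
    }
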
    2987             : 
    2988     1106878 : LifetimePosition RegisterAllocator::FindOptimalSpillingPos(
    2989             :     LiveRange* range, LifetimePosition pos) {
    2990             :   const InstructionBlock* block = GetInstructionBlock(code(), pos.Start());
    2991             :   const InstructionBlock* loop_header =
    2992     1106877 :       block->IsLoopHeader() ? block : GetContainingLoop(code(), block);
    2993             : 
    2994     1106877 :   if (loop_header == nullptr) return pos;
    2995             : 
    2996             :   const UsePosition* prev_use =
    2997             :       range->PreviousUsePositionRegisterIsBeneficial(pos);
    2998             : 
    2999      236524 :   while (loop_header != nullptr) {
    3000             :     // We are going to spill the live range inside the loop.
    3001             :     // If possible, try to move the spilling position backwards to the loop
    3002             :     // header, as this reduces the number of memory moves on the back edge.
    3003             :     LifetimePosition loop_start = LifetimePosition::GapFromInstructionIndex(
    3004             :         loop_header->first_instruction_index());
    3005             : 
    3006       87763 :     if (range->Covers(loop_start)) {
    3007       67278 :       if (prev_use == nullptr || prev_use->pos() < loop_start) {
    3008             :         // No register beneficial use inside the loop before the pos.
    3009             :         pos = loop_start;
    3010             :       }
    3011             :     }
    3012             : 
    3013             :     // Try hoisting out to an outer loop.
    3014             :     loop_header = GetContainingLoop(code(), loop_header);
    3015             :   }
    3016             : 
    3017       60998 :   return pos;
    3018             : }
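
The hoisting condition above is compact enough to restate in isolation: the spill may move back to the loop start only if the range covers that position and no register-beneficial use lies in between. A sketch with integer positions standing in for LifetimePosition (an assumption for illustration):

    // covers   : result of range->Covers(loop_start)
    // prev_use : last register-beneficial use before |pos|, or -1 if none
    int MaybeHoistSpillToLoopStart(int pos, int loop_start, bool covers,
                                   int prev_use) {
      if (covers && (prev_use < 0 || prev_use < loop_start)) {
        return loop_start;  // no use in [loop_start, pos) wants a register
      }
      return pos;  // hoisting would cross a use that benefits from a register
    }
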
    3019             : 
    3020    26613955 : void RegisterAllocator::Spill(LiveRange* range, SpillMode spill_mode) {
    3021             :   DCHECK(!range->spilled());
    3022             :   DCHECK(spill_mode == SpillMode::kSpillAtDefinition ||
    3023             :          GetInstructionBlock(code(), range->Start())->IsDeferred());
    3024             :   TopLevelLiveRange* first = range->TopLevel();
    3025    26613955 :   TRACE("Spilling live range %d:%d mode %d\n", first->vreg(),
    3026             :         range->relative_id(), spill_mode);
    3027             : 
    3028    26614216 :   TRACE("Starting spill type is %d\n", static_cast<int>(first->spill_type()));
    3029    26614216 :   if (first->HasNoSpillType()) {
    3030     4981891 :     TRACE("New spill range needed\n");
    3031     4981891 :     data()->AssignSpillRangeToLiveRange(first, spill_mode);
    3032             :   }
    3033             :   // Upgrade the spill mode, in case this range was so far only spilled in
    3034             :   // deferred code.
    3035    53228168 :   if ((spill_mode == SpillMode::kSpillAtDefinition) &&
    3036             :       (first->spill_type() ==
    3037             :        TopLevelLiveRange::SpillType::kDeferredSpillRange)) {
    3038           0 :     TRACE("Upgrading\n");
    3039             :     first->set_spill_type(TopLevelLiveRange::SpillType::kSpillRange);
    3040             :   }
    3041    26614214 :   TRACE("Final spill type is %d\n", static_cast<int>(first->spill_type()));
    3042             :   range->Spill();
    3043    26614214 : }
    3044             : 
    3045           0 : const char* RegisterAllocator::RegisterName(int register_code) const {
    3046             :   return mode() == GENERAL_REGISTERS
    3047             :              ? i::RegisterName(Register::from_code(register_code))
    3048           0 :              : i::RegisterName(DoubleRegister::from_code(register_code));
    3049             : }
    3050             : 
    3051     2936466 : LinearScanAllocator::LinearScanAllocator(RegisterAllocationData* data,
    3052             :                                          RegisterKind kind, Zone* local_zone)
    3053             :     : RegisterAllocator(data, kind),
    3054             :       unhandled_live_ranges_(local_zone),
    3055             :       active_live_ranges_(local_zone),
    3056             :       inactive_live_ranges_(local_zone),
    3057             :       next_active_ranges_change_(LifetimePosition::Invalid()),
    3058     2936466 :       next_inactive_ranges_change_(LifetimePosition::Invalid()) {
    3059     2936466 :   active_live_ranges().reserve(8);
    3060     2937897 :   inactive_live_ranges().reserve(8);
    3061     2937966 : }
    3062             : 
    3063           0 : void LinearScanAllocator::MaybeUndoPreviousSplit(LiveRange* range) {
    3064           0 :   if (range->next() != nullptr && range->next()->ShouldRecombine()) {
    3065           0 :     LiveRange* to_remove = range->next();
    3066           0 :     TRACE("Recombining %d:%d with %d\n", range->TopLevel()->vreg(),
    3067             :           range->relative_id(), to_remove->relative_id());
    3068             : 
    3069             :     // Remove the range from unhandled, as attaching it will change its
    3070             :     // state and hence ordering in the unhandled set.
    3071             :     auto removed_cnt = unhandled_live_ranges().erase(to_remove);
    3072             :     DCHECK_EQ(removed_cnt, 1);
    3073             :     USE(removed_cnt);
    3074             : 
    3075           0 :     range->AttachToNext();
    3076           0 :   } else if (range->next() != nullptr) {
    3077           0 :     TRACE("No recombine for %d:%d to %d\n", range->TopLevel()->vreg(),
    3078             :           range->relative_id(), range->next()->relative_id());
    3079             :   }
    3080           0 : }
    3081             : 
    3082           0 : void LinearScanAllocator::SpillNotLiveRanges(RangeWithRegisterSet& to_be_live,
    3083             :                                              LifetimePosition position,
    3084             :                                              SpillMode spill_mode) {
    3085           0 :   for (auto it = active_live_ranges().begin();
    3086             :        it != active_live_ranges().end();) {
    3087           0 :     LiveRange* active_range = *it;
    3088             :     TopLevelLiveRange* toplevel = (*it)->TopLevel();
    3089           0 :     auto found = to_be_live.find({toplevel, kUnassignedRegister});
    3090           0 :     if (found == to_be_live.end()) {
    3091             :       // Not contained in {to_be_live}, so spill it.
    3092             :       // Fixed registers are exempt from this. They might have been
    3093             :       // added from inactive at the block boundary but we know that
    3094             :       // they cannot conflict as they are built before register
    3095             :       // allocation starts. It would be algorithmically fine to split
    3096             :       // them and reschedule, but the code does not allow doing this.
    3097           0 :       if (toplevel->IsFixed()) {
    3098           0 :         TRACE("Keeping reactivated fixed range for %s\n",
    3099             :               RegisterName(toplevel->assigned_register()));
    3100             :         ++it;
    3101             :       } else {
    3102             :         // When spilling a previously spilled/reloaded range, we add back the
    3103             :         // tail that we might have split off when we reloaded/spilled it
    3104             :         // previously. Otherwise we might keep generating small split-offs.
    3105           0 :         MaybeUndoPreviousSplit(active_range);
    3106           0 :         TRACE("Putting back %d:%d\n", toplevel->vreg(),
    3107             :               active_range->relative_id());
    3108           0 :         LiveRange* split = SplitRangeAt(active_range, position);
    3109             :         DCHECK_NE(split, active_range);
    3110             : 
    3111             :         // Make sure we revisit this range once it has a use that requires
    3112             :         // a register.
    3113           0 :         UsePosition* next_use = split->NextRegisterPosition(position);
    3114           0 :         if (next_use != nullptr) {
    3115             :           // Move to the start of the gap before the use so that we have room
    3116             :           // to perform the potential reload. Otherwise, do not spill but add
    3117             :           // the range to unhandled for reallocation.
    3118             :           LifetimePosition revisit_at = next_use->pos().FullStart();
    3119           0 :           TRACE("Next use at %d\n", revisit_at.value());
    3120           0 :           if (!data()->IsBlockBoundary(revisit_at)) {
    3121             :             // Leave some space so we have enough gap room.
    3122             :             revisit_at = revisit_at.PrevStart().FullStart();
    3123             :           }
    3124             :           // If this range became live right at the block boundary that we are
    3125             :           // currently processing, we do not need to split it. Instead move it
    3126             :           // to unhandled right away.
    3127           0 :           if (position < revisit_at) {
    3128           0 :             LiveRange* third_part = SplitRangeAt(split, revisit_at);
    3129             :             DCHECK_NE(split, third_part);
    3130           0 :             Spill(split, spill_mode);
    3131           0 :             TRACE("Marking %d:%d to recombine\n", toplevel->vreg(),
    3132             :                   third_part->relative_id());
    3133             :             third_part->SetRecombine();
    3134           0 :             AddToUnhandled(third_part);
    3135             :           } else {
    3136           0 :             AddToUnhandled(split);
    3137             :           }
    3138             :         } else {
    3139           0 :           Spill(split, spill_mode);
    3140             :         }
    3141           0 :         it = ActiveToHandled(it);
    3142             :       }
    3143             :     } else {
    3144             :       // This range is contained in {to_be_live}, so we can keep it.
    3145           0 :       int expected_register = (*found).expected_register;
    3146             :       to_be_live.erase(found);
    3147           0 :       if (expected_register == active_range->assigned_register()) {
    3148             :         // Was life and in correct register, simply pass through.
    3149           0 :         TRACE("Keeping %d:%d in %s\n", toplevel->vreg(),
    3150             :               active_range->relative_id(),
    3151             :               RegisterName(active_range->assigned_register()));
    3152             :         ++it;
    3153             :       } else {
    3154             :         // Was live but in the wrong register. Split and schedule for
    3155             :         // allocation.
    3156           0 :         TRACE("Scheduling %d:%d\n", toplevel->vreg(),
    3157             :               active_range->relative_id());
    3158           0 :         LiveRange* split = SplitRangeAt(active_range, position);
    3159             :         split->set_controlflow_hint(expected_register);
    3160           0 :         AddToUnhandled(split);
    3161           0 :         it = ActiveToHandled(it);
    3162             :       }
    3163             :     }
    3164             :   }
    3165           0 : }
    3166             : 
    3167           0 : LiveRange* LinearScanAllocator::AssignRegisterOnReload(LiveRange* range,
    3168             :                                                        int reg) {
    3169             :   // We know the register is currently free, but it might be in
    3170             :   // use by a currently inactive range. So we might not be able
    3171             :   // to reload for the full distance. In that case, split here.
    3172             :   // TODO(herhut):
    3173             :   // It might be better if we could use the normal unhandled queue and
    3174             :   // give reloading registers precedence. That way we would compute the
    3175             :   // intersection for the entire future.
    3176             :   LifetimePosition new_end = range->End();
    3177           0 :   for (const auto inactive : inactive_live_ranges()) {
    3178             :     if (kSimpleFPAliasing || !check_fp_aliasing()) {
    3179           0 :       if (inactive->assigned_register() != reg) continue;
    3180             :     } else {
    3181             :       bool conflict = inactive->assigned_register() == reg;
    3182             :       if (!conflict) {
    3183             :         int alias_base_index = -1;
    3184             :         int aliases = data()->config()->GetAliases(range->representation(), reg,
    3185             :                                                    inactive->representation(),
    3186             :                                                    &alias_base_index);
    3187             :         DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
    3188             :         while (aliases-- && !conflict) {
    3189             :           int aliased_reg = alias_base_index + aliases;
    3190             :           if (aliased_reg == reg) {
    3191             :             conflict = true;
    3192             :           }
    3193             :         }
    3194             :       }
    3195             :       if (!conflict) continue;
    3196             :     }
    3197           0 :     for (auto interval = inactive->first_interval(); interval != nullptr;
    3198             :          interval = interval->next()) {
    3199           0 :       if (interval->start() > new_end) break;
    3200           0 :       if (interval->end() <= range->Start()) continue;
    3201           0 :       if (new_end > interval->start()) new_end = interval->start();
    3202             :     }
    3203             :   }
    3204           0 :   if (new_end != range->End()) {
    3205           0 :     TRACE("Found new end for %d:%d at %d\n", range->TopLevel()->vreg(),
    3206             :           range->relative_id(), new_end.value());
    3207           0 :     LiveRange* tail = SplitRangeAt(range, new_end);
    3208           0 :     AddToUnhandled(tail);
    3209             :   }
    3210           0 :   SetLiveRangeAssignedRegister(range, reg);
    3211           0 :   return range;
    3212             : }
    3213             : 
    3214           0 : void LinearScanAllocator::ReloadLiveRanges(RangeWithRegisterSet& to_be_live,
    3215             :                                            LifetimePosition position) {
    3216             :   // Assumption: All ranges in {to_be_live} are currently spilled and there are
    3217             :   // no conflicting registers in the active ranges.
    3218             :   // The former is ensured by SpillNotLiveRanges; the latter holds by
    3219             :   // construction of the to_be_live set.
    3220           0 :   for (RangeWithRegister range_with_register : to_be_live) {
    3221             :     TopLevelLiveRange* range = range_with_register.range;
    3222             :     int reg = range_with_register.expected_register;
    3223           0 :     LiveRange* to_resurrect = range->GetChildCovers(position);
    3224           0 :     if (to_resurrect == nullptr) {
    3225             :       // While the range was live until the end of the predecessor block, it is
    3226             :       // not live in this block. Either there is a lifetime gap or the range
    3227             :       // died.
    3228           0 :       TRACE("No candidate for %d at %d\n", range->vreg(), position.value());
    3229             :     } else {
    3230             :       // We might be resurrecting a range that we previously spilled until its
    3231             :       // next use. In that case, we have to unsplit it before processing, as
    3232             :       // otherwise we might get register changes from one range to the other
    3233             :       // in the middle of a block.
    3234             :       // If there is a gap between this range and the next, we can just keep
    3235             :       // it as a register change won't hurt.
    3236           0 :       MaybeUndoPreviousSplit(to_resurrect);
    3237           0 :       if (to_resurrect->Start() == position) {
    3238             :         // This range already starts at this block. It might have been spilled,
    3239             :         // so we have to unspill it. Otherwise, it is already in the unhandled
    3240             :         // queue waiting for processing.
    3241             :         DCHECK(!to_resurrect->HasRegisterAssigned());
    3242           0 :         TRACE("Reload %d:%d starting at %d itself\n", range->vreg(),
    3243             :               to_resurrect->relative_id(), position.value());
    3244           0 :         if (to_resurrect->spilled()) {
    3245             :           to_resurrect->Unspill();
    3246           0 :           to_resurrect->set_controlflow_hint(reg);
    3247           0 :           AddToUnhandled(to_resurrect);
    3248             :         } else {
    3249             :           // Assign the preassigned register if we know it. Otherwise there is
    3250             :           // nothing to do, as the range is already in unhandled.
    3251           0 :           if (reg != kUnassignedRegister) {
    3252             :             auto erased_cnt = unhandled_live_ranges().erase(to_resurrect);
    3253             :             DCHECK_EQ(erased_cnt, 1);
    3254             :             USE(erased_cnt);
    3255             :             // We know that there is no conflict with active ranges, so just
    3256             :             // assign the register to the range.
    3257           0 :             to_resurrect = AssignRegisterOnReload(to_resurrect, reg);
    3258           0 :             AddToActive(to_resurrect);
    3259             :           }
    3260             :         }
    3261             :       } else {
    3262             :         // This range was spilled before. We have to split it and schedule the
    3263             :         // second part for allocation (or assign the register if we know it).
    3264             :         DCHECK(to_resurrect->spilled());
    3265           0 :         LiveRange* split = SplitRangeAt(to_resurrect, position);
    3266           0 :         TRACE("Reload %d:%d starting at %d as %d\n", range->vreg(),
    3267             :               to_resurrect->relative_id(), split->Start().value(),
    3268             :               split->relative_id());
    3269             :         DCHECK_NE(split, to_resurrect);
    3270           0 :         if (reg != kUnassignedRegister) {
    3271             :           // We know that there is no conflict with active ranges, so just
    3272             :           // assign the register to the range.
    3273           0 :           split = AssignRegisterOnReload(split, reg);
    3274           0 :           AddToActive(split);
    3275             :         } else {
    3276             :           // Let normal register assignment find a suitable register.
    3277             :           split->set_controlflow_hint(reg);
    3278           0 :           AddToUnhandled(split);
    3279             :         }
    3280             :       }
    3281             :     }
    3282             :   }
    3283           0 : }
    3284             : 
    3285           0 : RpoNumber LinearScanAllocator::ChooseOneOfTwoPredecessorStates(
    3286             :     InstructionBlock* current_block, LifetimePosition boundary) {
    3287             :   using SmallRangeVector =
    3288             :       base::SmallVector<TopLevelLiveRange*,
    3289             :                         RegisterConfiguration::kMaxRegisters>;
    3290             :   // Pick the state that would generate the fewest spills/reloads.
    3291             :   // Compute vectors of ranges with imminent use for both sides.
    3292             :   // As GetChildCovers is cached, it is cheaper to call it repeatedly
    3293             :   // than to compute a shared set first.
    3294             :   auto& left = data()->GetSpillState(current_block->predecessors()[0]);
    3295             :   auto& right = data()->GetSpillState(current_block->predecessors()[1]);
    3296             :   SmallRangeVector left_used;
    3297           0 :   for (const auto item : left) {
    3298           0 :     LiveRange* at_next_block = item->TopLevel()->GetChildCovers(boundary);
    3299           0 :     if (at_next_block != nullptr &&
    3300           0 :         at_next_block->NextUsePositionRegisterIsBeneficial(boundary) !=
    3301             :             nullptr) {
    3302           0 :       left_used.emplace_back(item->TopLevel());
    3303             :     }
    3304             :   }
    3305             :   SmallRangeVector right_used;
    3306           0 :   for (const auto item : right) {
    3307           0 :     LiveRange* at_next_block = item->TopLevel()->GetChildCovers(boundary);
    3308           0 :     if (at_next_block != nullptr &&
    3309           0 :         at_next_block->NextUsePositionRegisterIsBeneficial(boundary) !=
    3310             :             nullptr) {
    3311           0 :       right_used.emplace_back(item->TopLevel());
    3312             :     }
    3313             :   }
    3314           0 :   if (left_used.empty() && right_used.empty()) {
    3315             :     // There are no beneficial register uses, so look at any use at all.
    3316             :     // We do not account for every kind of use (e.g. flowing into a phi),
    3317             :     // so we just look at ranges that are still live.
    3318           0 :     TRACE("Looking at only uses\n");
    3319           0 :     for (const auto item : left) {
    3320           0 :       LiveRange* at_next_block = item->TopLevel()->GetChildCovers(boundary);
    3321           0 :       if (at_next_block != nullptr &&
    3322             :           at_next_block->NextUsePosition(boundary) != nullptr) {
    3323           0 :         left_used.emplace_back(item->TopLevel());
    3324             :       }
    3325             :     }
    3326           0 :     for (const auto item : right) {
    3327           0 :       LiveRange* at_next_block = item->TopLevel()->GetChildCovers(boundary);
    3328           0 :       if (at_next_block != nullptr &&
    3329             :           at_next_block->NextUsePosition(boundary) != nullptr) {
    3330           0 :         right_used.emplace_back(item->TopLevel());
    3331             :       }
    3332             :     }
    3333             :   }
    3334             :   // Now left_used and right_used contain the ranges that matter.
    3335             :   // Choose the side with the larger count.
    3336           0 :   TRACE("Vote went %zu vs %zu\n", left_used.size(), right_used.size());
    3337             :   return left_used.size() > right_used.size()
    3338             :              ? current_block->predecessors()[0]
    3339           0 :              : current_block->predecessors()[1];
    3340             : }
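
In effect, the function above counts, per predecessor, how many of its live-out ranges are both still live at the boundary and about to be used in a register, then keeps the state with the higher count. A compressed sketch (the `LiveOut` struct replaces the GetChildCovers / NextUsePositionRegisterIsBeneficial queries and is an assumption for illustration):

    #include <cstddef>
    #include <vector>

    struct LiveOut {
      bool covers_boundary;       // still live at the block boundary
      bool beneficial_use_ahead;  // imminent use that wants a register
    };

    size_t CountUseful(const std::vector<LiveOut>& state) {
      size_t n = 0;
      for (const LiveOut& r : state) {
        if (r.covers_boundary && r.beneficial_use_ahead) n++;
      }
      return n;
    }

    // Mirrors the final vote: keep the state that preserves more imminently
    // useful registers; ties deliberately go to the second predecessor.
    int ChoosePredecessorIndex(const std::vector<LiveOut>& left,
                               const std::vector<LiveOut>& right) {
      return CountUseful(left) > CountUseful(right) ? 0 : 1;
    }
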
    3341             : 
    3342           0 : void LinearScanAllocator::ComputeStateFromManyPredecessors(
    3343             :     InstructionBlock* current_block, RangeWithRegisterSet* to_be_live) {
    3344             :   struct Vote {
    3345             :     size_t count;
    3346             :     int used_registers[RegisterConfiguration::kMaxRegisters];
    3347             :   };
    3348             :   struct TopLevelLiveRangeComparator {
    3349             :     bool operator()(const TopLevelLiveRange* lhs,
    3350             :                     const TopLevelLiveRange* rhs) const {
    3351           0 :       return lhs->vreg() < rhs->vreg();
    3352             :     }
    3353             :   };
    3354             :   ZoneMap<TopLevelLiveRange*, Vote, TopLevelLiveRangeComparator> counts(
    3355             :       data()->allocation_zone());
    3356             :   int deferred_blocks = 0;
    3357           0 :   for (RpoNumber pred : current_block->predecessors()) {
    3358           0 :     if (!ConsiderBlockForControlFlow(current_block, pred)) {
    3359             :       // Back edges of a loop count as deferred here too.
    3360           0 :       deferred_blocks++;
    3361           0 :       continue;
    3362             :     }
    3363             :     const auto& pred_state = data()->GetSpillState(pred);
    3364           0 :     for (LiveRange* range : pred_state) {
    3365             :       // A backwards-working spill heuristic might have taken the register
    3366             :       // away again, so the range we stored might have lost it. Ignore those.
    3367           0 :       if (!range->HasRegisterAssigned()) continue;
    3368           0 :       TopLevelLiveRange* toplevel = range->TopLevel();
    3369             :       auto previous = counts.find(toplevel);
    3370           0 :       if (previous == counts.end()) {
    3371           0 :         auto result = counts.emplace(std::make_pair(toplevel, Vote{1, {0}}));
    3372           0 :         CHECK(result.second);
    3373           0 :         result.first->second.used_registers[range->assigned_register()]++;
    3374             :       } else {
    3375           0 :         previous->second.count++;
    3376           0 :         previous->second.used_registers[range->assigned_register()]++;
    3377             :       }
    3378             :     }
    3379             :   }
    3380             : 
    3381             :   // Choose the live ranges from the majority.
    3382             :   const size_t majority =
    3383           0 :       (current_block->PredecessorCount() + 2 - deferred_blocks) / 2;
    3384           0 :   bool taken_registers[RegisterConfiguration::kMaxRegisters] = {0};
    3385           0 :   auto assign_to_live = [this, counts, majority](
    3386             :                             std::function<bool(TopLevelLiveRange*)> filter,
    3387             :                             RangeWithRegisterSet* to_be_live,
    3388           0 :                             bool* taken_registers) {
    3389           0 :     for (const auto& val : counts) {
    3390           0 :       if (!filter(val.first)) continue;
    3391           0 :       if (val.second.count >= majority) {
    3392             :         int register_max = 0;
    3393           0 :         int reg = kUnassignedRegister;
    3394           0 :         for (int idx = 0; idx < RegisterConfiguration::kMaxRegisters; idx++) {
    3395           0 :           int uses = val.second.used_registers[idx];
    3396           0 :           if (uses == 0) continue;
    3397           0 :           if (uses > register_max) {
    3398           0 :             reg = idx;
    3399             :             register_max = val.second.used_registers[idx];
    3400           0 :           } else if (taken_registers[reg] && uses == register_max) {
    3401           0 :             reg = idx;
    3402             :           }
    3403             :         }
    3404           0 :         if (taken_registers[reg]) {
    3405           0 :           reg = kUnassignedRegister;
    3406             :         } else {
    3407           0 :           taken_registers[reg] = true;
    3408             :         }
    3409           0 :         to_be_live->emplace(val.first, reg);
    3410           0 :         TRACE("Reset %d as live due to vote %zu in %s\n",
    3411             :               val.first->TopLevel()->vreg(), val.second.count,
    3412             :               reg == kUnassignedRegister ? "unassigned" : RegisterName(reg));
    3413             :       }
    3414             :     }
    3415           0 :   };
    3416             :   // First round, process fixed registers, as these have precedence.
    3417             :   // There is only one fixed range per register, so we cannot have
    3418             :   // conflicts.
    3419           0 :   assign_to_live([](TopLevelLiveRange* r) { return r->IsFixed(); }, to_be_live,
    3420           0 :                  taken_registers);
    3421             :   // Second round, process the rest.
    3422           0 :   assign_to_live([](TopLevelLiveRange* r) { return !r->IsFixed(); }, to_be_live,
    3423           0 :                  taken_registers);
    3424           0 : }
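
The register choice inside `assign_to_live` can be isolated as below: among the registers the predecessors used for a winning range, prefer the most-voted one, and drift to an equally voted alternative when the favourite is already taken. A sketch assuming a fixed register count (`kMaxRegs` and the -1 sentinel for kUnassignedRegister are illustrative assumptions):

    constexpr int kMaxRegs = 32;  // stand-in for kMaxRegisters

    // used[r]  : how many predecessors kept the range in register r
    // taken[r] : whether r was already handed to another winning range
    int PickRegisterFromVote(const int (&used)[kMaxRegs],
                             bool (&taken)[kMaxRegs]) {
      int best = -1;
      int best_uses = 0;
      for (int reg = 0; reg < kMaxRegs; reg++) {
        if (used[reg] == 0) continue;
        if (used[reg] > best_uses) {
          best = reg;
          best_uses = used[reg];
        } else if (best >= 0 && taken[best] && used[reg] == best_uses) {
          best = reg;  // favourite is taken; switch to an equal vote
        }
      }
      if (best < 0 || taken[best]) return -1;  // fall back to unassigned
      taken[best] = true;
      return best;
    }
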
    3425             : 
    3426           0 : bool LinearScanAllocator::ConsiderBlockForControlFlow(
    3427             :     InstructionBlock* current_block, RpoNumber predecessor) {
    3428             :   // We ignore predecessors on back edges when looking for control flow effects,
    3429             :   // as those lie in the future of allocation and we have no data yet. Also,
    3430             :   // deferred blocks are ignored on deferred-to-non-deferred boundaries, as we
    3431             :   // do not want them to influence allocation of non-deferred code.
    3432           0 :   return (predecessor < current_block->rpo_number()) &&
    3433           0 :          (current_block->IsDeferred() ||
    3434           0 :           !code()->InstructionBlockAt(predecessor)->IsDeferred());
    3435             : }
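
Restated over plain data (rpo integers and deferred flags standing in for RpoNumber and InstructionBlock, an assumption for illustration), the predicate above is simply:

    bool ConsiderPredecessor(int pred_rpo, bool pred_deferred,
                             int block_rpo, bool block_deferred) {
      // Reject back edges (pred_rpo >= block_rpo: not yet allocated) and
      // deferred predecessors of non-deferred blocks.
      return pred_rpo < block_rpo && (block_deferred || !pred_deferred);
    }
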
    3436             : 
    3437           0 : void LinearScanAllocator::UpdateDeferredFixedRanges(SpillMode spill_mode,
    3438             :                                                     InstructionBlock* block) {
    3439           0 :   if (spill_mode == SpillMode::kSpillDeferred) {
    3440             :     LifetimePosition max = LifetimePosition::InstructionFromInstructionIndex(
    3441           0 :         LastDeferredInstructionIndex(block));
    3442             :     // Adds range back to inactive, resolving resulting conflicts.
    3443           0 :     auto add_to_inactive = [this, max](LiveRange* range) {
    3444           0 :       AddToInactive(range);
    3445             :       // Splits other if it conflicts with range. Other is placed in unhandled
    3446             :       // for later reallocation.
    3447             :       auto split_conflicting = [this, max](LiveRange* range, LiveRange* other,
    3448             :                                            std::function<void(LiveRange*)>
    3449           0 :                                                update_caches) {
    3450           0 :         if (other->TopLevel()->IsFixed()) return;
    3451             :         int reg = range->assigned_register();
    3452             :         if (kSimpleFPAliasing || !check_fp_aliasing()) {
    3453           0 :           if (other->assigned_register() != reg) {
    3454             :             return;
    3455             :           }
    3456             :         } else {
    3457             :           if (!data()->config()->AreAliases(range->representation(), reg,
    3458             :                                             other->representation(),
    3459             :                                             other->assigned_register())) {
    3460             :             return;
    3461             :           }
    3462             :         }
    3463             :         // The inactive range might conflict, so check whether we need to
    3464             :         // split and spill. We can look for the first intersection, as there
    3465             :         // cannot be any intersections in the past; those would already have
    3466             :         // surfaced as a conflict.
    3467           0 :         LifetimePosition next_start = range->FirstIntersection(other);
    3468           0 :         if (!next_start.IsValid() || (next_start > max)) {
    3469             :           // There is no conflict or the conflict is outside of the current
    3470             :           // stretch of deferred code. In either case we can ignore the
    3471             :           // inactive range.
    3472             :           return;
    3473             :         }
    3474             :         // They overlap. So we need to split the conflicting range and
    3475             :         // reschedule it for allocation.
    3476           0 :         TRACE("Resolving conflict of %d with deferred fixed for register %s\n",
    3477             :               other->TopLevel()->vreg(),
    3478             :               RegisterName(other->assigned_register()));
    3479             :         LiveRange* split_off =
    3480           0 :             other->SplitAt(next_start, data()->allocation_zone());
    3481             :         DCHECK_NE(split_off, other);
    3482           0 :         AddToUnhandled(split_off);
    3483             :         update_caches(other);
    3484           0 :       };
    3485             :       // Now check for conflicts in active and inactive ranges. We might have
    3486             :       // conflicts in inactive, as we do not do this check on every block
    3487             :       // boundary but only on deferred/non-deferred changes, yet inactive
    3488             :       // live ranges might become live at any block boundary.
    3489           0 :       for (auto active : active_live_ranges()) {
    3490           0 :         split_conflicting(range, active, [this](LiveRange* updated) {
    3491             :           next_active_ranges_change_ =
    3492           0 :               Min(updated->End(), next_active_ranges_change_);
    3493           0 :         });
    3494             :       }
    3495           0 :       for (auto inactive : inactive_live_ranges()) {
    3496           0 :         split_conflicting(range, inactive, [this](LiveRange* updated) {
    3497             :           next_inactive_ranges_change_ =
    3498           0 :               Min(updated->End(), next_inactive_ranges_change_);
    3499           0 :         });
    3500             :       }
    3501           0 :     };
    3502           0 :     if (mode() == GENERAL_REGISTERS) {
    3503           0 :       for (TopLevelLiveRange* current : data()->fixed_live_ranges()) {
    3504           0 :         if (current != nullptr) {
    3505           0 :           if (current->IsDeferredFixed()) {
    3506           0 :             add_to_inactive(current);
    3507             :           }
    3508             :         }
    3509             :       }
    3510             :     } else {
    3511           0 :       for (TopLevelLiveRange* current : data()->fixed_double_live_ranges()) {
    3512           0 :         if (current != nullptr) {
    3513           0 :           if (current->IsDeferredFixed()) {
    3514           0 :             add_to_inactive(current);
    3515             :           }
    3516             :         }
    3517             :       }
    3518             :       if (!kSimpleFPAliasing && check_fp_aliasing()) {
    3519             :         for (TopLevelLiveRange* current : data()->fixed_float_live_ranges()) {
    3520             :           if (current != nullptr) {
    3521             :             if (current->IsDeferredFixed()) {
    3522             :               add_to_inactive(current);
    3523             :             }
    3524             :           }
    3525             :         }
    3526             :         for (TopLevelLiveRange* current : data()->fixed_simd128_live_ranges()) {
    3527             :           if (current != nullptr) {
    3528             :             if (current->IsDeferredFixed()) {
    3529             :               add_to_inactive(current);
    3530             :             }
    3531             :           }
    3532             :         }
    3533             :       }
    3534             :     }
    3535             :   } else {
    3536             :     // Remove the deferred fixed ranges again.
    3537           0 :     for (auto it = inactive_live_ranges().begin();
    3538             :          it != inactive_live_ranges().end();) {
    3539           0 :       if ((*it)->TopLevel()->IsDeferredFixed()) {
    3540           0 :         it = inactive_live_ranges().erase(it);
    3541             :       } else {
    3542             :         ++it;
    3543             :       }
    3544             :     }
    3545             :   }
    3546           0 : }
    3547             : 
    3548           0 : bool LinearScanAllocator::BlockIsDeferredOrImmediatePredecessorIsNotDeferred(
    3549             :     const InstructionBlock* block) {
    3550           0 :   if (block->IsDeferred()) return true;
    3551           0 :   if (block->PredecessorCount() == 0) return true;
    3552             :   bool pred_is_deferred = false;
    3553           0 :   for (auto pred : block->predecessors()) {
    3554           0 :     if (pred.IsNext(block->rpo_number())) {
    3555           0 :       pred_is_deferred = code()->InstructionBlockAt(pred)->IsDeferred();
    3556           0 :       break;
    3557             :     }
    3558             :   }
    3559           0 :   return !pred_is_deferred;
    3560             : }
    3561             : 
    3562           0 : bool LinearScanAllocator::HasNonDeferredPredecessor(InstructionBlock* block) {
    3563           0 :   for (auto pred : block->predecessors()) {
    3564           0 :     InstructionBlock* pred_block = code()->InstructionBlockAt(pred);
    3565           0 :     if (!pred_block->IsDeferred()) return true;
    3566             :   }
    3567           0 :   return false;
    3568             : }
    3569             : 
    3570     2936149 : void LinearScanAllocator::AllocateRegisters() {
    3571             :   DCHECK(unhandled_live_ranges().empty());
    3572             :   DCHECK(active_live_ranges().empty());
    3573             :   DCHECK(inactive_live_ranges().empty());
    3574             : 
    3575     2936149 :   SplitAndSpillRangesDefinedByMemoryOperand();
    3576             :   data()->ResetSpillState();
    3577             : 
    3578     2937210 :   if (FLAG_trace_alloc) {
    3579           0 :     PrintRangeOverview(std::cout);
    3580             :   }
    3581             : 
    3582             :   const size_t live_ranges_size = data()->live_ranges().size();
    3583   129971716 :   for (TopLevelLiveRange* range : data()->live_ranges()) {
    3584   127034455 :     CHECK_EQ(live_ranges_size,
    3585             :              data()->live_ranges().size());  // TODO(neis): crbug.com/831822
    3586   127034455 :     if (!CanProcessRange(range)) continue;
    3587   163823983 :     for (LiveRange* to_add = range; to_add != nullptr;
    3588             :          to_add = to_add->next()) {
    3589    59151905 :       if (!to_add->spilled()) {
    3590    40257733 :         AddToUnhandled(to_add);
    3591             :       }
    3592             :     }
    3593             :   }
    3594             : 
    3595     2937261 :   if (mode() == GENERAL_REGISTERS) {
    3596    87161019 :     for (TopLevelLiveRange* current : data()->fixed_live_ranges()) {
    3597    84513733 :       if (current != nullptr) {
    3598    24403053 :         if (current->IsDeferredFixed()) continue;
    3599    24403434 :         AddToInactive(current);
    3600             :       }
    3601             :     }
    3602             :   } else {
    3603     9731570 :     for (TopLevelLiveRange* current : data()->fixed_double_live_ranges()) {
    3604     9436804 :       if (current != nullptr) {
    3605     4064951 :         if (current->IsDeferredFixed()) continue;
    3606     4065035 :         AddToInactive(current);
    3607             :       }
    3608             :     }
    3609             :     if (!kSimpleFPAliasing && check_fp_aliasing()) {
    3610             :       for (TopLevelLiveRange* current : data()->fixed_float_live_ranges()) {
    3611             :         if (current != nullptr) {
    3612             :           if (current->IsDeferredFixed()) continue;
    3613             :           AddToInactive(current);
    3614             :         }
    3615             :       }
    3616             :       for (TopLevelLiveRange* current : data()->fixed_simd128_live_ranges()) {
    3617             :         if (current != nullptr) {
    3618             :           if (current->IsDeferredFixed()) continue;
    3619             :           AddToInactive(current);
    3620             :         }
    3621             :       }
    3622             :     }
    3623             :   }
    3624             : 
    3625             :   RpoNumber last_block = RpoNumber::FromInt(0);
    3626             :   RpoNumber max_blocks =
    3627     2942052 :       RpoNumber::FromInt(code()->InstructionBlockCount() - 1);
    3628             :   LifetimePosition next_block_boundary =
    3629             :       LifetimePosition::InstructionFromInstructionIndex(
    3630             :           data()
    3631             :               ->code()
    3632             :               ->InstructionBlockAt(last_block)
    3633     2942052 :               ->last_instruction_index())
    3634             :           .NextFullStart();
    3635             :   SpillMode spill_mode = SpillMode::kSpillAtDefinition;
    3636             : 
    3637             :   // Process all ranges. We also need to ensure that we have seen all block
    3638             :   // boundaries. Linear scan might have assigned and spilled ranges before
    3639             :   // reaching the last block and hence we would ignore control flow effects for
    3640             :   // those. Not only does this produce a potentially bad assignment, it also
    3641             :   // breaks the invariant that we undo spills that happen in deferred code
    3642             :   // when crossing a deferred/non-deferred boundary.
    3643   107176810 :   while (!unhandled_live_ranges().empty() ||
    3644           0 :          (data()->is_turbo_control_flow_aware_allocation() &&
    3645             :           last_block < max_blocks)) {
    3646             :     LiveRange* current = unhandled_live_ranges().empty()
    3647             :                              ? nullptr
    3648    49181921 :                              : *unhandled_live_ranges().begin();
    3649             :     LifetimePosition position =
    3650    49181921 :         current ? current->Start() : next_block_boundary;
    3651             : #ifdef DEBUG
    3652             :     allocation_finger_ = position;
    3653             : #endif
    3654    49181921 :     if (data()->is_turbo_control_flow_aware_allocation()) {
    3655             :       // Splintering is not supported.
    3656           0 :       CHECK(!data()->is_turbo_preprocess_ranges());
    3657             :       // Check whether we just moved across a block boundary. This will trigger
    3658             :       // for the first range that is past the current boundary.
    3659           0 :       if (position >= next_block_boundary) {
    3660           0 :         TRACE("Processing boundary at %d leaving %d\n",
    3661             :               next_block_boundary.value(), last_block.ToInt());
    3662             : 
    3663             :         // Forward the state to just before the block boundary.
    3664           0 :         LifetimePosition end_of_block = next_block_boundary.PrevStart().End();
    3665           0 :         ForwardStateTo(end_of_block);
    3666             : 
    3667             :         // Remember this state.
    3668             :         InstructionBlock* current_block = data()->code()->GetInstructionBlock(
    3669           0 :             next_block_boundary.ToInstructionIndex());
    3670             : 
    3671             :         // Store current spill state (as the state at end of block). For
    3672             :         // simplicity, we store the active ranges, i.e., the live ranges that
    3673             :         // are not spilled.
    3674             :         data()->RememberSpillState(last_block, active_live_ranges());
    3675             : 
    3676             :         // Only reset the state if this was not a direct fallthrough. Otherwise
    3677             :         // control flow resolution will get confused (it does not expect changes
    3678             :         // across fallthrough edges).
    3679           0 :         bool fallthrough = (current_block->PredecessorCount() == 1) &&
    3680             :                            current_block->predecessors()[0].IsNext(
    3681             :                                current_block->rpo_number());
    3682             : 
    3683             :         // When crossing a deferred/non-deferred boundary, we have to load or
    3684             :         // remove the deferred fixed ranges from inactive.
    3685           0 :         if ((spill_mode == SpillMode::kSpillDeferred) !=
    3686             :             current_block->IsDeferred()) {
    3687             :           // Update spill mode.
    3688             :           spill_mode = current_block->IsDeferred()
    3689             :                            ? SpillMode::kSpillDeferred
    3690           0 :                            : SpillMode::kSpillAtDefinition;
    3691             : 
    3692           0 :           ForwardStateTo(next_block_boundary);
    3693             : 
    3694             : #ifdef DEBUG
    3695             :           // Allow allocation at current position.
    3696             :           allocation_finger_ = next_block_boundary;
    3697             : #endif
    3698           0 :           UpdateDeferredFixedRanges(spill_mode, current_block);
    3699             :         }
    3700             : 
    3701             :         // Allocation relies on the fact that each non-deferred block has at
    3702             :         // least one non-deferred predecessor. Check this invariant here.
    3703             :         DCHECK_IMPLIES(!current_block->IsDeferred(),
    3704             :                        HasNonDeferredPredecessor(current_block));
    3705             : 
    3706           0 :         if (!fallthrough) {
    3707             : #ifdef DEBUG
    3708             :           // Allow allocation at current position.
    3709             :           allocation_finger_ = next_block_boundary;
    3710             : #endif
    3711             : 
    3712             :           // We are currently at next_block_boundary - 1. Move the state to the
    3713             :           // actual block boundary position. In particular, we have to
    3714             :           // reactivate inactive ranges so that they get rescheduled for
    3715             :           // allocation if they were not live at the predecessors.
    3716           0 :           ForwardStateTo(next_block_boundary);
    3717             : 
    3718             :           RangeWithRegisterSet to_be_live(data()->allocation_zone());
    3719             : 
    3720             :           // If we end up deciding to use the state of the immediate
    3721             :           // predecessor, it is better not to perform a change. It would lead to
    3722             :           // the same outcome anyway.
    3723             :           // This can never happen on boundaries between deferred and
    3724             :           // non-deferred code, as we rely on explicit respill to ensure we
    3725             :           // spill at definition.
    3726             :           bool no_change_required = false;
    3727             : 
    3728             :           auto pick_state_from = [this, current_block](
    3729             :                                      RpoNumber pred,
    3730           0 :                                      RangeWithRegisterSet* to_be_live) -> bool {
    3731           0 :             TRACE("Using information from B%d\n", pred.ToInt());
    3732             :             // If this is a fall-through that is not across a deferred
    3733             :             // boundary, there is nothing to do.
    3734             :             bool is_noop = pred.IsNext(current_block->rpo_number());
    3735           0 :             if (!is_noop) {
    3736             :               auto& spill_state = data()->GetSpillState(pred);
    3737           0 :               TRACE("Not a fallthrough. Adding %zu elements...\n",
    3738             :                     spill_state.size());
    3739           0 :               for (const auto range : spill_state) {
    3740             :                 // Filter out ranges that had their register stolen by backwards
    3741             :                 // working spill heuristics. These have been spilled after the
    3742             :                 // fact, so ignore them.
    3743           0 :                 if (!range->HasRegisterAssigned()) continue;
    3744             :                 to_be_live->emplace(range);
    3745             :               }
    3746             :             }
    3747           0 :             return is_noop;
    3748           0 :           };
    3749             : 
    3750             :           // Multiple cases here:
    3751             :           // 1) We have a single predecessor => this is a control flow split, so
    3752             :           //     just restore the predecessor state.
    3753             :           // 2) We have two predecessors => this is a conditional, so break
    3754             :           //     ties based on forward uses, trying to benefit the same branch
    3755             :           //     if in doubt (make one path fast).
    3756             :           // 3) We have many predecessors => this is a switch. Compute union
    3757             :           //     based on majority, break ties by looking forward.
    3758           0 :           if (current_block->PredecessorCount() == 1) {
    3759           0 :             TRACE("Single predecessor for B%d\n",
    3760             :                   current_block->rpo_number().ToInt());
    3761             :             no_change_required =
    3762           0 :                 pick_state_from(current_block->predecessors()[0], &to_be_live);
    3763           0 :           } else if (current_block->PredecessorCount() == 2) {
    3764           0 :             TRACE("Two predecessors for B%d\n",
    3765             :                   current_block->rpo_number().ToInt());
    3766             :             // If one of the branches does not contribute any information,
    3767             :             // e.g. because it is deferred or a back edge, we can short-circuit
    3768             :             // here right away.
    3769             :             RpoNumber chosen_predecessor = RpoNumber::Invalid();
    3770           0 :             if (!ConsiderBlockForControlFlow(
    3771             :                     current_block, current_block->predecessors()[0])) {
    3772           0 :               chosen_predecessor = current_block->predecessors()[1];
    3773           0 :             } else if (!ConsiderBlockForControlFlow(
    3774             :                            current_block, current_block->predecessors()[1])) {
    3775           0 :               chosen_predecessor = current_block->predecessors()[0];
    3776             :             } else {
    3777             :               chosen_predecessor = ChooseOneOfTwoPredecessorStates(
    3778           0 :                   current_block, next_block_boundary);
    3779             :             }
    3780             :             no_change_required =
    3781           0 :                 pick_state_from(chosen_predecessor, &to_be_live);
    3782             : 
    3783             :           } else {
    3784             :             // Merge at the end of, e.g., a switch.
    3785           0 :             ComputeStateFromManyPredecessors(current_block, &to_be_live);
    3786             :           }
    3787             : 
    3788           0 :           if (!no_change_required) {
    3789           0 :             SpillNotLiveRanges(to_be_live, next_block_boundary, spill_mode);
    3790           0 :             ReloadLiveRanges(to_be_live, next_block_boundary);
    3791             :           }
    3792             : 
    3793             :           // TODO(herhut): Check removal.
    3794             :           // Now forward to the current position.
    3795           0 :           ForwardStateTo(next_block_boundary);
    3796             :         }
    3797             :         // Update block information
    3798             :         last_block = current_block->rpo_number();
    3799             :         next_block_boundary = LifetimePosition::InstructionFromInstructionIndex(
    3800             :                                   current_block->last_instruction_index())
    3801             :                                   .NextFullStart();
    3802             : 
    3803             :         // We might have created new unhandled live ranges, so cycle around the
    3804             :         // loop to make sure we pick the topmost range in unhandled for
    3805             :         // processing.
    3806             :         continue;
    3807             :       }
    3808             :     }
    3809             : 
    3810             :     DCHECK_NOT_NULL(current);
    3811             : 
    3812    49181921 :     TRACE("Processing interval %d:%d start=%d\n", current->TopLevel()->vreg(),
    3813             :           current->relative_id(), position.value());
    3814             : 
    3815             :     // Now we can erase current, as we are sure to process it.
    3816             :     unhandled_live_ranges().erase(unhandled_live_ranges().begin());
    3817             : 
    3818    49180301 :     if (current->IsTopLevel() && TryReuseSpillForPhi(current->TopLevel()))
    3819             :       continue;
    3820             : 
    3821    49167761 :     ForwardStateTo(position);
    3822             : 
    3823             :     DCHECK(!current->HasRegisterAssigned() && !current->spilled());
    3824             : 
    3825    49170733 :     ProcessCurrentRange(current, spill_mode);
    3826             :   }
    3827             : 
    3828     2937648 :   if (FLAG_trace_alloc) {
    3829           0 :     PrintRangeOverview(std::cout);
    3830             :   }
    3831     2937648 : }
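
// --- Illustrative sketch (not part of the instrumented source) ---
// A minimal model of how the control-flow-aware mode above picks the set of
// live ranges at a block boundary. Block, RpoNumber, Vreg, and the
// spill-state table are hypothetical stand-ins for the real data structures,
// and the two-predecessor tie-break is simplified (the real code consults
// forward uses via ChooseOneOfTwoPredecessorStates).
#include <map>
#include <set>
#include <vector>

using Vreg = int;
using RpoNumber = int;

struct Block {
  RpoNumber rpo;
  std::vector<RpoNumber> predecessors;
};

// State remembered at the end of each processed block: the virtual registers
// that were live in a register (the "active" ranges).
std::map<RpoNumber, std::set<Vreg>> spill_states;

std::set<Vreg> PickToBeLive(const Block& block) {
  if (block.predecessors.size() == 1) {
    // Control flow split: restore the single predecessor's state.
    return spill_states[block.predecessors[0]];
  }
  if (block.predecessors.size() == 2) {
    // Conditional: pick one predecessor's state wholesale.
    const std::set<Vreg>& a = spill_states[block.predecessors[0]];
    const std::set<Vreg>& b = spill_states[block.predecessors[1]];
    return a.size() >= b.size() ? a : b;
  }
  // Switch-like merge: keep ranges live in a majority of predecessors.
  std::map<Vreg, int> votes;
  for (RpoNumber pred : block.predecessors)
    for (Vreg v : spill_states[pred]) votes[v]++;
  std::set<Vreg> result;
  int num_preds = static_cast<int>(block.predecessors.size());
  for (const auto& entry : votes)
    if (2 * entry.second > num_preds) result.insert(entry.first);
  return result;
}
// --- end sketch ---
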
    3832             : 
    3833     2648955 : bool LinearScanAllocator::TrySplitAndSpillSplinter(LiveRange* range) {
    3834             :   DCHECK(!data()->is_turbo_control_flow_aware_allocation());
    3835             :   DCHECK(range->TopLevel()->IsSplinter());
    3836             :   // If we can spill the whole range, great. Otherwise, split above the
    3837             :   // first use needing a register and spill the top part.
    3838     2648955 :   const UsePosition* next_reg = range->NextRegisterPosition(range->Start());
    3839     2648862 :   if (next_reg == nullptr) {
    3840     2032659 :     Spill(range, SpillMode::kSpillAtDefinition);
    3841     2032691 :     return true;
    3842      616204 :   } else if (range->FirstHintPosition() == nullptr) {
    3843             :     // If there was no hint, but we have a use position requiring a
    3844             :     // register, apply the hot path heuristics.
    3845             :     return false;
    3846      449896 :   } else if (next_reg->pos().PrevStart() > range->Start()) {
    3847      229335 :     LiveRange* tail = SplitRangeAt(range, next_reg->pos().PrevStart());
    3848      229337 :     AddToUnhandled(tail);
    3849      229336 :     Spill(range, SpillMode::kSpillAtDefinition);
    3850      229336 :     return true;
    3851             :   }
    3852             :   return false;
    3853             : }
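
// --- Illustrative sketch (not part of the instrumented source) ---
// The splinter decision above, collapsed to plain ints: spill the whole
// range when no use needs a register, fall back to the hot-path heuristics
// when there is no hint, and split off and spill only the head when there is
// a gap before the first register use. `first_register_use > range_start + 1`
// is a hypothetical stand-in for the PrevStart() comparison.
enum class SplinterAction { kSpillWhole, kHotPath, kSpillHead };

SplinterAction ClassifySplinter(bool has_register_use, bool has_hint,
                                int first_register_use, int range_start) {
  if (!has_register_use) return SplinterAction::kSpillWhole;
  if (!has_hint) return SplinterAction::kHotPath;
  if (first_register_use > range_start + 1) return SplinterAction::kSpillHead;
  return SplinterAction::kHotPath;  // first use is right at the start
}
// --- end sketch ---
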
    3854             : 
    3855    42570366 : void LinearScanAllocator::SetLiveRangeAssignedRegister(LiveRange* range,
    3856             :                                                        int reg) {
    3857    42570366 :   data()->MarkAllocated(range->representation(), reg);
    3858             :   range->set_assigned_register(reg);
    3859             :   range->SetUseHints(reg);
    3860             :   range->UpdateBundleRegister(reg);
    3861    67106303 :   if (range->IsTopLevel() && range->TopLevel()->is_phi()) {
    3862             :     data()->GetPhiMapValueFor(range->TopLevel())->set_assigned_register(reg);
    3863             :   }
    3864    42572193 : }
    3865             : 
    3866    42571675 : void LinearScanAllocator::AddToActive(LiveRange* range) {
    3867    42571675 :   TRACE("Add live range %d:%d in %s to active\n", range->TopLevel()->vreg(),
    3868             :         range->relative_id(), RegisterName(range->assigned_register()));
    3869    42571675 :   active_live_ranges().push_back(range);
    3870             :   next_active_ranges_change_ =
    3871   127713993 :       std::min(next_active_ranges_change_, range->NextEndAfter(range->Start()));
    3872    42571331 : }
    3873             : 
    3874    28465914 : void LinearScanAllocator::AddToInactive(LiveRange* range) {
    3875    28465914 :   TRACE("Add live range %d:%d to inactive\n", range->TopLevel()->vreg(),
    3876             :         range->relative_id());
    3877    28465914 :   inactive_live_ranges().push_back(range);
    3878             :   next_inactive_ranges_change_ = std::min(
    3879    85395282 :       next_inactive_ranges_change_, range->NextStartAfter(range->Start()));
    3880    28465094 : }
    3881             : 
    3882    49182449 : void LinearScanAllocator::AddToUnhandled(LiveRange* range) {
    3883    49182449 :   if (range == nullptr || range->IsEmpty()) return;
    3884             :   DCHECK(!range->HasRegisterAssigned() && !range->spilled());
    3885             :   DCHECK(allocation_finger_ <= range->Start());
    3886             : 
    3887    49184051 :   TRACE("Add live range %d:%d to unhandled\n", range->TopLevel()->vreg(),
    3888             :         range->relative_id());
    3889             :   unhandled_live_ranges().insert(range);
    3890             : }
    3891             : 
    3892    52461635 : ZoneVector<LiveRange*>::iterator LinearScanAllocator::ActiveToHandled(
    3893             :     const ZoneVector<LiveRange*>::iterator it) {
    3894    52461635 :   TRACE("Moving live range %d:%d from active to handled\n",
    3895             :         (*it)->TopLevel()->vreg(), (*it)->relative_id());
    3896   104922761 :   return active_live_ranges().erase(it);
    3897             : }
    3898             : 
    3899    25794978 : ZoneVector<LiveRange*>::iterator LinearScanAllocator::ActiveToInactive(
    3900             :     const ZoneVector<LiveRange*>::iterator it, LifetimePosition position) {
    3901    25794978 :   LiveRange* range = *it;
    3902    25794978 :   inactive_live_ranges().push_back(range);
    3903    25795050 :   TRACE("Moving live range %d:%d from active to inactive\n",
    3904             :         (range)->TopLevel()->vreg(), range->relative_id());
    3905             :   next_inactive_ranges_change_ =
    3906    77385237 :       std::min(next_inactive_ranges_change_, range->NextStartAfter(position));
    3907    51590110 :   return active_live_ranges().erase(it);
    3908             : }
    3909             : 
    3910     8531079 : ZoneVector<LiveRange*>::iterator LinearScanAllocator::InactiveToHandled(
    3911             :     ZoneVector<LiveRange*>::iterator it) {
    3912     8531079 :   TRACE("Moving live range %d:%d from inactive to handled\n",
    3913             :         (*it)->TopLevel()->vreg(), (*it)->relative_id());
    3914    17061869 :   return inactive_live_ranges().erase(it);
    3915             : }
    3916             : 
    3917    39793352 : ZoneVector<LiveRange*>::iterator LinearScanAllocator::InactiveToActive(
    3918             :     ZoneVector<LiveRange*>::iterator it, LifetimePosition position) {
    3919    39793352 :   LiveRange* range = *it;
    3920    39793352 :   active_live_ranges().push_back(range);
    3921    39793272 :   TRACE("Moving live range %d:%d from inactive to active\n",
    3922             :         range->TopLevel()->vreg(), range->relative_id());
    3923             :   next_active_ranges_change_ =
    3924   119379870 :       std::min(next_active_ranges_change_, range->NextEndAfter(position));
    3925    79586445 :   return inactive_live_ranges().erase(it);
    3926             : }
    3927             : 
    3928    49167485 : void LinearScanAllocator::ForwardStateTo(LifetimePosition position) {
    3929    49167485 :   if (position >= next_active_ranges_change_) {
    3930    29494427 :     next_active_ranges_change_ = LifetimePosition::MaxPosition();
    3931   142447485 :     for (auto it = active_live_ranges().begin();
    3932             :          it != active_live_ranges().end();) {
    3933   112952444 :       LiveRange* cur_active = *it;
    3934   112952444 :       if (cur_active->End() <= position) {
    3935    51354766 :         it = ActiveToHandled(it);
    3936    61597678 :       } else if (!cur_active->Covers(position)) {
    3937    25795077 :         it = ActiveToInactive(it, position);
    3938             :       } else {
    3939             :         next_active_ranges_change_ = std::min(
    3940    71607558 :             next_active_ranges_change_, cur_active->NextEndAfter(position));
    3941             :         ++it;
    3942             :       }
    3943             :     }
    3944             :   }
    3945             : 
    3946    49168099 :   if (position >= next_inactive_ranges_change_) {
    3947    11928948 :     next_inactive_ranges_change_ = LifetimePosition::MaxPosition();
    3948   155700580 :     for (auto it = inactive_live_ranges().begin();
    3949             :          it != inactive_live_ranges().end();) {
    3950   143763789 :       LiveRange* cur_inactive = *it;
    3951   143763789 :       if (cur_inactive->End() <= position) {
    3952     8531244 :         it = InactiveToHandled(it);
    3953   135232545 :       } else if (cur_inactive->Covers(position)) {
    3954    39793421 :         it = InactiveToActive(it, position);
    3955             :       } else {
    3956             :         next_inactive_ranges_change_ =
    3957             :             std::min(next_inactive_ranges_change_,
    3958   190895690 :                      cur_inactive->NextStartAfter(position));
    3959             :         ++it;
    3960             :       }
    3961             :     }
    3962             :   }
    3963    49175942 : }
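
// --- Illustrative sketch (not part of the instrumented source) ---
// A condensed model of the sweep above, with positions as plain ints and a
// hypothetical Range made of disjoint intervals (so it can have lifetime
// holes). ForwardTo retires ranges whose end has passed, moves ranges
// between active and inactive depending on whether they cover `position`,
// and caches the earliest position at which either list can change again, so
// most calls return immediately. Unlike the real code, it rebuilds both
// lists whenever either cache expires.
#include <algorithm>
#include <climits>
#include <utility>
#include <vector>

struct Range {
  std::vector<std::pair<int, int>> intervals;  // sorted, disjoint [start, end)
  int End() const { return intervals.back().second; }
  bool Covers(int pos) const {
    for (const auto& interval : intervals)
      if (interval.first <= pos && pos < interval.second) return true;
    return false;
  }
  int NextStartAfter(int pos) const {  // first interval start at or after pos
    for (const auto& interval : intervals)
      if (interval.first >= pos) return interval.first;
    return INT_MAX;
  }
  int NextEndAfter(int pos) const {  // first interval end after pos
    for (const auto& interval : intervals)
      if (interval.second > pos) return interval.second;
    return INT_MAX;
  }
};

struct SweepState {
  std::vector<Range> active, inactive;
  int next_active_change = 0, next_inactive_change = 0;

  void ForwardTo(int position) {
    if (position < next_active_change && position < next_inactive_change)
      return;  // cached: nothing can have changed yet
    next_active_change = next_inactive_change = INT_MAX;
    std::vector<Range> new_active, new_inactive;
    auto classify = [&](Range& r) {
      if (r.End() <= position) return;  // range is over: becomes handled
      if (r.Covers(position)) {         // live here: belongs in active
        next_active_change =
            std::min(next_active_change, r.NextEndAfter(position));
        new_active.push_back(std::move(r));
      } else {                          // in a hole or not started: inactive
        next_inactive_change =
            std::min(next_inactive_change, r.NextStartAfter(position));
        new_inactive.push_back(std::move(r));
      }
    };
    for (Range& r : active) classify(r);
    for (Range& r : inactive) classify(r);
    active = std::move(new_active);
    inactive = std::move(new_inactive);
  }
};
// --- end sketch ---
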
    3964             : 
    3965           0 : int LinearScanAllocator::LastDeferredInstructionIndex(InstructionBlock* start) {
    3966             :   DCHECK(start->IsDeferred());
    3967             :   RpoNumber last_block =
    3968           0 :       RpoNumber::FromInt(code()->InstructionBlockCount() - 1);
    3969           0 :   while ((start->rpo_number() < last_block)) {
    3970             :     InstructionBlock* next =
    3971           0 :         code()->InstructionBlockAt(start->rpo_number().Next());
    3972           0 :     if (!next->IsDeferred()) break;
    3973             :     start = next;
    3974             :   }
    3975           0 :   return start->last_instruction_index();
    3976             : }
    3977             : 
    3978           0 : void LinearScanAllocator::GetFPRegisterSet(MachineRepresentation rep,
    3979             :                                            int* num_regs, int* num_codes,
    3980             :                                            const int** codes) const {
    3981             :   DCHECK(!kSimpleFPAliasing);
    3982           0 :   if (rep == MachineRepresentation::kFloat32) {
    3983           0 :     *num_regs = data()->config()->num_float_registers();
    3984           0 :     *num_codes = data()->config()->num_allocatable_float_registers();
    3985           0 :     *codes = data()->config()->allocatable_float_codes();
    3986           0 :   } else if (rep == MachineRepresentation::kSimd128) {
    3987           0 :     *num_regs = data()->config()->num_simd128_registers();
    3988           0 :     *num_codes = data()->config()->num_allocatable_simd128_registers();
    3989           0 :     *codes = data()->config()->allocatable_simd128_codes();
    3990             :   } else {
    3991           0 :     UNREACHABLE();
    3992             :   }
    3993           0 : }
    3994             : 
    3995    49207948 : void LinearScanAllocator::FindFreeRegistersForRange(
    3996             :     LiveRange* range, Vector<LifetimePosition> positions) {
    3997             :   int num_regs = num_registers();
    3998             :   int num_codes = num_allocatable_registers();
    3999             :   const int* codes = allocatable_register_codes();
    4000             :   MachineRepresentation rep = range->representation();
    4001             :   if (!kSimpleFPAliasing && (rep == MachineRepresentation::kFloat32 ||
    4002             :                              rep == MachineRepresentation::kSimd128))
    4003             :     GetFPRegisterSet(rep, &num_regs, &num_codes, &codes);
    4004             :   DCHECK_GE(positions.length(), num_regs);
    4005             : 
    4006  1622722336 :   for (int i = 0; i < num_regs; ++i) {
    4007  1573514388 :     positions[i] = LifetimePosition::MaxPosition();
    4008             :   }
    4009             : 
    4010   195398156 :   for (LiveRange* cur_active : active_live_ranges()) {
    4011             :     int cur_reg = cur_active->assigned_register();
    4012             :     if (kSimpleFPAliasing || !check_fp_aliasing()) {
    4013   292380108 :       positions[cur_reg] = LifetimePosition::GapFromInstructionIndex(0);
    4014   146190054 :       TRACE("Register %s is free until pos %d (1) due to %d\n",
    4015             :             RegisterName(cur_reg),
    4016             :             LifetimePosition::GapFromInstructionIndex(0).value(),
    4017             :             cur_active->TopLevel()->vreg());
    4018             :     } else {
    4019             :       int alias_base_index = -1;
    4020             :       int aliases = data()->config()->GetAliases(
    4021             :           cur_active->representation(), cur_reg, rep, &alias_base_index);
    4022             :       DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
    4023             :       while (aliases--) {
    4024             :         int aliased_reg = alias_base_index + aliases;
    4025             :         positions[aliased_reg] = LifetimePosition::GapFromInstructionIndex(0);
    4026             :       }
    4027             :     }
    4028             :   }
    4029             : 
    4030   555554108 :   for (LiveRange* cur_inactive : inactive_live_ranges()) {
    4031             :     DCHECK(cur_inactive->End() > range->Start());
    4032             :     int cur_reg = cur_inactive->assigned_register();
    4033             :     // No need to carry out intersections when this register won't be
    4034             :     // interesting to this range anyway.
    4035             :     // TODO(mtrofin): extend to aliased ranges, too.
    4036   506380185 :     if ((kSimpleFPAliasing || !check_fp_aliasing()) &&
    4037   506380185 :         positions[cur_reg] < range->Start()) {
    4038   430797903 :       continue;
    4039             :     }
    4040             : 
    4041   404467802 :     LifetimePosition next_intersection = cur_inactive->FirstIntersection(range);
    4042   404472741 :     if (!next_intersection.IsValid()) continue;
    4043             :     if (kSimpleFPAliasing || !check_fp_aliasing()) {
    4044    75587221 :       positions[cur_reg] = Min(positions[cur_reg], next_intersection);
    4045    75587221 :       TRACE("Register %s is free until pos %d (2)\n", RegisterName(cur_reg),
    4046             :             Min(positions[cur_reg], next_intersection).value());
    4047             :     } else {
    4048             :       int alias_base_index = -1;
    4049             :       int aliases = data()->config()->GetAliases(
    4050             :           cur_inactive->representation(), cur_reg, rep, &alias_base_index);
    4051             :       DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
    4052             :       while (aliases--) {
    4053             :         int aliased_reg = alias_base_index + aliases;
    4054             :         positions[aliased_reg] = Min(positions[aliased_reg], next_intersection);
    4055             :       }
    4056             :     }
    4057             :   }
    4058    49173923 : }
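
// --- Illustrative sketch (not part of the instrumented source) ---
// A condensed model of the computation above, with positions as plain ints
// and hypothetical single-interval ranges: an active range pins its register
// immediately, so the register is "free until" position 0, while an inactive
// range (here: one that has not started yet) only limits its register from
// the range's start, which is its first intersection with the current range.
#include <algorithm>
#include <climits>
#include <vector>

struct AssignedRange {
  int reg;    // register this range currently holds
  int start;  // for inactive ranges: the first intersection candidate
};

std::vector<int> FreeUntil(int num_regs,
                           const std::vector<AssignedRange>& active,
                           const std::vector<AssignedRange>& inactive) {
  std::vector<int> free_until(num_regs, INT_MAX);
  for (const AssignedRange& r : active)
    free_until[r.reg] = 0;  // occupied right now
  for (const AssignedRange& r : inactive)
    free_until[r.reg] = std::min(free_until[r.reg], r.start);
  return free_until;
}
// --- end sketch ---
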
    4059             : 
    4060             : // High-level register allocation summary:
    4061             : //
    4062             : // For regular, or hot (i.e. not splinter) ranges, we attempt to first
    4063             : // allocate the preferred (hint) register. If that is not possible,
    4064             : // we find a register that's free, and allocate that. If that's not possible,
    4065             : // we search for a register to steal from a range that was allocated. The
    4066             : // goal is to optimize for throughput by avoiding register-to-memory
    4067             : // moves, which are expensive.
    4068             : //
    4069             : // For splinters, the goal is to minimize the number of moves. First we try
    4070             : // to allocate the preferred register (more discussion follows). Failing that,
    4071             : // we bail out and spill as far as we can, unless the first use is at start,
    4072             : // in which case we apply the same behavior as we do for regular ranges.
    4073             : // If there is no hint, we apply the hot-path behavior.
    4074             : //
    4075             : // For the splinter, the hint register may come from:
    4076             : //
    4077             : // - the hot path (we set it at splintering time with SetHint). In this case, if
    4078             : // we cannot offer the hint register, spilling is better because it's at most
    4079             : // 1 move, while trying to find and offer another register is at least 1 move.
    4080             : //
    4081             : // - a constraint. If we cannot offer that register, it's because there is
    4082             : // some interference. So offering the hint register up to the interference
    4083             : // would result in a move at the interference, plus a move to satisfy the
    4084             : // constraint. This is also the number of moves if we spill, with the
    4085             : // potential of the range being already spilled and thus saving a move
    4086             : // (the spill).
    4087             : // Note that this can only be an input constraint; if it were an output one,
    4088             : // the range wouldn't be a splinter, because that would mean it is defined
    4089             : // in a deferred block, and we don't mark those as splinters (they live in
    4090             : // deferred blocks only).
    4092             : //
    4093             : // - a phi. The same analysis as in the case of the input constraint applies.
    4094             : //
    4095    49173017 : void LinearScanAllocator::ProcessCurrentRange(LiveRange* current,
    4096             :                                               SpillMode spill_mode) {
    4097             :   EmbeddedVector<LifetimePosition, RegisterConfiguration::kMaxRegisters>
    4098             :       free_until_pos;
    4099    49173017 :   FindFreeRegistersForRange(current, free_until_pos);
    4100    49173909 :   if (!TryAllocatePreferredReg(current, free_until_pos)) {
    4101    28639207 :     if (current->TopLevel()->IsSplinter()) {
    4102             :       DCHECK(!data()->is_turbo_control_flow_aware_allocation());
    4103     4910998 :       if (TrySplitAndSpillSplinter(current)) return;
    4104             :     }
    4105    26377112 :     if (!TryAllocateFreeReg(current, free_until_pos)) {
    4106     5444836 :       AllocateBlockedReg(current, spill_mode);
    4107             :     }
    4108             :   }
    4109    46909292 :   if (current->HasRegisterAssigned()) {
    4110    42571832 :     AddToActive(current);
    4111             :   }
    4112             : }
    4113             : 
    4114    54124129 : bool LinearScanAllocator::TryAllocatePreferredReg(
    4115             :     LiveRange* current, const Vector<LifetimePosition>& free_until_pos) {
    4116             :   int hint_register;
    4117   108247257 :   if (current->RegisterFromControlFlow(&hint_register) ||
    4118    79334813 :       current->FirstHintPosition(&hint_register) != nullptr ||
    4119             :       current->RegisterFromBundle(&hint_register)) {
    4120    31008829 :     TRACE(
    4121             :         "Found reg hint %s (free until [%d) for live range %d:%d (end %d[).\n",
    4122             :         RegisterName(hint_register), free_until_pos[hint_register].value(),
    4123             :         current->TopLevel()->vreg(), current->relative_id(),
    4124             :         current->End().value());
    4125             : 
    4126             :     // The desired register is free until the end of the current live range.
    4127    62017998 :     if (free_until_pos[hint_register] >= current->End()) {
    4128    22365970 :       TRACE("Assigning preferred reg %s to live range %d:%d\n",
    4129             :             RegisterName(hint_register), current->TopLevel()->vreg(),
    4130             :             current->relative_id());
    4131    22365970 :       SetLiveRangeAssignedRegister(current, hint_register);
    4132    22366125 :       return true;
    4133             :     }
    4134             :   }
    4135             :   return false;
    4136             : }
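
// --- Illustrative sketch (not part of the instrumented source) ---
// The core test above, with positions as plain ints: a hinted register is
// taken only if it stays free past the current range's end, so the range
// never has to move out of it. Names here are hypothetical.
#include <vector>

constexpr int kNoRegister = -1;

int TryPreferred(int hint_reg, int current_end,
                 const std::vector<int>& free_until) {
  if (hint_reg == kNoRegister) return kNoRegister;
  return free_until[hint_reg] >= current_end ? hint_reg : kNoRegister;
}
// --- end sketch ---
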
    4137             : 
    4138    30307581 : int LinearScanAllocator::PickRegisterThatIsAvailableLongest(
    4139             :     LiveRange* current, int hint_reg,
    4140             :     const Vector<LifetimePosition>& free_until_pos) {
    4141             :   int num_regs = 0;  // used only for the call to GetFPRegisterSet.
    4142             :   int num_codes = num_allocatable_registers();
    4143             :   const int* codes = allocatable_register_codes();
    4144             :   MachineRepresentation rep = current->representation();
    4145             :   if (!kSimpleFPAliasing && (rep == MachineRepresentation::kFloat32 ||
    4146             :                              rep == MachineRepresentation::kSimd128)) {
    4147             :     GetFPRegisterSet(rep, &num_regs, &num_codes, &codes);
    4148             :   }
    4149             : 
    4150             :   DCHECK_GE(free_until_pos.length(), num_codes);
    4151             : 
    4152             :   // Find the register which stays free for the longest time. Check for
    4153             :   // the hinted register first, as we might want to use that one. Only
    4154             :   // count full instructions for free ranges, as an instruction's internal
    4155             :   // positions do not help but might shadow a hinted register. This is
    4156             :   // typically the case for function calls, where all registered are
    4157             :   // cloberred after the call except for the argument registers, which are
    4158             : // typically the case for function calls, where all registers are
    4159             : // clobbered after the call except for the argument registers, which are
    4160    30307581 :   int reg = (hint_reg == kUnassignedRegister) ? codes[0] : hint_reg;
    4161             :   int current_free = -1;
    4162   766767341 :   for (int i = 0; i < num_codes; ++i) {
    4163   368234250 :     int code = codes[i];
    4164             :     // Prefer registers that have no fixed uses to avoid blocking later hints.
    4165             :     // We use the first register that has no fixed uses to ensure we use
    4166             :     // byte addressable registers in ia32 first.
    4167   368234250 :     int candidate_free = free_until_pos[code].ToInstructionIndex();
    4168   368234250 :     TRACE("Register %s is free until %d\n", RegisterName(code), candidate_free);
    4169   736463851 :     if ((candidate_free > current_free) ||
    4170   544273406 :         (candidate_free == current_free && reg != hint_reg &&
    4171   345577109 :          (data()->HasFixedUse(current->representation(), reg) &&
    4172    82019471 :           !data()->HasFixedUse(current->representation(), code)))) {
    4173             :       reg = code;
    4174             :       current_free = candidate_free;
    4175             :     }
    4176             :   }
    4177             : 
    4178    30303211 :   return reg;
    4179             : }
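
// --- Illustrative sketch (not part of the instrumented source) ---
// A minimal model of the selection loop above, with positions as plain ints
// and a hypothetical has_fixed_use table: take the register that stays free
// the longest, and on ties prefer one without fixed uses so later hints are
// not blocked. Pass hint_reg = -1 when there is no hint.
#include <vector>

int PickLongestFree(const std::vector<int>& free_until,
                    const std::vector<bool>& has_fixed_use, int hint_reg) {
  int best = (hint_reg >= 0) ? hint_reg : 0;
  int best_free = -1;
  for (int reg = 0; reg < static_cast<int>(free_until.size()); ++reg) {
    bool strictly_better = free_until[reg] > best_free;
    bool tie_break = free_until[reg] == best_free && best != hint_reg &&
                     has_fixed_use[best] && !has_fixed_use[reg];
    if (strictly_better || tie_break) {
      best = reg;
      best_free = free_until[reg];
    }
  }
  return best;
}
// --- end sketch ---
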
    4180             : 
    4181    26377027 : bool LinearScanAllocator::TryAllocateFreeReg(
    4182             :     LiveRange* current, const Vector<LifetimePosition>& free_until_pos) {
    4183             :   // Compute register hint, if such exists.
    4184    26377027 :   int hint_reg = kUnassignedRegister;
    4185    26376535 :   current->RegisterFromControlFlow(&hint_reg) ||
    4186             :       current->FirstHintPosition(&hint_reg) != nullptr ||
    4187    26377027 :       current->RegisterFromBundle(&hint_reg);
    4188             : 
    4189             :   int reg =
    4190    26376354 :       PickRegisterThatIsAvailableLongest(current, hint_reg, free_until_pos);
    4191             : 
    4192    52749468 :   LifetimePosition pos = free_until_pos[reg];
    4193             : 
    4194    26374734 :   if (pos <= current->Start()) {
    4195             :     // All registers are blocked.
    4196             :     return false;
    4197             :   }
    4198             : 
    4199    20930473 :   if (pos < current->End()) {
    4200             :     // Register reg is available at the range start but becomes blocked before
    4201             :     // the range end. Split current at position where it becomes blocked.
    4202     4950983 :     LiveRange* tail = SplitRangeAt(current, pos);
    4203     4950975 :     AddToUnhandled(tail);
    4204             : 
    4205             :     // Try to allocate preferred register once more.
    4206     4950977 :     if (TryAllocatePreferredReg(current, free_until_pos)) return true;
    4207             :   }
    4208             : 
    4209             :   // Register reg is available at the range start and is free until the range
    4210             :   // end.
    4211             :   DCHECK(pos >= current->End());
    4212    19099882 :   TRACE("Assigning free reg %s to live range %d:%d\n", RegisterName(reg),
    4213             :         current->TopLevel()->vreg(), current->relative_id());
    4214    19099882 :   SetLiveRangeAssignedRegister(current, reg);
    4215             : 
    4216    19101031 :   return true;
    4217             : }
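
// --- Illustrative sketch (not part of the instrumented source) ---
// The three outcomes above, with positions as plain ints: every register is
// blocked at the range's start, the picked register is free only for a
// prefix (split there and assign), or it is free for the whole range.
enum class FreeRegOutcome { kAllBlocked, kSplitAndAssign, kAssignWhole };

FreeRegOutcome ClassifyFreeReg(int free_until, int start, int end) {
  if (free_until <= start) return FreeRegOutcome::kAllBlocked;
  if (free_until < end) return FreeRegOutcome::kSplitAndAssign;
  return FreeRegOutcome::kAssignWhole;
}
// --- end sketch ---
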
    4218             : 
    4219     5444813 : void LinearScanAllocator::AllocateBlockedReg(LiveRange* current,
    4220             :                                              SpillMode spill_mode) {
    4221     5444813 :   UsePosition* register_use = current->NextRegisterPosition(current->Start());
    4222     5444850 :   if (register_use == nullptr) {
    4223             :     // There is no use in the current live range that requires a register.
    4224             :     // We can just spill it.
    4225     1515766 :     Spill(current, spill_mode);
    4226     1515766 :     return;
    4227             :   }
    4228             : 
    4229             :   MachineRepresentation rep = current->representation();
    4230             : 
    4231             :   // use_pos keeps track of positions a register/alias is used at.
    4232             :   // block_pos keeps track of the positions from which a register/alias is
    4233             :   // blocked.
    4234             :   EmbeddedVector<LifetimePosition, RegisterConfiguration::kMaxRegisters>
    4235             :       use_pos(LifetimePosition::MaxPosition());
    4236             :   EmbeddedVector<LifetimePosition, RegisterConfiguration::kMaxRegisters>
    4237             :       block_pos(LifetimePosition::MaxPosition());
    4238             : 
    4239    51467064 :   for (LiveRange* range : active_live_ranges()) {
    4240             :     int cur_reg = range->assigned_register();
    4241             :     bool is_fixed_or_cant_spill =
    4242    47537984 :         range->TopLevel()->IsFixed() || !range->CanBeSpilled(current->Start());
    4243             :     if (kSimpleFPAliasing || !check_fp_aliasing()) {
    4244    47537982 :       if (is_fixed_or_cant_spill) {
    4245    34978385 :         block_pos[cur_reg] = use_pos[cur_reg] =
    4246    34978385 :             LifetimePosition::GapFromInstructionIndex(0);
    4247             :       } else {
    4248             :         DCHECK_NE(LifetimePosition::GapFromInstructionIndex(0),
    4249             :                   block_pos[cur_reg]);
    4250    12559597 :         use_pos[cur_reg] =
    4251    25119192 :             range->NextLifetimePositionRegisterIsBeneficial(current->Start());
    4252             :       }
    4253             :     } else {
    4254             :       int alias_base_index = -1;
    4255             :       int aliases = data()->config()->GetAliases(
    4256             :           range->representation(), cur_reg, rep, &alias_base_index);
    4257             :       DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
    4258             :       while (aliases--) {
    4259             :         int aliased_reg = alias_base_index + aliases;
    4260             :         if (is_fixed_or_cant_spill) {
    4261             :           block_pos[aliased_reg] = use_pos[aliased_reg] =
    4262             :               LifetimePosition::GapFromInstructionIndex(0);
    4263             :         } else {
    4264             :           use_pos[aliased_reg] =
    4265             :               Min(block_pos[aliased_reg],
    4266             :                   range->NextLifetimePositionRegisterIsBeneficial(
    4267             :                       current->Start()));
    4268             :         }
    4269             :       }
    4270             :     }
    4271             :   }
    4272             : 
    4273    18773193 :   for (LiveRange* range : inactive_live_ranges()) {
    4274             :     DCHECK(range->End() > current->Start());
    4275             :     int cur_reg = range->assigned_register();
    4276             :     bool is_fixed = range->TopLevel()->IsFixed();
    4277             : 
    4278             :     // Don't perform costly intersections if they are guaranteed to not update
    4279             :     // block_pos or use_pos.
    4280             :     // TODO(mtrofin): extend to aliased ranges, too.
    4281             :     if ((kSimpleFPAliasing || !check_fp_aliasing())) {
    4282    14844120 :       if (is_fixed) {
    4283    40015384 :         if (block_pos[cur_reg] < range->Start()) continue;
    4284             :       } else {
    4285     4095848 :         if (use_pos[cur_reg] < range->Start()) continue;
    4286             :       }
    4287             :     }
    4288             : 
    4289    12108969 :     LifetimePosition next_intersection = range->FirstIntersection(current);
    4290    12108962 :     if (!next_intersection.IsValid()) continue;
    4291             : 
    4292             :     if (kSimpleFPAliasing || !check_fp_aliasing()) {
    4293      421121 :       if (is_fixed) {
    4294      820278 :         block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
    4295      410139 :         use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
    4296             :       } else {
    4297       21964 :         use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
    4298             :       }
    4299             :     } else {
    4300             :       int alias_base_index = -1;
    4301             :       int aliases = data()->config()->GetAliases(
    4302             :           range->representation(), cur_reg, rep, &alias_base_index);
    4303             :       DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
    4304             :       while (aliases--) {
    4305             :         int aliased_reg = alias_base_index + aliases;
    4306             :         if (is_fixed) {
    4307             :           block_pos[aliased_reg] =
    4308             :               Min(block_pos[aliased_reg], next_intersection);
    4309             :           use_pos[aliased_reg] =
    4310             :               Min(block_pos[aliased_reg], use_pos[aliased_reg]);
    4311             :         } else {
    4312             :           use_pos[aliased_reg] = Min(use_pos[aliased_reg], next_intersection);
    4313             :         }
    4314             :       }
    4315             :     }
    4316             :   }
    4317             : 
    4318             :   // Compute register hint if it exists.
    4319     3929073 :   int hint_reg = kUnassignedRegister;
    4320     3929072 :   current->RegisterFromControlFlow(&hint_reg) ||
    4321     3929066 :       register_use->HintRegister(&hint_reg) ||
    4322     3929073 :       current->RegisterFromBundle(&hint_reg);
    4323     3929079 :   int reg = PickRegisterThatIsAvailableLongest(current, hint_reg, use_pos);
    4324             : 
    4325     7858152 :   if (use_pos[reg] < register_use->pos()) {
    4326             :     // If there is a gap position before the next register use, we can
    4327             :     // spill until there. The gap position will then fit the fill move.
    4328     2822197 :     if (LifetimePosition::ExistsGapPositionBetween(current->Start(),
    4329             :                                                    register_use->pos())) {
    4330             :       SpillBetween(current, current->Start(), register_use->pos(), spill_mode);
    4331             :       return;
    4332             :     }
    4333             :   }
    4334             : 
    4335             :   // When in deferred spilling mode, avoid stealing registers beyond the
    4336             :   // current deferred region. Otherwise we might spill an inactive range whose
    4337             :   // start lies outside of deferred code, and that range would not be reloaded.
    4338             :   LifetimePosition new_end = current->End();
    4339     1106877 :   if (spill_mode == SpillMode::kSpillDeferred) {
    4340             :     InstructionBlock* deferred_block =
    4341           0 :         code()->GetInstructionBlock(current->Start().ToInstructionIndex());
    4342             :     new_end = Min(new_end, LifetimePosition::GapFromInstructionIndex(
    4343           0 :                                LastDeferredInstructionIndex(deferred_block)));
    4344             :   }
    4345             : 
    4346             :   // We couldn't spill until the next register use. Split before the register
    4347             :   // is blocked, if applicable.
    4348     1106877 :   if (block_pos[reg] < new_end) {
    4349             :     // Register becomes blocked before the current range end. Split before that
    4350             :     // position.
    4351             :     new_end = block_pos[reg].Start();
    4352             :   }
    4353             : 
    4354             :   // If there is no register available at all, we can only spill this range.
    4355             :   // This happens, for instance, on entry to deferred code, where registers
    4356             :   // might become blocked even though we aim to reload ranges.
    4357     1106877 :   if (new_end == current->Start()) {
    4358             :     SpillBetween(current, new_end, register_use->pos(), spill_mode);
    4359             :     return;
    4360             :   }
    4361             : 
    4362             :   // Split at the new end if we found one.
    4363     1106877 :   if (new_end != current->End()) {
    4364       13594 :     LiveRange* tail = SplitBetween(current, current->Start(), new_end);
    4365       13594 :     AddToUnhandled(tail);
    4366             :   }
    4367             : 
    4368             :   // Register reg is not blocked for the whole range.
    4369             :   DCHECK(block_pos[reg] >= current->End());
    4370     1106877 :   TRACE("Assigning blocked reg %s to live range %d:%d\n", RegisterName(reg),
    4371             :         current->TopLevel()->vreg(), current->relative_id());
    4372     1106877 :   SetLiveRangeAssignedRegister(current, reg);
    4373             : 
    4374             :   // This register was not free. Thus we need to find and spill
    4375             :   // parts of active and inactive live regions that use the same register
    4376             :   // at the same lifetime positions as current.
    4377     1106878 :   SplitAndSpillIntersecting(current, spill_mode);
    4378             : }
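
// --- Illustrative sketch (not part of the instrumented source) ---
// A model of the active-range half of the bookkeeping above, with positions
// as plain ints and a hypothetical Occupant type. Fixed or unspillable
// ranges block their register outright (use and block position 0), while
// spillable ranges only record the next position at which they benefit from
// the register; their block position stays at "never".
#include <algorithm>
#include <climits>
#include <vector>

struct Occupant {
  int reg;
  bool fixed_or_cant_spill;
  int next_beneficial_use;  // only meaningful for spillable ranges
};

void CollectPositions(const std::vector<Occupant>& active, int num_regs,
                      std::vector<int>* use_pos, std::vector<int>* block_pos) {
  use_pos->assign(num_regs, INT_MAX);
  block_pos->assign(num_regs, INT_MAX);
  for (const Occupant& o : active) {
    if (o.fixed_or_cant_spill) {
      (*use_pos)[o.reg] = (*block_pos)[o.reg] = 0;  // blocked immediately
    } else {
      (*use_pos)[o.reg] = std::min((*use_pos)[o.reg], o.next_beneficial_use);
    }
  }
}
// --- end sketch ---
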
    4379             : 
    4380     1106879 : void LinearScanAllocator::SplitAndSpillIntersecting(LiveRange* current,
    4381             :                                                     SpillMode spill_mode) {
    4382             :   DCHECK(current->HasRegisterAssigned());
    4383             :   int reg = current->assigned_register();
    4384             :   LifetimePosition split_pos = current->Start();
    4385    14420483 :   for (auto it = active_live_ranges().begin();
    4386             :        it != active_live_ranges().end();) {
    4387    13313607 :     LiveRange* range = *it;
    4388             :     if (kSimpleFPAliasing || !check_fp_aliasing()) {
    4389    13313607 :       if (range->assigned_register() != reg) {
    4390             :         ++it;
    4391             :         continue;
    4392             :       }
    4393             :     } else {
    4394             :       if (!data()->config()->AreAliases(current->representation(), reg,
    4395             :                                         range->representation(),
    4396             :                                         range->assigned_register())) {
    4397             :         ++it;
    4398             :         continue;
    4399             :       }
    4400             :     }
    4401             : 
    4402     1106878 :     UsePosition* next_pos = range->NextRegisterPosition(current->Start());
    4403             :     // TODO(herhut): Be more clever here as long as we do not move split_pos
    4404             :     // out of deferred code.
    4405             :     LifetimePosition spill_pos = spill_mode == SpillMode::kSpillDeferred
    4406             :                                      ? split_pos
    4407     1106877 :                                      : FindOptimalSpillingPos(range, split_pos);
    4408     1106876 :     if (next_pos == nullptr) {
    4409             :       SpillAfter(range, spill_pos, spill_mode);
    4410             :     } else {
    4411             :       // When spilling between spill_pos and next_pos ensure that the range
    4412             :       // remains spilled at least until the start of the current live range.
    4413             :       // This guarantees that we will not introduce new unhandled ranges that
    4414             :       // start before the current range as this violates allocation invariants
    4415             :       // and will lead to an inconsistent state of active and inactive
    4416             :       // live-ranges: ranges are allocated in order of their start positions,
    4417             :       // ranges are retired from active/inactive when the start of the
    4418             :       // current live-range is larger than their end.
    4419             :       DCHECK(LifetimePosition::ExistsGapPositionBetween(current->Start(),
    4420             :                                                         next_pos->pos()));
    4421             :       SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos(),
    4422      904247 :                         spill_mode);
    4423             :     }
    4424     1106876 :     it = ActiveToHandled(it);
    4425             :   }
    4426             : 
    4427    13900027 :   for (auto it = inactive_live_ranges().begin();
    4428             :        it != inactive_live_ranges().end();) {
    4429    12793151 :     LiveRange* range = *it;
    4430             :     DCHECK(range->End() > current->Start());
    4431    12793151 :     if (range->TopLevel()->IsFixed()) {
    4432             :       ++it;
    4433             :       continue;
    4434             :     }
    4435             :     if (kSimpleFPAliasing || !check_fp_aliasing()) {
    4436      272369 :       if (range->assigned_register() != reg) {
    4437             :         ++it;
    4438             :         continue;
    4439             :       }
    4440             :     } else {
    4441             :       if (!data()->config()->AreAliases(current->representation(), reg,
    4442             :                                         range->representation(),
    4443             :                                         range->assigned_register())) {
    4444             :         ++it;
    4445             :         continue;
    4446             :       }
    4447             :     }
    4448             : 
    4449       22717 :     LifetimePosition next_intersection = range->FirstIntersection(current);
    4450       22717 :     if (next_intersection.IsValid()) {
    4451          98 :       UsePosition* next_pos = range->NextRegisterPosition(current->Start());
    4452          98 :       if (next_pos == nullptr) {
    4453             :         SpillAfter(range, split_pos, spill_mode);
    4454             :       } else {
    4455             :         next_intersection = Min(next_intersection, next_pos->pos());
    4456             :         SpillBetween(range, split_pos, next_intersection, spill_mode);
    4457             :       }
    4458          98 :       it = InactiveToHandled(it);
    4459             :     } else {
    4460             :       ++it;
    4461             :     }
    4462             :   }
    4463     1106876 : }
    4464             : 
    4465    26632871 : bool LinearScanAllocator::TryReuseSpillForPhi(TopLevelLiveRange* range) {
    4466    26632871 :   if (!range->is_phi()) return false;
    4467             : 
    4468             :   DCHECK(!range->HasSpillOperand());
    4469             :   // Check how many operands belong to the same bundle as the output.
    4470             :   LiveRangeBundle* out_bundle = range->get_bundle();
    4471             :   RegisterAllocationData::PhiMapValue* phi_map_value =
    4472             :       data()->GetPhiMapValueFor(range);
    4473             :   const PhiInstruction* phi = phi_map_value->phi();
    4474             :   const InstructionBlock* block = phi_map_value->block();
    4475             :   // Count the number of spilled operands.
    4476             :   size_t spilled_count = 0;
    4477    11636469 :   for (size_t i = 0; i < phi->operands().size(); i++) {
    4478     4816350 :     int op = phi->operands()[i];
    4479     4816350 :     LiveRange* op_range = data()->GetOrCreateLiveRangeFor(op);
    4480     4816352 :     if (!op_range->TopLevel()->HasSpillRange()) continue;
    4481             :     const InstructionBlock* pred =
    4482      281283 :         code()->InstructionBlockAt(block->predecessors()[i]);
    4483             :     LifetimePosition pred_end =
    4484             :         LifetimePosition::InstructionFromInstructionIndex(
    4485             :             pred->last_instruction_index());
    4486     1341258 :     while (op_range != nullptr && !op_range->CanCover(pred_end)) {
    4487             :       op_range = op_range->next();
    4488             :     }
    4489      551343 :     if (op_range != nullptr && op_range->spilled() &&
    4490             :         op_range->get_bundle() == out_bundle) {
    4491      144528 :       spilled_count++;
    4492             :     }
    4493             :   }
    4494             : 
    4495             :   // Only continue if more than half of the operands are spilled to the same
    4496             :   // slot (because they are part of the same bundle).
    4497     2003767 :   if (spilled_count * 2 <= phi->operands().size()) {
    4498             :     return false;
    4499             :   }
    4500             : 
    4501             :   // If the range does not need a register soon, spill it to the merged
    4502             :   // spill range.
    4503             :   LifetimePosition next_pos = range->Start();
    4504       13749 :   if (next_pos.IsGapPosition()) next_pos = next_pos.NextStart();
    4505       13749 :   UsePosition* pos = range->NextUsePositionRegisterIsBeneficial(next_pos);
    4506       13749 :   if (pos == nullptr) {
    4507        6797 :     Spill(range, SpillMode::kSpillAtDefinition);
    4508        6797 :     return true;
    4509        6952 :   } else if (pos->pos() > range->Start().NextStart()) {
    4510             :     SpillBetween(range, range->Start(), pos->pos(),
    4511             :                  SpillMode::kSpillAtDefinition);
    4512        5903 :     return true;
    4513             :   }
    4514             :   return false;
    4515             : }
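
// --- Illustrative sketch (not part of the instrumented source) ---
// The majority test above with plain data: the phi's output range reuses the
// merged spill slot only if strictly more than half of its operands are
// already spilled to that slot (i.e., belong to the same bundle).
#include <vector>

bool MajoritySpilledToSameSlot(const std::vector<bool>& operand_spilled_here) {
  int spilled_count = 0;
  for (bool spilled : operand_spilled_here)
    if (spilled) ++spilled_count;
  return 2 * spilled_count > static_cast<int>(operand_spilled_here.size());
}
// --- end sketch ---
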
    4516             : 
    4517           0 : void LinearScanAllocator::SpillAfter(LiveRange* range, LifetimePosition pos,
    4518             :                                      SpillMode spill_mode) {
    4519      202687 :   LiveRange* second_part = SplitRangeAt(range, pos);
    4520      202689 :   Spill(second_part, spill_mode);
    4521           0 : }
    4522             : 
    4523           0 : void LinearScanAllocator::SpillBetween(LiveRange* range, LifetimePosition start,
    4524             :                                        LifetimePosition end,
    4525             :                                        SpillMode spill_mode) {
    4526     2828142 :   SpillBetweenUntil(range, start, start, end, spill_mode);
    4527           0 : }
    4528             : 
    4529     3732378 : void LinearScanAllocator::SpillBetweenUntil(LiveRange* range,
    4530             :                                             LifetimePosition start,
    4531             :                                             LifetimePosition until,
    4532             :                                             LifetimePosition end,
    4533             :                                             SpillMode spill_mode) {
    4534     3732378 :   CHECK(start < end);
    4535     3732378 :   LiveRange* second_part = SplitRangeAt(range, start);
    4536             : 
    4537     3732376 :   if (second_part->Start() < end) {
    4538             :     // The split result intersects with [start, end[.
    4539             :     // Split it at a position between ]start+1, end[, spill the middle part
    4540             :     // and put the rest to unhandled.
    4541             : 
    4542             :     // Make sure that the third part always starts after the start of the
    4543             :     // second part, as that likely is the current position of the register
    4544             :     // allocator and we cannot add ranges to unhandled that start before
    4545             :     // the current position.
    4546             :     LifetimePosition split_start = Max(second_part->Start().End(), until);
    4547             : 
    4548             :     // If end is an actual use (which it typically is) we have to split
    4549             :     // so that there is a gap before so that we have space for moving the
    4550             :     // value into its position.
    4551             :     // However, if we have no choice, split right where asked.
    4552     3732336 :     LifetimePosition third_part_end = Max(split_start, end.PrevStart().End());
    4553             :     // Instead of splitting right after or even before the block boundary,
    4554             :     // split on the boundary to avoid extra moves.
    4555     3732336 :     if (data()->IsBlockBoundary(end.Start())) {
    4556           0 :       third_part_end = Max(split_start, end.Start());
    4557             :     }
    4558             : 
    4559             :     LiveRange* third_part =
    4560     3732335 :         SplitBetween(second_part, split_start, third_part_end);
    4561             : 
    4562     3732340 :     AddToUnhandled(third_part);
    4563             :     // This can happen, even if we checked for start < end above, as we fiddle
    4564             :     // with the end location. However, we are guaranteed to be after or at
    4565             :     // until, so this is fine.
    4566     3732342 :     if (third_part != second_part) {
    4567     3732343 :       Spill(second_part, spill_mode);
    4568             :     }
    4569             :   } else {
    4570             :     // The split result does not intersect with [start, end[.
    4571             :     // Nothing to spill. Just put it to unhandled as whole.
    4572          40 :     AddToUnhandled(second_part);
    4573             :   }
    4574     3732383 : }
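
// --- Illustrative sketch (not part of the instrumented source) ---
// The three-way split above with positions collapsed to plain ints (the
// gap/instruction distinction of LifetimePosition is dropped, so start + 1
// and end - 1 are rough stand-ins for Start().End() and
// end.PrevStart().End()). The first part, before `start`, keeps its
// register; the middle part, [start, third_part_end), is spilled; the third
// part is re-queued as unhandled. `until` keeps the third part from starting
// before the allocator's current position.
#include <algorithm>

struct SplitPlan {
  int spill_from;   // start of the spilled middle part
  int spill_until;  // start of the third part, re-queued as unhandled
};

SplitPlan PlanSpillBetweenUntil(int start, int until, int end) {
  int split_start = std::max(start + 1, until);
  int third_part_end = std::max(split_start, end - 1);
  return {start, third_part_end};
}
// --- end sketch ---
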
    4575             : 
    4576     2642028 : SpillSlotLocator::SpillSlotLocator(RegisterAllocationData* data)
    4577     2642028 :     : data_(data) {}
    4578             : 
    4579     2642320 : void SpillSlotLocator::LocateSpillSlots() {
    4580             :   const InstructionSequence* code = data()->code();
    4581             :   const size_t live_ranges_size = data()->live_ranges().size();
    4582    91530887 :   for (TopLevelLiveRange* range : data()->live_ranges()) {
    4583    88888709 :     CHECK_EQ(live_ranges_size,
    4584             :              data()->live_ranges().size());  // TODO(neis): crbug.com/831822
    4585    88888709 :     if (range == nullptr || range->IsEmpty()) continue;
    4586             :     // We care only about ranges which spill in the frame.
    4587    45443325 :     if (!range->HasSpillRange() ||
    4588             :         range->IsSpilledOnlyInDeferredBlocks(data())) {
    4589             :       continue;
    4590             :     }
    4591             :     TopLevelLiveRange::SpillMoveInsertionList* spills =
    4592             :         range->GetSpillMoveInsertionLocations(data());
    4593             :     DCHECK_NOT_NULL(spills);
    4594    13510338 :     for (; spills != nullptr; spills = spills->next) {
    4595     4504450 :       code->GetInstructionBlock(spills->gap_index)->mark_needs_frame();
    4596             :     }
    4597             :   }
    4598     2642178 : }
    4599             : 
    4600     7925547 : OperandAssigner::OperandAssigner(RegisterAllocationData* data) : data_(data) {}
    4601             : 
    4602     2640488 : void OperandAssigner::DecideSpillingMode() {
    4603     2640488 :   if (data()->is_turbo_control_flow_aware_allocation()) {
    4604           0 :     for (auto range : data()->live_ranges()) {
    4605             :       int max_blocks = data()->code()->InstructionBlockCount();
    4606           0 :       if (range != nullptr && range->IsSpilledOnlyInDeferredBlocks(data())) {
    4607             :         // If the range is spilled only in deferred blocks and starts in
    4608             :         // a non-deferred block, we transition its representation here so
    4609             :         // that the LiveRangeConnector processes it correctly. If,
    4610             :         // however, the range starts in a deferred block, we upgrade it to
    4611             :         // spill at definition, as that definition is in a deferred block
    4612             :         // anyway. While this is an optimization, the code in
    4613             :         // LiveRangeConnector relies on it!
    4614           0 :         if (GetInstructionBlock(data()->code(), range->Start())->IsDeferred()) {
    4615           0 :           TRACE("Live range %d is spilled and alive in deferred code only\n",
    4616             :                 range->vreg());
    4617             :           range->TransitionRangeToSpillAtDefinition();
    4618             :         } else {
    4619           0 :           TRACE(
    4620             :               "Live range %d is spilled deferred code only but alive outside\n",
    4621             :               range->vreg());
    4622             :           DCHECK(data()->is_turbo_control_flow_aware_allocation());
    4623             :           range->TransitionRangeToDeferredSpill(data()->allocation_zone(),
    4624           0 :                                                 max_blocks);
    4625             :         }
    4626             :       }
    4627             :     }
    4628             :   }
    4629     2640488 : }
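                     : // Illustrative sketch (not part of the original source) of the two
                     : // transitions above, with hypothetical virtual registers:
                     : //
                     : //   v7: starts in a non-deferred block, spilled only in deferred
                     : //       blocks -> TransitionRangeToDeferredSpill(); the actual spill
                     : //       moves are committed later, inside the deferred blocks, by the
                     : //       LiveRangeConnector.
                     : //   v9: starts in a deferred block -> TransitionRangeToSpillAtDefinition();
                     : //       the definition is already in deferred code, so spilling at the
                     : //       definition cannot slow down a non-deferred path.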
    4630             : 
    4631     2642417 : void OperandAssigner::AssignSpillSlots() {
    4632    91535568 :   for (auto range : data()->live_ranges()) {
    4633    88892727 :     if (range != nullptr && range->get_bundle() != nullptr) {
    4634     5599708 :       range->get_bundle()->MergeSpillRanges();
    4635             :     }
    4636             :   }
    4637             :   ZoneVector<SpillRange*>& spill_ranges = data()->spill_ranges();
    4638             :   // Merge disjoint spill ranges.
    4639    91539295 :   for (size_t i = 0; i < spill_ranges.size(); ++i) {
    4640    44448375 :     SpillRange* range = spill_ranges[i];
    4641    44448375 :     if (range == nullptr) continue;
    4642     5576435 :     if (range->IsEmpty()) continue;
    4643  2313664780 :     for (size_t j = i + 1; j < spill_ranges.size(); ++j) {
    4644  1153125548 :       SpillRange* other = spill_ranges[j];
    4645  1153125548 :       if (other != nullptr && !other->IsEmpty()) {
    4646   259813435 :         range->TryMerge(other);
    4647             :       }
    4648             :     }
    4649             :   }
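                     : // Illustrative sketch (not part of the original source): TryMerge lets
                     : // spill ranges whose lifetimes never overlap share a single stack slot.
                     : // With hypothetical ranges
                     : //
                     : //   v1 spilled over [0, 10), v2 over [12, 20), v3 over [5, 15)
                     : //
                     : // v1 and v2 are disjoint and merge into one slot, while v3 overlaps
                     : // both and keeps its own spill range. Note that the pairwise loop above
                     : // is quadratic in the number of spill ranges, which the hit counts in
                     : // this report make quite visible.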
    4650             :   // Allocate slots for the merged spill ranges.
    4651    47085404 :   for (SpillRange* range : spill_ranges) {
    4652    44442711 :     if (range == nullptr || range->IsEmpty()) continue;
    4653             :     // Allocate a new operand referring to the spill slot.
    4654     3706094 :     if (!range->HasSlot()) {
    4655             :       int index = data()->frame()->AllocateSpillSlot(range->byte_width());
    4656             :       range->set_assigned_slot(index);
    4657             :     }
    4658             :   }
    4659     2642693 : }
    4660             : 
    4661     2654234 : void OperandAssigner::CommitAssignment() {
    4662             :   const size_t live_ranges_size = data()->live_ranges().size();
    4663    91543681 :   for (TopLevelLiveRange* top_range : data()->live_ranges()) {
    4664    88900870 :     CHECK_EQ(live_ranges_size,
    4665             :              data()->live_ranges().size());  // TODO(neis): crbug.com/831822
    4666   137926861 :     if (top_range == nullptr || top_range->IsEmpty()) continue;
    4667             :     InstructionOperand spill_operand;
    4668    39874879 :     if (top_range->HasSpillOperand()) {
    4669    15453378 :       spill_operand = *top_range->TopLevel()->GetSpillOperand();
    4670    24421501 :     } else if (top_range->TopLevel()->HasSpillRange()) {
    4671     5576333 :       spill_operand = top_range->TopLevel()->GetSpillRangeOperand();
    4672             :     }
    4673    39874879 :     if (top_range->is_phi()) {
    4674             :       data()->GetPhiMapValueFor(top_range)->CommitAssignment(
    4675     4176468 :           top_range->GetAssignedOperand());
    4676             :     }
    4677   221890979 :     for (LiveRange* range = top_range; range != nullptr;
    4678             :          range = range->next()) {
    4679    91014560 :       InstructionOperand assigned = range->GetAssignedOperand();
    4680             :       DCHECK(!assigned.IsUnallocated());
    4681    91013626 :       range->ConvertUsesToOperand(assigned, spill_operand);
    4682             :     }
    4683             : 
    4684    39868366 :     if (!spill_operand.IsInvalid()) {
    4685             :       // If this top level range has a child spilled in a deferred block, we use
    4686             :       // the range and control flow connection mechanism instead of spilling at
    4687             :       // definition. Refer to the ConnectLiveRanges and ResolveControlFlow
    4688             :       // phases. Normally, when we spill at definition, we do not insert a
    4689             :       // connecting move when a successor child range is spilled - because the
    4690             :       // spilled range picks up its value from the slot which was assigned at
    4691             :       // definition. For ranges that are determined to spill only in deferred
    4692             :       // blocks, we let ConnectLiveRanges and ResolveControlFlow find the blocks
    4693             :       // where a spill operand is expected, and then finalize by inserting the
    4694             :       // spills in the deferred blocks' dominators.
    4695    21029740 :       if (!top_range->IsSpilledOnlyInDeferredBlocks(data())) {
    4696             :         // Spill at definition if the range isn't spilled only in deferred
    4697             :         // blocks.
    4698    19955253 :         top_range->CommitSpillMoves(
    4699             :             data(), spill_operand,
    4700    57958801 :             top_range->has_slot_use() || top_range->spilled());
    4701             :       }
    4702             :     }
    4703             :   }
    4704     2642811 : }
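                     : // Illustrative sketch (not part of the original source): after
                     : // CommitAssignment, a hypothetical top-level range for v5 with children
                     : //
                     : //   [0, 14) assigned to a register R, [14, 30) spilled to slot S
                     : //
                     : // has every use in [0, 14) rewritten to R and every use in [14, 30)
                     : // rewritten to S, and, unless v5 is spilled only in deferred blocks,
                     : // a gap move R -> S is committed at the recorded spill insertion
                     : // points near the definition.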
    4705             : 
    4706     2642821 : ReferenceMapPopulator::ReferenceMapPopulator(RegisterAllocationData* data)
    4707     2642821 :     : data_(data) {}
    4708             : 
    4709           0 : bool ReferenceMapPopulator::SafePointsAreInOrder() const {
    4710             :   int safe_point = 0;
    4711           0 :   for (ReferenceMap* map : *data()->code()->reference_maps()) {
    4712           0 :     if (safe_point > map->instruction_position()) return false;
    4713             :     safe_point = map->instruction_position();
    4714             :   }
    4715           0 :   return true;
    4716             : }
    4717             : 
    4718     2641691 : void ReferenceMapPopulator::PopulateReferenceMaps() {
    4719             :   DCHECK(SafePointsAreInOrder());
    4720             :   // Map all delayed references.
    4721     2641691 :   for (RegisterAllocationData::DelayedReference& delayed_reference :
    4722             :        data()->delayed_references()) {
    4723           0 :     delayed_reference.map->RecordReference(
    4724           0 :         AllocatedOperand::cast(*delayed_reference.operand));
    4725             :   }
    4726             :   // Iterate over all safe point positions and record a pointer
    4727             :   // for all spilled live ranges at this point.
    4728             :   int last_range_start = 0;
    4729             :   const ReferenceMapDeque* reference_maps = data()->code()->reference_maps();
    4730             :   ReferenceMapDeque::const_iterator first_it = reference_maps->begin();
    4731             :   const size_t live_ranges_size = data()->live_ranges().size();
    4732    91530993 :   for (TopLevelLiveRange* range : data()->live_ranges()) {
    4733    88888580 :     CHECK_EQ(live_ranges_size,
    4734             :              data()->live_ranges().size());  // TODO(neis): crbug.com/831822
    4735    88888580 :     if (range == nullptr) continue;
    4736             :     // Skip non-reference values.
    4737    41904485 :     if (!data()->code()->IsReference(range->vreg())) continue;
    4738             :     // Skip empty live ranges.
    4739    17738909 :     if (range->IsEmpty()) continue;
    4740    15736749 :     if (range->has_preassigned_slot()) continue;
    4741             : 
    4742             :     // Find the extent of the range and its children.
    4743             :     int start = range->Start().ToInstructionIndex();
    4744             :     int end = 0;
    4745    99569978 :     for (LiveRange* cur = range; cur != nullptr; cur = cur->next()) {
    4746             :       LifetimePosition this_end = cur->End();
    4747    42194027 :       if (this_end.ToInstructionIndex() > end)
    4748             :         end = this_end.ToInstructionIndex();
    4749             :       DCHECK(cur->Start().ToInstructionIndex() >= start);
    4750             :     }
    4751             : 
    4752             :     // Most ranges are in order, but not all; watch for backward steps and
    4753             :     // reset first_it when they occur, so we don't miss any safe points.
    4754    15181924 :     if (start < last_range_start) first_it = reference_maps->begin();
    4755             :     last_range_start = start;
    4756             : 
    4757             :     // Step across all the safe points that are before the start of this range,
    4758             :     // recording how far we stepped so we need not repeat it for the next range.
    4759   697222581 :     for (; first_it != reference_maps->end(); ++first_it) {
    4760   696174468 :       ReferenceMap* map = *first_it;
    4761   696174468 :       if (map->instruction_position() >= start) break;
    4762             :     }
    4763             : 
    4764             :     InstructionOperand spill_operand;
    4765    21152669 :     if (((range->HasSpillOperand() &&
    4766    29303957 :           !range->GetSpillOperand()->IsConstant()) ||
    4767             :          range->HasSpillRange())) {
    4768     3903057 :       if (range->HasSpillOperand()) {
    4769     1059904 :         spill_operand = *range->GetSpillOperand();
    4770             :       } else {
    4771             :         spill_operand = range->GetSpillRangeOperand();
    4772             :       }
    4773             :       DCHECK(spill_operand.IsStackSlot());
    4774             :       DCHECK(CanBeTaggedOrCompressedPointer(
    4775             :           AllocatedOperand::cast(spill_operand).representation()));
    4776             :     }
    4777             : 
    4778             :     LiveRange* cur = range;
    4779             :     // Step through the safe points to see whether they are in the range.
    4780    62520383 :     for (auto it = first_it; it != reference_maps->end(); ++it) {
    4781    57650295 :       ReferenceMap* map = *it;
    4782             :       int safe_point = map->instruction_position();
    4783             : 
    4784             :       // The safe points are sorted so we can stop searching here.
    4785    57650295 :       if (safe_point - 1 > end) break;
    4786             : 
    4787             :       // Advance to the next active range that covers the current
    4788             :       // safe point position.
    4789             :       LifetimePosition safe_point_pos =
    4790             :           LifetimePosition::InstructionFromInstructionIndex(safe_point);
    4791             : 
    4792             :       // Search for the child range (cur) that covers safe_point_pos. If we
    4793             :       // don't find it before the children pass safe_point_pos, keep cur at
    4794             :       // the last child, because the next safe_point_pos may be covered by cur.
    4795             :       // This may happen if cur has more than one interval, and the current
    4796             :       // safe_point_pos is in between intervals.
    4797             :       // For that reason, cur may be at most the last child.
    4798             :       DCHECK_NOT_NULL(cur);
    4799             :       DCHECK(safe_point_pos >= cur->Start() || range == cur);
    4800             :       bool found = false;
    4801   104094670 :       while (!found) {
    4802    68643114 :         if (cur->Covers(safe_point_pos)) {
    4803             :           found = true;
    4804             :         } else {
    4805             :           LiveRange* next = cur->next();
    4806    33191545 :           if (next == nullptr || next->Start() > safe_point_pos) {
    4807             :             break;
    4808             :           }
    4809             :           cur = next;
    4810             :         }
    4811             :       }
    4812             : 
    4813    47338468 :       if (!found) {
    4814             :         continue;
    4815             :       }
    4816             : 
    4817             :       // Check if the live range is spilled and the safe point is after
    4818             :       // the spill position.
    4819             :       int spill_index = range->IsSpilledOnlyInDeferredBlocks(data())
    4820             :                             ? cur->Start().ToInstructionIndex()
    4821    35451666 :                             : range->spill_start_index();
    4822             : 
    4823    35451666 :       if (!spill_operand.IsInvalid() && safe_point >= spill_index) {
    4824    23158612 :         TRACE("Pointer for range %d (spilled at %d) at safe point %d\n",
    4825             :               range->vreg(), spill_index, safe_point);
    4826    23158612 :         map->RecordReference(AllocatedOperand::cast(spill_operand));
    4827             :       }
    4828             : 
    4829    35451657 :       if (!cur->spilled()) {
    4830           0 :         TRACE(
    4831             :             "Pointer in register for range %d:%d (start at %d) "
    4832             :             "at safe point %d\n",
    4833             :             range->vreg(), cur->relative_id(), cur->Start().value(),
    4834             :             safe_point);
    4835           0 :         InstructionOperand operand = cur->GetAssignedOperand();
    4836             :         DCHECK(!operand.IsStackSlot());
    4837             :         DCHECK(CanBeTaggedOrCompressedPointer(
    4838             :             AllocatedOperand::cast(operand).representation()));
    4839           0 :         map->RecordReference(AllocatedOperand::cast(operand));
    4840             :       }
    4841             :     }
    4842             :   }
    4843     2642413 : }
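                     : // Illustrative sketch (not part of the original source): for a
                     : // hypothetical tagged value v2 that is spilled to slot S starting at
                     : // instruction 20 and held in a register R before that, a safe point at
                     : // instruction 12 records only R (the covering child is not spilled),
                     : // while a safe point at instruction 24 records S, since 24 >= the
                     : // spill start index; a child still live in a register there would be
                     : // recorded as well.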
    4844             : 
    4845     5285406 : LiveRangeConnector::LiveRangeConnector(RegisterAllocationData* data)
    4846     5285406 :     : data_(data) {}
    4847             : 
    4848           0 : bool LiveRangeConnector::CanEagerlyResolveControlFlow(
    4849             :     const InstructionBlock* block) const {
    4850    21987122 :   if (block->PredecessorCount() != 1) return false;
    4851           0 :   return block->predecessors()[0].IsNext(block->rpo_number());
    4852             : }
    4853             : 
    4854     2642306 : void LiveRangeConnector::ResolveControlFlow(Zone* local_zone) {
    4855             :   // Lazily linearize live ranges in memory for fast lookup.
    4856     2642306 :   LiveRangeFinder finder(data(), local_zone);
    4857             :   ZoneVector<BitVector*>& live_in_sets = data()->live_in_sets();
    4858    23118140 :   for (const InstructionBlock* block : code()->instruction_blocks()) {
    4859    28466398 :     if (CanEagerlyResolveControlFlow(block)) continue;
    4860    24973768 :     BitVector* live = live_in_sets[block->rpo_number().ToInt()];
    4861             :     BitVector::Iterator iterator(live);
    4862   174125061 :     while (!iterator.Done()) {
    4863             :       int vreg = iterator.Current();
    4864    80819581 :       LiveRangeBoundArray* array = finder.ArrayFor(vreg);
    4865   204545140 :       for (const RpoNumber& pred : block->predecessors()) {
    4866             :         FindResult result;
    4867   123726339 :         const InstructionBlock* pred_block = code()->InstructionBlockAt(pred);
    4868   123726698 :         if (!array->FindConnectableSubranges(block, pred_block, &result)) {
    4869   121083469 :           continue;
    4870             :         }
    4871     8203045 :         InstructionOperand pred_op = result.pred_cover_->GetAssignedOperand();
    4872     8203016 :         InstructionOperand cur_op = result.cur_cover_->GetAssignedOperand();
    4873     8203046 :         if (pred_op.Equals(cur_op)) continue;
    4874     5239453 :         if (!pred_op.IsAnyRegister() && cur_op.IsAnyRegister()) {
    4875             :           // We're doing a reload.
    4876             :           // We don't need to, if:
    4877             :           // 1) there's no register use in this block, and
    4878             :           // 2) the range ends before the block does, and
    4879             :           // 3) we don't have a successor, or the successor is spilled.
    4880             :           LifetimePosition block_start =
    4881             :               LifetimePosition::GapFromInstructionIndex(block->code_start());
    4882             :           LifetimePosition block_end =
    4883             :               LifetimePosition::GapFromInstructionIndex(block->code_end());
    4884     2489530 :           const LiveRange* current = result.cur_cover_;
    4885             :           // TODO(herhut): This is not the successor if we have control flow!
    4886             :           const LiveRange* successor = current->next();
    4887     4979060 :           if (current->End() < block_end &&
    4888      229350 :               (successor == nullptr || successor->spilled())) {
    4889             :             // Verify point 1: no register use. We can go to the end of the
    4890             :             // range, since it's all within the block.
    4891             : 
    4892             :             bool uses_reg = false;
    4893         129 :             for (const UsePosition* use = current->NextUsePosition(block_start);
    4894      685687 :                  use != nullptr; use = use->next()) {
    4895      579050 :               if (use->operand()->IsAnyRegister()) {
    4896             :                 uses_reg = true;
    4897             :                 break;
    4898             :               }
    4899             :             }
    4900      685558 :             if (!uses_reg) continue;
    4901             :           }
    4902     2382917 :           if (current->TopLevel()->IsSpilledOnlyInDeferredBlocks(data()) &&
    4903             :               pred_block->IsDeferred()) {
    4904             :             // The spill location should be defined in pred_block, so add
    4905             :             // pred_block to the list of blocks requiring a spill operand.
    4906      291612 :             TRACE("Adding B%d to list of spill blocks for %d\n",
    4907             :                   pred_block->rpo_number().ToInt(),
    4908             :                   current->TopLevel()->vreg());
    4909             :             current->TopLevel()
    4910             :                 ->GetListOfBlocksRequiringSpillOperands(data())
    4911             :                 ->Add(pred_block->rpo_number().ToInt());
    4912             :           }
    4913             :         }
    4914             :         int move_loc = ResolveControlFlow(block, cur_op, pred_block, pred_op);
    4915             :         USE(move_loc);
    4916             :         DCHECK_IMPLIES(
    4917             :             result.cur_cover_->TopLevel()->IsSpilledOnlyInDeferredBlocks(
    4918             :                 data()) &&
    4919             :                 !(pred_op.IsAnyRegister() && cur_op.IsAnyRegister()),
    4920             :             code()->GetInstructionBlock(move_loc)->IsDeferred());
    4921             :       }
    4922    80818801 :       iterator.Advance();
    4923             :     }
    4924             :   }
    4925             : 
    4926             :   // At this stage, we collected blocks needing a spill operand from
    4927             :   // ConnectRanges and from ResolveControlFlow. Time to commit the spills for
    4928             :   // deferred blocks.
    4929             :   const size_t live_ranges_size = data()->live_ranges().size();
    4930    91533812 :   for (TopLevelLiveRange* top : data()->live_ranges()) {
    4931    88891150 :     CHECK_EQ(live_ranges_size,
    4932             :              data()->live_ranges().size());  // TODO(neis): crbug.com/831822
    4933   128760311 :     if (top == nullptr || top->IsEmpty() ||
    4934             :         !top->IsSpilledOnlyInDeferredBlocks(data()))
    4935             :       continue;
    4936     1074680 :     CommitSpillsInDeferredBlocks(top, finder.ArrayFor(top->vreg()), local_zone);
    4937             :   }
    4938     2642662 : }
    4939             : 
    4940           0 : int LiveRangeConnector::ResolveControlFlow(const InstructionBlock* block,
    4941             :                                            const InstructionOperand& cur_op,
    4942             :                                            const InstructionBlock* pred,
    4943             :                                            const InstructionOperand& pred_op) {
    4944             :   DCHECK(!pred_op.Equals(cur_op));
    4945             :   int gap_index;
    4946             :   Instruction::GapPosition position;
    4947     2643309 :   if (block->PredecessorCount() == 1) {
    4948             :     gap_index = block->first_instruction_index();
    4949             :     position = Instruction::START;
    4950             :   } else {
    4951             :     DCHECK_EQ(1, pred->SuccessorCount());
    4952             :     DCHECK(!code()
    4953             :                 ->InstructionAt(pred->last_instruction_index())
    4954             :                 ->HasReferenceMap());
    4955             :     gap_index = pred->last_instruction_index();
    4956             :     position = Instruction::END;
    4957             :   }
    4958     2643309 :   data()->AddGapMove(gap_index, position, pred_op, cur_op);
    4959           0 :   return gap_index;
    4960             : }
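                     : // Illustrative sketch (not part of the original source): where the
                     : // fix-up move lands depends on the edge shape, assuming critical edges
                     : // were split beforehand (which the DCHECKs above rely on):
                     : //
                     : //   B1 ──► B3          single predecessor: the move goes into the
                     : //                      START gap of B3's first instruction.
                     : //
                     : //   B1 ─┐
                     : //        ├─► B3        several predecessors: each Bi then has B3 as
                     : //   B2 ─┘              its only successor, so the move goes into the
                     : //                      END gap of Bi's last instruction.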
    4961             : 
    4962     2642298 : void LiveRangeConnector::ConnectRanges(Zone* local_zone) {
    4963             :   DelayedInsertionMap delayed_insertion_map(local_zone);
    4964             :   const size_t live_ranges_size = data()->live_ranges().size();
    4965    91527024 :   for (TopLevelLiveRange* top_range : data()->live_ranges()) {
    4966    88884689 :     CHECK_EQ(live_ranges_size,
    4967             :              data()->live_ranges().size());  // TODO(neis): crbug.com/831822
    4968    88884689 :     if (top_range == nullptr) continue;
    4969             :     bool connect_spilled = top_range->IsSpilledOnlyInDeferredBlocks(data());
    4970             :     LiveRange* first_range = top_range;
    4971   144212706 :     for (LiveRange *second_range = first_range->next(); second_range != nullptr;
    4972             :          first_range = second_range, second_range = second_range->next()) {
    4973             :       LifetimePosition pos = second_range->Start();
    4974             :       // Add gap move if the two live ranges touch and there is no block
    4975             :       // boundary.
    4976    82508618 :       if (second_range->spilled()) continue;
    4977    21767490 :       if (first_range->End() != pos) continue;
    4978    22560120 :       if (data()->IsBlockBoundary(pos) &&
    4979             :           !CanEagerlyResolveControlFlow(GetInstructionBlock(code(), pos))) {
    4980             :         continue;
    4981             :       }
    4982    20237425 :       InstructionOperand prev_operand = first_range->GetAssignedOperand();
    4983    20237614 :       InstructionOperand cur_operand = second_range->GetAssignedOperand();
    4984    20237676 :       if (prev_operand.Equals(cur_operand)) continue;
    4985             :       bool delay_insertion = false;
    4986             :       Instruction::GapPosition gap_pos;
    4987             :       int gap_index = pos.ToInstructionIndex();
    4988    22412534 :       if (connect_spilled && !prev_operand.IsAnyRegister() &&
    4989             :           cur_operand.IsAnyRegister()) {
    4990     1301267 :         const InstructionBlock* block = code()->GetInstructionBlock(gap_index);
    4991             :         DCHECK(block->IsDeferred());
    4992             :         // Performing a reload in this block, meaning the spill operand must
    4993             :         // be defined here.
    4994             :         top_range->GetListOfBlocksRequiringSpillOperands(data())->Add(
    4995             :             block->rpo_number().ToInt());
    4996             :       }
    4997             : 
    4998    19800998 :       if (pos.IsGapPosition()) {
    4999    19800803 :         gap_pos = pos.IsStart() ? Instruction::START : Instruction::END;
    5000             :       } else {
    5001         195 :         if (pos.IsStart()) {
    5002             :           delay_insertion = true;
    5003             :         } else {
    5004           0 :           gap_index++;
    5005             :         }
    5006         195 :         gap_pos = delay_insertion ? Instruction::END : Instruction::START;
    5007             :       }
    5008             :       // Reloads or spills for ranges spilled only in deferred blocks must
    5009             :       // themselves happen only in deferred blocks.
    5010             :       DCHECK_IMPLIES(connect_spilled && !(prev_operand.IsAnyRegister() &&
    5011             :                                           cur_operand.IsAnyRegister()),
    5012             :                      code()->GetInstructionBlock(gap_index)->IsDeferred());
    5013             : 
    5014             :       ParallelMove* move =
    5015             :           code()->InstructionAt(gap_index)->GetOrCreateParallelMove(
    5016             :               gap_pos, code_zone());
    5017    19800308 :       if (!delay_insertion) {
    5018             :         move->AddMove(prev_operand, cur_operand);
    5019             :       } else {
    5020             :         delayed_insertion_map.insert(
    5021         195 :             std::make_pair(std::make_pair(move, prev_operand), cur_operand));
    5022             :       }
    5023             :     }
    5024             :   }
    5025     2642335 :   if (delayed_insertion_map.empty()) return;
    5026             :   // Insert all the moves which should occur after the stored move.
    5027             :   ZoneVector<MoveOperands*> to_insert(local_zone);
    5028             :   ZoneVector<MoveOperands*> to_eliminate(local_zone);
    5029          73 :   to_insert.reserve(4);
    5030          73 :   to_eliminate.reserve(4);
    5031          73 :   ParallelMove* moves = delayed_insertion_map.begin()->first.first;
    5032             :   for (auto it = delayed_insertion_map.begin();; ++it) {
    5033             :     bool done = it == delayed_insertion_map.end();
    5034         268 :     if (done || it->first.first != moves) {
    5035             :       // Commit the MoveOperands for current ParallelMove.
    5036         195 :       for (MoveOperands* move : to_eliminate) {
    5037             :         move->Eliminate();
    5038             :       }
    5039         390 :       for (MoveOperands* move : to_insert) {
    5040         195 :         moves->push_back(move);
    5041             :       }
    5042         195 :       if (done) break;
    5043             :       // Reset state.
    5044             :       to_eliminate.clear();
    5045             :       to_insert.clear();
    5046         122 :       moves = it->first.first;
    5047             :     }
    5048             :     // Gather all MoveOperands for a single ParallelMove.
    5049             :     MoveOperands* move =
    5050         195 :         new (code_zone()) MoveOperands(it->first.second, it->second);
    5051         195 :     moves->PrepareInsertAfter(move, &to_eliminate);
    5052         195 :     to_insert.push_back(move);
    5053             :   }
    5054             : }
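                     : // Illustrative note (not part of the original source): most connecting
                     : // moves are inserted directly into the START or END gap at the split
                     : // position. The delayed path above handles the rare case where the
                     : // split falls on an instruction's start position (195 hits here versus
                     : // millions of direct insertions): such moves are buffered per
                     : // ParallelMove, reconciled against the existing moves with
                     : // PrepareInsertAfter (which eliminates moves they would clobber), and
                     : // appended in one batch so earlier moves keep their semantics.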
    5055             : 
    5056     1074632 : void LiveRangeConnector::CommitSpillsInDeferredBlocks(
    5057             :     TopLevelLiveRange* range, LiveRangeBoundArray* array, Zone* temp_zone) {
    5058             :   DCHECK(range->IsSpilledOnlyInDeferredBlocks(data()));
    5059             :   DCHECK(!range->spilled());
    5060             : 
    5061             :   InstructionSequence* code = data()->code();
    5062     1074632 :   InstructionOperand spill_operand = range->GetSpillRangeOperand();
    5063             : 
    5064     1074632 :   TRACE("Live Range %d will be spilled only in deferred blocks.\n",
    5065             :         range->vreg());
    5066             :   // If we have ranges that aren't spilled but require the operand on the stack,
    5067             :   // make sure we insert the spill.
    5068     9549256 :   for (const LiveRange* child = range; child != nullptr;
    5069             :        child = child->next()) {
    5070    13871081 :     for (const UsePosition* pos = child->first_pos(); pos != nullptr;
    5071             :          pos = pos->next()) {
    5072     9362422 :       if (pos->type() != UsePositionType::kRequiresSlot && !child->spilled())
    5073             :         continue;
    5074      535179 :       range->AddBlockRequiringSpillOperand(
    5075             :           code->GetInstructionBlock(pos->pos().ToInstructionIndex())
    5076             :               ->rpo_number(),
    5077             :           data());
    5078             :     }
    5079             :   }
    5080             : 
    5081     1074670 :   ZoneQueue<int> worklist(temp_zone);
    5082             : 
    5083     4833981 :   for (BitVector::Iterator iterator(
    5084             :            range->GetListOfBlocksRequiringSpillOperands(data()));
    5085     1879651 :        !iterator.Done(); iterator.Advance()) {
    5086     3759353 :     worklist.push(iterator.Current());
    5087             :   }
    5088             : 
    5089             :   ZoneSet<std::pair<RpoNumber, int>> done_moves(temp_zone);
    5090             :   // Seek the deferred blocks that dominate locations requiring spill operands,
    5091             :   // and spill there. We only need to spill at the start of such blocks.
    5092             :   BitVector done_blocks(
    5093             :       range->GetListOfBlocksRequiringSpillOperands(data())->length(),
    5094     1074611 :       temp_zone);
    5095     7428983 :   while (!worklist.empty()) {
    5096     6354377 :     int block_id = worklist.front();
    5097             :     worklist.pop();
    5098     6354336 :     if (done_blocks.Contains(block_id)) continue;
    5099             :     done_blocks.Add(block_id);
    5100             :     InstructionBlock* spill_block =
    5101     5034931 :         code->InstructionBlockAt(RpoNumber::FromInt(block_id));
    5102             : 
    5103    11201407 :     for (const RpoNumber& pred : spill_block->predecessors()) {
    5104     6166487 :       const InstructionBlock* pred_block = code->InstructionBlockAt(pred);
    5105             : 
    5106     6166488 :       if (pred_block->IsDeferred()) {
    5107     8949770 :         worklist.push(pred_block->rpo_number().ToInt());
    5108             :       } else {
    5109             :         LifetimePosition pred_end =
    5110             :             LifetimePosition::InstructionFromInstructionIndex(
    5111             :                 pred_block->last_instruction_index());
    5112             : 
    5113             :         LiveRangeBound* bound = array->Find(pred_end);
    5114             : 
    5115     1691608 :         InstructionOperand pred_op = bound->range_->GetAssignedOperand();
    5116             : 
    5117             :         RpoNumber spill_block_number = spill_block->rpo_number();
    5118     3383173 :         if (done_moves.find(std::make_pair(
    5119             :                 spill_block_number, range->vreg())) == done_moves.end()) {
    5120     1691537 :           TRACE("Spilling deferred spill for range %d at B%d\n", range->vreg(),
    5121             :                 spill_block_number.ToInt());
    5122             :           data()->AddGapMove(spill_block->first_instruction_index(),
    5123             :                              Instruction::GapPosition::START, pred_op,
    5124     1691537 :                              spill_operand);
    5125     3383168 :           done_moves.insert(std::make_pair(spill_block_number, range->vreg()));
    5126             :           spill_block->mark_needs_frame();
    5127             :         }
    5128             :       }
    5129             :     }
    5130             :   }
    5131     1074678 : }
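                     : // Illustrative sketch (not part of the original source): the worklist
                     : // walk above climbs from each block that needs the value on the stack
                     : // through deferred predecessors until it meets a non-deferred
                     : // predecessor. For a hypothetical chain
                     : //
                     : //   B4 (not deferred) -> B5 (deferred) -> B6 (deferred, needs slot)
                     : //
                     : // it queues B6, then B5, and finally commits a single gap move
                     : // pred_op -> spill_operand in the START gap of B5, the first deferred
                     : // block on the path; done_blocks and done_moves prevent duplicates
                     : // when deferred paths reconverge.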
    5132             : 
    5133             : #undef TRACE
    5134             : 
    5135             : }  // namespace compiler
    5136             : }  // namespace internal
    5137      121996 : }  // namespace v8

Generated by: LCOV version 1.10