LCOV - code coverage report
Current view: top level - src/compiler/backend - register-allocator.h (source / functions)
Test: app.info                              Date: 2019-04-17
Coverage:    Lines:     95 / 113   (84.1 %)
             Functions:  5 /   6   (83.3 %)

          Line data    Source code
       1             : // Copyright 2014 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #ifndef V8_COMPILER_BACKEND_REGISTER_ALLOCATOR_H_
       6             : #define V8_COMPILER_BACKEND_REGISTER_ALLOCATOR_H_
       7             : 
       8             : #include "src/base/bits.h"
       9             : #include "src/base/compiler-specific.h"
      10             : #include "src/compiler/backend/instruction.h"
      11             : #include "src/flags.h"
      12             : #include "src/globals.h"
      13             : #include "src/ostreams.h"
      14             : #include "src/register-configuration.h"
      15             : #include "src/zone/zone-containers.h"
      16             : 
      17             : namespace v8 {
      18             : namespace internal {
      19             : namespace compiler {
      20             : 
      21             : static const int32_t kUnassignedRegister = RegisterConfiguration::kMaxRegisters;
      22             : 
      23             : enum RegisterKind { GENERAL_REGISTERS, FP_REGISTERS };
      24             : 
       25             : // This class represents a single point of an InstructionOperand's lifetime. For
      26             : // each instruction there are four lifetime positions:
      27             : //
      28             : //   [[START, END], [START, END]]
      29             : //
      30             : // Where the first half position corresponds to
      31             : //
      32             : //  [GapPosition::START, GapPosition::END]
      33             : //
      34             : // and the second half position corresponds to
      35             : //
      36             : //  [Lifetime::USED_AT_START, Lifetime::USED_AT_END]
      37             : //
      38             : class LifetimePosition final {
      39             :  public:
      40             :   // Return the lifetime position that corresponds to the beginning of
      41             :   // the gap with the given index.
      42             :   static LifetimePosition GapFromInstructionIndex(int index) {
      43   244113218 :     return LifetimePosition(index * kStep);
      44             :   }
      45             :   // Return the lifetime position that corresponds to the beginning of
      46             :   // the instruction with the given index.
      47             :   static LifetimePosition InstructionFromInstructionIndex(int index) {
      48   265326790 :     return LifetimePosition(index * kStep + kHalfStep);
      49             :   }
      50             : 
      51             :   static bool ExistsGapPositionBetween(LifetimePosition pos1,
      52             :                                        LifetimePosition pos2) {
      53     2822197 :     if (pos1 > pos2) std::swap(pos1, pos2);
      54     2822197 :     LifetimePosition next(pos1.value_ + 1);
      55     2822197 :     if (next.IsGapPosition()) return next < pos2;
      56             :     return next.NextFullStart() < pos2;
      57             :   }
      58             : 
      59             :   // Returns a numeric representation of this lifetime position.
      60           0 :   int value() const { return value_; }
      61             : 
      62             :   // Returns the index of the instruction to which this lifetime position
      63             :   // corresponds.
      64             :   int ToInstructionIndex() const {
      65             :     DCHECK(IsValid());
      66   621812369 :     return value_ / kStep;
      67             :   }
      68             : 
      69             :   // Returns true if this lifetime position corresponds to a START value
      70    42981859 :   bool IsStart() const { return (value_ & (kHalfStep - 1)) == 0; }
      71             :   // Returns true if this lifetime position corresponds to an END value
      72             :   bool IsEnd() const { return (value_ & (kHalfStep - 1)) == 1; }
      73             :   // Returns true if this lifetime position corresponds to a gap START value
      74    24781756 :   bool IsFullStart() const { return (value_ & (kStep - 1)) == 0; }
      75             : 
      76   186894345 :   bool IsGapPosition() const { return (value_ & 0x2) == 0; }
      77             :   bool IsInstructionPosition() const { return !IsGapPosition(); }
      78             : 
      79             :   // Returns the lifetime position for the current START.
      80             :   LifetimePosition Start() const {
      81             :     DCHECK(IsValid());
      82   373568069 :     return LifetimePosition(value_ & ~(kHalfStep - 1));
      83             :   }
      84             : 
      85             :   // Returns the lifetime position for the current gap START.
      86             :   LifetimePosition FullStart() const {
      87             :     DCHECK(IsValid());
      88    39900981 :     return LifetimePosition(value_ & ~(kStep - 1));
      89             :   }
      90             : 
      91             :   // Returns the lifetime position for the current END.
      92             :   LifetimePosition End() const {
      93             :     DCHECK(IsValid());
      94   235284960 :     return LifetimePosition(Start().value_ + kHalfStep / 2);
      95             :   }
      96             : 
      97             :   // Returns the lifetime position for the beginning of the next START.
      98             :   LifetimePosition NextStart() const {
      99             :     DCHECK(IsValid());
     100    54088567 :     return LifetimePosition(Start().value_ + kHalfStep);
     101             :   }
     102             : 
     103             :   // Returns the lifetime position for the beginning of the next gap START.
     104             :   LifetimePosition NextFullStart() const {
     105             :     DCHECK(IsValid());
     106    40132620 :     return LifetimePosition(FullStart().value_ + kStep);
     107             :   }
     108             : 
     109             :   // Returns the lifetime position for the beginning of the previous START.
     110             :   LifetimePosition PrevStart() const {
     111             :     DCHECK(IsValid());
     112             :     DCHECK_LE(kHalfStep, value_);
     113    73063044 :     return LifetimePosition(Start().value_ - kHalfStep);
     114             :   }
     115             : 
     116             :   // Constructs the lifetime position which does not correspond to any
     117             :   // instruction.
     118  1830784944 :   LifetimePosition() : value_(-1) {}
     119             : 
      120             :   // Returns true if this lifetime position corresponds to some
     121             :   // instruction.
     122  1060449327 :   bool IsValid() const { return value_ != -1; }
     123             : 
     124             :   bool operator<(const LifetimePosition& that) const {
     125  3384916202 :     return this->value_ < that.value_;
     126             :   }
     127             : 
     128             :   bool operator<=(const LifetimePosition& that) const {
     129  1232686352 :     return this->value_ <= that.value_;
     130             :   }
     131             : 
     132             :   bool operator==(const LifetimePosition& that) const {
     133    26270174 :     return this->value_ == that.value_;
     134             :   }
     135             : 
     136             :   bool operator!=(const LifetimePosition& that) const {
     137             :     return this->value_ != that.value_;
     138             :   }
     139             : 
     140             :   bool operator>(const LifetimePosition& that) const {
     141    11497285 :     return this->value_ > that.value_;
     142             :   }
     143             : 
     144             :   bool operator>=(const LifetimePosition& that) const {
     145   145318689 :     return this->value_ >= that.value_;
     146             :   }
     147             : 
     148             :   void Print() const;
     149             : 
     150             :   static inline LifetimePosition Invalid() { return LifetimePosition(); }
     151             : 
     152             :   static inline LifetimePosition MaxPosition() {
      153             :     // We have to use this kind of getter instead of a static member due to
      154             :     // a crash bug in GDB.
     155             :     return LifetimePosition(kMaxInt);
     156             :   }
     157             : 
     158             :   static inline LifetimePosition FromInt(int value) {
     159             :     return LifetimePosition(value);
     160             :   }
     161             : 
     162             :  private:
     163             :   static const int kHalfStep = 2;
     164             :   static const int kStep = 2 * kHalfStep;
     165             : 
     166             :   static_assert(base::bits::IsPowerOfTwo(kHalfStep),
     167             :                 "Code relies on kStep and kHalfStep being a power of two");
     168             : 
     169             :   explicit LifetimePosition(int value) : value_(value) {}
     170             : 
     171             :   int value_;
     172             : };
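// A worked example of the encoding above (kHalfStep = 2, kStep = 4): for an
// instruction with index i, the four lifetime positions are
//   GapFromInstructionIndex(i).value()                == 4 * i      (gap START)
//   GapFromInstructionIndex(i).End().value()          == 4 * i + 1  (gap END)
//   InstructionFromInstructionIndex(i).value()        == 4 * i + 2  (instruction START)
//   InstructionFromInstructionIndex(i).End().value()  == 4 * i + 3  (instruction END)
// IsGapPosition() tests bit 1 and IsStart()/IsEnd() test bit 0, so e.g.
// GapFromInstructionIndex(3).value() == 12 and
// InstructionFromInstructionIndex(3).value() == 14.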
     173             : 
     174             : std::ostream& operator<<(std::ostream& os, const LifetimePosition pos);
     175             : 
     176             : enum class RegisterAllocationFlag : unsigned {
     177             :   kTurboControlFlowAwareAllocation = 1 << 0,
     178             :   kTurboPreprocessRanges = 1 << 1
     179             : };
     180             : 
     181             : using RegisterAllocationFlags = base::Flags<RegisterAllocationFlag>;
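// A small usage sketch, assuming base::Flags provides its usual construction
// from a single enum value and bitwise operators:
//   RegisterAllocationFlags flags(RegisterAllocationFlag::kTurboPreprocessRanges);
// Individual flags are then queried with operator&, as in
// RegisterAllocationData::is_turbo_preprocess_ranges() below.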
     182             : 
     183             : class SpillRange;
     184             : class LiveRange;
     185             : class TopLevelLiveRange;
     186             : 
     187             : class RegisterAllocationData final : public ZoneObject {
     188             :  public:
     189             :   // Encodes whether a spill happens in deferred code (kSpillDeferred) or
     190             :   // regular code (kSpillAtDefinition).
     191             :   enum SpillMode { kSpillAtDefinition, kSpillDeferred };
     192             : 
     193             :   bool is_turbo_control_flow_aware_allocation() const {
     194             :     return flags_ & RegisterAllocationFlag::kTurboControlFlowAwareAllocation;
     195             :   }
     196             : 
     197             :   bool is_turbo_preprocess_ranges() const {
     198           0 :     return flags_ & RegisterAllocationFlag::kTurboPreprocessRanges;
     199             :   }
     200             : 
     201             :   static constexpr int kNumberOfFixedRangesPerRegister = 2;
     202             : 
     203             :   class PhiMapValue : public ZoneObject {
     204             :    public:
     205             :     PhiMapValue(PhiInstruction* phi, const InstructionBlock* block, Zone* zone);
     206             : 
     207             :     const PhiInstruction* phi() const { return phi_; }
     208             :     const InstructionBlock* block() const { return block_; }
     209             : 
     210             :     // For hinting.
     211             :     int assigned_register() const { return assigned_register_; }
     212             :     void set_assigned_register(int register_code) {
     213             :       DCHECK_EQ(assigned_register_, kUnassignedRegister);
     214     1979237 :       assigned_register_ = register_code;
     215             :     }
     216             :     void UnsetAssignedRegister() { assigned_register_ = kUnassignedRegister; }
     217             : 
     218             :     void AddOperand(InstructionOperand* operand);
     219             :     void CommitAssignment(const InstructionOperand& operand);
     220             : 
     221             :    private:
     222             :     PhiInstruction* const phi_;
     223             :     const InstructionBlock* const block_;
     224             :     ZoneVector<InstructionOperand*> incoming_operands_;
     225             :     int assigned_register_;
     226             :   };
     227             :   using PhiMap = ZoneMap<int, PhiMapValue*>;
     228             : 
     229             :   struct DelayedReference {
     230             :     ReferenceMap* map;
     231             :     InstructionOperand* operand;
     232             :   };
     233             :   using DelayedReferences = ZoneVector<DelayedReference>;
     234             :   using RangesWithPreassignedSlots =
     235             :       ZoneVector<std::pair<TopLevelLiveRange*, int>>;
     236             : 
     237             :   RegisterAllocationData(const RegisterConfiguration* config,
     238             :                          Zone* allocation_zone, Frame* frame,
     239             :                          InstructionSequence* code,
     240             :                          RegisterAllocationFlags flags,
     241             :                          const char* debug_name = nullptr);
     242             : 
     243             :   const ZoneVector<TopLevelLiveRange*>& live_ranges() const {
     244             :     return live_ranges_;
     245             :   }
     246             :   ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; }
     247             :   const ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() const {
     248             :     return fixed_live_ranges_;
     249             :   }
     250             :   ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() {
     251             :     return fixed_live_ranges_;
     252             :   }
     253             :   ZoneVector<TopLevelLiveRange*>& fixed_float_live_ranges() {
     254             :     return fixed_float_live_ranges_;
     255             :   }
     256             :   const ZoneVector<TopLevelLiveRange*>& fixed_float_live_ranges() const {
     257             :     return fixed_float_live_ranges_;
     258             :   }
     259             :   ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() {
     260             :     return fixed_double_live_ranges_;
     261             :   }
     262             :   const ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() const {
     263             :     return fixed_double_live_ranges_;
     264             :   }
     265             :   ZoneVector<TopLevelLiveRange*>& fixed_simd128_live_ranges() {
     266             :     return fixed_simd128_live_ranges_;
     267             :   }
     268             :   const ZoneVector<TopLevelLiveRange*>& fixed_simd128_live_ranges() const {
     269             :     return fixed_simd128_live_ranges_;
     270             :   }
     271             :   ZoneVector<BitVector*>& live_in_sets() { return live_in_sets_; }
     272             :   ZoneVector<BitVector*>& live_out_sets() { return live_out_sets_; }
     273             :   ZoneVector<SpillRange*>& spill_ranges() { return spill_ranges_; }
     274             :   DelayedReferences& delayed_references() { return delayed_references_; }
     275             :   InstructionSequence* code() const { return code_; }
     276             :   // This zone is for data structures only needed during register allocation
     277             :   // phases.
     278             :   Zone* allocation_zone() const { return allocation_zone_; }
     279             :   // This zone is for InstructionOperands and moves that live beyond register
     280             :   // allocation.
     281             :   Zone* code_zone() const { return code()->zone(); }
     282             :   Frame* frame() const { return frame_; }
     283             :   const char* debug_name() const { return debug_name_; }
     284             :   const RegisterConfiguration* config() const { return config_; }
     285             : 
     286             :   MachineRepresentation RepresentationFor(int virtual_register);
     287             : 
     288             :   TopLevelLiveRange* GetOrCreateLiveRangeFor(int index);
     289             :   // Creates a new live range.
     290             :   TopLevelLiveRange* NewLiveRange(int index, MachineRepresentation rep);
     291             :   TopLevelLiveRange* NextLiveRange(MachineRepresentation rep);
     292             : 
     293             :   SpillRange* AssignSpillRangeToLiveRange(TopLevelLiveRange* range,
     294             :                                           SpillMode spill_mode);
     295             :   SpillRange* CreateSpillRangeForLiveRange(TopLevelLiveRange* range);
     296             : 
     297             :   MoveOperands* AddGapMove(int index, Instruction::GapPosition position,
     298             :                            const InstructionOperand& from,
     299             :                            const InstructionOperand& to);
     300             : 
     301             :   bool ExistsUseWithoutDefinition();
     302             :   bool RangesDefinedInDeferredStayInDeferred();
     303             : 
     304             :   void MarkFixedUse(MachineRepresentation rep, int index);
     305             :   bool HasFixedUse(MachineRepresentation rep, int index);
     306             : 
     307             :   void MarkAllocated(MachineRepresentation rep, int index);
     308             : 
     309             :   PhiMapValue* InitializePhiMap(const InstructionBlock* block,
     310             :                                 PhiInstruction* phi);
     311             :   PhiMapValue* GetPhiMapValueFor(TopLevelLiveRange* top_range);
     312             :   PhiMapValue* GetPhiMapValueFor(int virtual_register);
     313             :   bool IsBlockBoundary(LifetimePosition pos) const;
     314             : 
     315             :   RangesWithPreassignedSlots& preassigned_slot_ranges() {
     316             :     return preassigned_slot_ranges_;
     317             :   }
     318             : 
     319             :   void RememberSpillState(RpoNumber block,
     320             :                           const ZoneVector<LiveRange*>& state) {
     321             :     spill_state_[block.ToSize()] = state;
     322             :   }
     323             : 
     324             :   ZoneVector<LiveRange*>& GetSpillState(RpoNumber block) {
     325             :     auto& result = spill_state_[block.ToSize()];
     326             :     return result;
     327             :   }
     328             : 
     329             :   void ResetSpillState() { spill_state_.clear(); }
     330             : 
     331             :  private:
     332             :   int GetNextLiveRangeId();
     333             : 
     334             :   Zone* const allocation_zone_;
     335             :   Frame* const frame_;
     336             :   InstructionSequence* const code_;
     337             :   const char* const debug_name_;
     338             :   const RegisterConfiguration* const config_;
     339             :   PhiMap phi_map_;
     340             :   ZoneVector<BitVector*> live_in_sets_;
     341             :   ZoneVector<BitVector*> live_out_sets_;
     342             :   ZoneVector<TopLevelLiveRange*> live_ranges_;
     343             :   ZoneVector<TopLevelLiveRange*> fixed_live_ranges_;
     344             :   ZoneVector<TopLevelLiveRange*> fixed_float_live_ranges_;
     345             :   ZoneVector<TopLevelLiveRange*> fixed_double_live_ranges_;
     346             :   ZoneVector<TopLevelLiveRange*> fixed_simd128_live_ranges_;
     347             :   ZoneVector<SpillRange*> spill_ranges_;
     348             :   DelayedReferences delayed_references_;
     349             :   BitVector* assigned_registers_;
     350             :   BitVector* assigned_double_registers_;
     351             :   BitVector* fixed_register_use_;
     352             :   BitVector* fixed_fp_register_use_;
     353             :   int virtual_register_count_;
     354             :   RangesWithPreassignedSlots preassigned_slot_ranges_;
     355             :   ZoneVector<ZoneVector<LiveRange*>> spill_state_;
     356             :   RegisterAllocationFlags flags_;
     357             : 
     358             :   DISALLOW_COPY_AND_ASSIGN(RegisterAllocationData);
     359             : };
     360             : 
     361             : // Representation of the non-empty interval [start,end[.
     362             : class UseInterval final : public ZoneObject {
     363             :  public:
     364             :   UseInterval(LifetimePosition start, LifetimePosition end)
     365   284559559 :       : start_(start), end_(end), next_(nullptr) {
     366             :     DCHECK(start < end);
     367             :   }
     368             : 
     369             :   LifetimePosition start() const { return start_; }
     370   244864548 :   void set_start(LifetimePosition start) { start_ = start; }
     371             :   LifetimePosition end() const { return end_; }
     372    61522363 :   void set_end(LifetimePosition end) { end_ = end; }
     373             :   UseInterval* next() const { return next_; }
     374   191641904 :   void set_next(UseInterval* next) { next_ = next; }
     375             : 
      376             :   // Split this interval at the given position without affecting the
     377             :   // live range that owns it. The interval must contain the position.
     378             :   UseInterval* SplitAt(LifetimePosition pos, Zone* zone);
     379             : 
      380             :   // If this interval intersects with other, returns the smallest position
      381             :   // that belongs to both of them.
     382   873086389 :   LifetimePosition Intersect(const UseInterval* other) const {
     383   873086389 :     if (other->start() < start_) return other->Intersect(this);
     384   643827253 :     if (other->start() < end_) return other->start();
     385             :     return LifetimePosition::Invalid();
     386             :   }
     387             : 
     388             :   bool Contains(LifetimePosition point) const {
     389   839924817 :     return start_ <= point && point < end_;
     390             :   }
     391             : 
     392             :   // Returns the index of the first gap covered by this interval.
     393             :   int FirstGapIndex() const {
     394             :     int ret = start_.ToInstructionIndex();
     395    82306641 :     if (start_.IsInstructionPosition()) {
     396    50446069 :       ++ret;
     397             :     }
     398             :     return ret;
     399             :   }
     400             : 
     401             :   // Returns the index of the last gap covered by this interval.
     402             :   int LastGapIndex() const {
     403             :     int ret = end_.ToInstructionIndex();
     404    65621177 :     if (end_.IsGapPosition() && end_.IsStart()) {
     405     8886746 :       --ret;
     406             :     }
     407             :     return ret;
     408             :   }
     409             : 
     410             :  private:
     411             :   LifetimePosition start_;
     412             :   LifetimePosition end_;
     413             :   UseInterval* next_;
     414             : 
     415             :   DISALLOW_COPY_AND_ASSIGN(UseInterval);
     416             : };
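// A worked example of UseInterval semantics: with a = [4, 10) and b = [8, 16)
// (raw LifetimePosition values), a.Intersect(&b) returns the position with
// value 8, while disjoint intervals yield LifetimePosition::Invalid().
// Contains() follows the half-open convention, so position 10 is not
// contained in a. FirstGapIndex()/LastGapIndex() clamp the endpoints to the
// gaps actually covered: an interval that starts at an instruction position
// does not cover that instruction's own gap, so its first gap index is the
// following one.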
     417             : 
     418             : enum class UsePositionType : uint8_t {
     419             :   kRegisterOrSlot,
     420             :   kRegisterOrSlotOrConstant,
     421             :   kRequiresRegister,
     422             :   kRequiresSlot
     423             : };
     424             : 
     425             : enum class UsePositionHintType : uint8_t {
     426             :   kNone,
     427             :   kOperand,
     428             :   kUsePos,
     429             :   kPhi,
     430             :   kUnresolved
     431             : };
     432             : 
     433             : // Representation of a use position.
     434             : class V8_EXPORT_PRIVATE UsePosition final
     435             :     : public NON_EXPORTED_BASE(ZoneObject) {
     436             :  public:
     437             :   UsePosition(LifetimePosition pos, InstructionOperand* operand, void* hint,
     438             :               UsePositionHintType hint_type);
     439             : 
     440             :   InstructionOperand* operand() const { return operand_; }
     441             :   bool HasOperand() const { return operand_ != nullptr; }
     442             : 
     443             :   bool RegisterIsBeneficial() const {
     444             :     return RegisterBeneficialField::decode(flags_);
     445             :   }
     446             :   UsePositionType type() const { return TypeField::decode(flags_); }
     447             :   void set_type(UsePositionType type, bool register_beneficial);
     448             : 
     449             :   LifetimePosition pos() const { return pos_; }
     450             : 
     451             :   UsePosition* next() const { return next_; }
     452   156909369 :   void set_next(UsePosition* next) { next_ = next; }
     453             : 
     454             :   // For hinting only.
     455             :   void set_assigned_register(int register_code) {
     456    91923287 :     flags_ = AssignedRegisterField::update(flags_, register_code);
     457             :   }
     458             : 
     459             :   UsePositionHintType hint_type() const {
     460             :     return HintTypeField::decode(flags_);
     461             :   }
     462             :   bool HasHint() const;
     463             :   bool HintRegister(int* register_code) const;
     464             :   void SetHint(UsePosition* use_pos);
     465             :   void ResolveHint(UsePosition* use_pos);
     466             :   bool IsResolved() const {
     467             :     return hint_type() != UsePositionHintType::kUnresolved;
     468             :   }
     469             :   static UsePositionHintType HintTypeForOperand(const InstructionOperand& op);
     470             : 
     471             :  private:
     472             :   using TypeField = BitField<UsePositionType, 0, 2>;
     473             :   using HintTypeField = BitField<UsePositionHintType, 2, 3>;
     474             :   using RegisterBeneficialField = BitField<bool, 5, 1>;
     475             :   using AssignedRegisterField = BitField<int32_t, 6, 6>;
     476             : 
     477             :   InstructionOperand* const operand_;
     478             :   void* hint_;
     479             :   UsePosition* next_;
     480             :   LifetimePosition const pos_;
     481             :   uint32_t flags_;
     482             : 
     483             :   DISALLOW_COPY_AND_ASSIGN(UsePosition);
     484             : };
     485             : 
     486             : class SpillRange;
     487             : class RegisterAllocationData;
     488             : class TopLevelLiveRange;
     489             : class LiveRangeBundle;
     490             : 
     491             : // Representation of SSA values' live ranges as a collection of (continuous)
     492             : // intervals over the instruction ordering.
     493             : class V8_EXPORT_PRIVATE LiveRange : public NON_EXPORTED_BASE(ZoneObject) {
     494             :  public:
     495             :   UseInterval* first_interval() const { return first_interval_; }
     496             :   UsePosition* first_pos() const { return first_pos_; }
     497             :   TopLevelLiveRange* TopLevel() { return top_level_; }
     498             :   const TopLevelLiveRange* TopLevel() const { return top_level_; }
     499             : 
     500             :   bool IsTopLevel() const;
     501             : 
     502             :   LiveRange* next() const { return next_; }
     503             : 
     504             :   int relative_id() const { return relative_id_; }
     505             : 
     506             :   bool IsEmpty() const { return first_interval() == nullptr; }
     507             : 
     508             :   InstructionOperand GetAssignedOperand() const;
     509             : 
     510             :   MachineRepresentation representation() const {
     511             :     return RepresentationField::decode(bits_);
     512             :   }
     513             : 
     514             :   int assigned_register() const { return AssignedRegisterField::decode(bits_); }
     515             :   bool HasRegisterAssigned() const {
     516             :     return assigned_register() != kUnassignedRegister;
     517             :   }
     518             :   void set_assigned_register(int reg);
     519             :   void UnsetAssignedRegister();
     520             : 
     521             :   bool ShouldRecombine() const { return RecombineField::decode(bits_); }
     522             : 
     523           0 :   void SetRecombine() { bits_ = RecombineField::update(bits_, true); }
     524             :   void set_controlflow_hint(int reg) {
     525           0 :     bits_ = ControlFlowRegisterHint::update(bits_, reg);
     526             :   }
     527             :   int controlflow_hint() const {
     528   134512133 :     return ControlFlowRegisterHint::decode(bits_);
     529             :   }
     530             :   bool RegisterFromControlFlow(int* reg) {
     531             :     int hint = controlflow_hint();
     532    84430229 :     if (hint != kUnassignedRegister) {
     533           0 :       *reg = hint;
     534             :       return true;
     535             :     }
     536             :     return false;
     537             :   }
     538             :   bool spilled() const { return SpilledField::decode(bits_); }
     539             :   void AttachToNext();
     540             :   void Unspill();
     541             :   void Spill();
     542             : 
     543             :   RegisterKind kind() const;
     544             : 
     545             :   // Returns use position in this live range that follows both start
     546             :   // and last processed use position.
     547             :   UsePosition* NextUsePosition(LifetimePosition start) const;
     548             : 
     549             :   // Returns use position for which register is required in this live
     550             :   // range and which follows both start and last processed use position
     551             :   UsePosition* NextRegisterPosition(LifetimePosition start) const;
     552             : 
     553             :   // Returns the first use position requiring stack slot, or nullptr.
     554             :   UsePosition* NextSlotPosition(LifetimePosition start) const;
     555             : 
     556             :   // Returns use position for which register is beneficial in this live
     557             :   // range and which follows both start and last processed use position
     558             :   UsePosition* NextUsePositionRegisterIsBeneficial(
     559             :       LifetimePosition start) const;
     560             : 
     561             :   // Returns lifetime position for which register is beneficial in this live
     562             :   // range and which follows both start and last processed use position.
     563             :   LifetimePosition NextLifetimePositionRegisterIsBeneficial(
     564             :       const LifetimePosition& start) const;
     565             : 
     566             :   // Returns use position for which register is beneficial in this live
     567             :   // range and which precedes start.
     568             :   UsePosition* PreviousUsePositionRegisterIsBeneficial(
     569             :       LifetimePosition start) const;
     570             : 
      571             :   // Can this live range be spilled at this position?
     572             :   bool CanBeSpilled(LifetimePosition pos) const;
     573             : 
     574             :   // Splitting primitive used by both splitting and splintering members.
     575             :   // Performs the split, but does not link the resulting ranges.
     576             :   // The given position must follow the start of the range.
     577             :   // All uses following the given position will be moved from this
     578             :   // live range to the result live range.
     579             :   // The current range will terminate at position, while result will start from
     580             :   // position.
     581             :   enum HintConnectionOption : bool {
     582             :     DoNotConnectHints = false,
     583             :     ConnectHints = true
     584             :   };
     585             :   UsePosition* DetachAt(LifetimePosition position, LiveRange* result,
     586             :                         Zone* zone, HintConnectionOption connect_hints);
     587             : 
     588             :   // Detaches at position, and then links the resulting ranges. Returns the
     589             :   // child, which starts at position.
     590             :   LiveRange* SplitAt(LifetimePosition position, Zone* zone);
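  // A minimal usage sketch, assuming a LiveRange* range and a Zone* zone are
  // in scope: splitting at the start of instruction 6,
  //   LiveRange* child = range->SplitAt(
  //       LifetimePosition::InstructionFromInstructionIndex(6), zone);
  // leaves range ending at that position and returns child, which starts
  // there and is linked in as the following range.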
     591             : 
     592             :   // Returns nullptr when no register is hinted, otherwise sets register_index.
     593             :   UsePosition* FirstHintPosition(int* register_index) const;
     594             :   UsePosition* FirstHintPosition() const {
     595             :     int register_index;
     596             :     return FirstHintPosition(&register_index);
     597             :   }
     598             : 
     599             :   UsePosition* current_hint_position() const {
     600             :     DCHECK(current_hint_position_ == FirstHintPosition());
     601             :     return current_hint_position_;
     602             :   }
     603             : 
     604             :   LifetimePosition Start() const {
     605             :     DCHECK(!IsEmpty());
     606             :     return first_interval()->start();
     607             :   }
     608             : 
     609             :   LifetimePosition End() const {
     610             :     DCHECK(!IsEmpty());
     611             :     return last_interval_->end();
     612             :   }
     613             : 
     614             :   bool ShouldBeAllocatedBefore(const LiveRange* other) const;
     615             :   bool CanCover(LifetimePosition position) const;
     616             :   bool Covers(LifetimePosition position) const;
     617             :   LifetimePosition NextStartAfter(LifetimePosition position) const;
     618             :   LifetimePosition NextEndAfter(LifetimePosition position) const;
     619             :   LifetimePosition FirstIntersection(LiveRange* other) const;
     620             : 
     621             :   void VerifyChildStructure() const {
     622             :     VerifyIntervals();
     623           0 :     VerifyPositions();
     624             :   }
     625             : 
     626             :   void ConvertUsesToOperand(const InstructionOperand& op,
     627             :                             const InstructionOperand& spill_op);
     628             :   void SetUseHints(int register_index);
     629             :   void UnsetUseHints() { SetUseHints(kUnassignedRegister); }
     630             : 
     631             :   void Print(const RegisterConfiguration* config, bool with_children) const;
     632             :   void Print(bool with_children) const;
     633             : 
     634    51827131 :   void set_bundle(LiveRangeBundle* bundle) { bundle_ = bundle; }
     635             :   LiveRangeBundle* get_bundle() const { return bundle_; }
     636             :   bool RegisterFromBundle(int* hint) const;
     637             :   void UpdateBundleRegister(int reg) const;
     638             : 
     639             :  private:
     640             :   friend class TopLevelLiveRange;
     641             :   explicit LiveRange(int relative_id, MachineRepresentation rep,
     642             :                      TopLevelLiveRange* top_level);
     643             : 
     644             :   void UpdateParentForAllChildren(TopLevelLiveRange* new_top_level);
     645             : 
     646    96896192 :   void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); }
     647             : 
     648             :   UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
     649             :   void AdvanceLastProcessedMarker(UseInterval* to_start_of,
     650             :                                   LifetimePosition but_not_past) const;
     651             : 
     652             :   void VerifyPositions() const;
     653             :   void VerifyIntervals() const;
     654             : 
     655             :   using SpilledField = BitField<bool, 0, 1>;
      656             :   // Bits [1,7[ are used by TopLevelLiveRange.
     657             :   using AssignedRegisterField = BitField<int32_t, 7, 6>;
     658             :   using RepresentationField = BitField<MachineRepresentation, 13, 8>;
     659             :   using RecombineField = BitField<bool, 21, 1>;
     660             :   using ControlFlowRegisterHint = BitField<uint8_t, 22, 6>;
     661             :   // Bit 28 is used by TopLevelLiveRange.
     662             : 
     663             :   // Unique among children and splinters of the same virtual register.
     664             :   int relative_id_;
     665             :   uint32_t bits_;
     666             :   UseInterval* last_interval_;
     667             :   UseInterval* first_interval_;
     668             :   UsePosition* first_pos_;
     669             :   TopLevelLiveRange* top_level_;
     670             :   LiveRange* next_;
     671             :   // This is used as a cache, it doesn't affect correctness.
     672             :   mutable UseInterval* current_interval_;
     673             :   // This is used as a cache, it doesn't affect correctness.
     674             :   mutable UsePosition* last_processed_use_;
     675             :   // This is used as a cache, it's invalid outside of BuildLiveRanges.
     676             :   mutable UsePosition* current_hint_position_;
     677             :   // Cache the last position splintering stopped at.
     678             :   mutable UsePosition* splitting_pointer_;
     679             :   LiveRangeBundle* bundle_ = nullptr;
     680             : 
     681             :   DISALLOW_COPY_AND_ASSIGN(LiveRange);
     682             : };
     683             : 
     684             : struct LiveRangeOrdering {
     685             :   bool operator()(const LiveRange* left, const LiveRange* right) const {
     686             :     return left->Start() < right->Start();
     687             :   }
     688             : };
     689             : class LiveRangeBundle : public ZoneObject {
     690             :  public:
     691             :   void MergeSpillRanges();
     692             : 
     693             :   int id() { return id_; }
     694             : 
     695             :   int reg() { return reg_; }
     696             : 
     697             :   void set_reg(int reg) {
     698             :     DCHECK_EQ(reg_, kUnassignedRegister);
     699     1555944 :     reg_ = reg;
     700             :   }
     701             : 
     702             :  private:
     703             :   friend class BundleBuilder;
     704             : 
     705             :   class Range {
     706             :    public:
     707             :     Range(int s, int e) : start(s), end(e) {}
     708             :     Range(LifetimePosition s, LifetimePosition e)
     709     7600068 :         : start(s.value()), end(e.value()) {}
     710             :     int start;
     711             :     int end;
     712             :   };
     713             : 
     714             :   struct RangeOrdering {
     715             :     bool operator()(const Range left, const Range right) const {
     716    15247570 :       return left.start < right.start;
     717             :     }
     718             :   };
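  // Returns true if any interval in the given chain overlaps one of the
  // recorded uses. Both sequences are sorted by start position, so they are
  // walked in lock step: whichever side ends first is advanced, and any other
  // case is an overlap.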
     719     5994935 :   bool UsesOverlap(UseInterval* interval) {
     720             :     auto use = uses_.begin();
     721    32803536 :     while (interval != nullptr && use != uses_.end()) {
     722    12717886 :       if (use->end <= interval->start().value()) {
     723             :         ++use;
     724     5947167 :       } else if (interval->end().value() <= use->start) {
     725             :         interval = interval->next();
     726             :       } else {
     727             :         return true;
     728             :       }
     729             :     }
     730             :     return false;
     731             :   }
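  // Records every interval of the chain as a [start, end) Range; each entry
  // is expected to be new to the set (checked via the insertion result).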
     732     5737999 :   void InsertUses(UseInterval* interval) {
     733    20938115 :     while (interval != nullptr) {
     734     7600058 :       auto done = uses_.insert({interval->start(), interval->end()});
     735             :       USE(done);
     736             :       DCHECK_EQ(done.second, 1);
     737             :       interval = interval->next();
     738             :     }
     739     5737989 :   }
     740             :   explicit LiveRangeBundle(Zone* zone, int id)
     741     1586910 :       : ranges_(zone), uses_(zone), id_(id) {}
     742             : 
     743             :   bool TryAddRange(LiveRange* range);
     744             :   bool TryMerge(LiveRangeBundle* other);
     745             : 
     746             :   ZoneSet<LiveRange*, LiveRangeOrdering> ranges_;
     747             :   ZoneSet<Range, RangeOrdering> uses_;
     748             :   int id_;
     749             :   int reg_ = kUnassignedRegister;
     750             : };
     751             : 
     752             : class V8_EXPORT_PRIVATE TopLevelLiveRange final : public LiveRange {
     753             :  public:
     754             :   explicit TopLevelLiveRange(int vreg, MachineRepresentation rep);
     755             :   int spill_start_index() const { return spill_start_index_; }
     756             : 
     757           0 :   bool IsFixed() const { return vreg_ < 0; }
     758             : 
     759             :   bool IsDeferredFixed() const { return DeferredFixedField::decode(bits_); }
     760           0 :   void set_deferred_fixed() { bits_ = DeferredFixedField::update(bits_, true); }
     761             :   bool is_phi() const { return IsPhiField::decode(bits_); }
     762     4176480 :   void set_is_phi(bool value) { bits_ = IsPhiField::update(bits_, value); }
     763             : 
     764             :   bool is_non_loop_phi() const { return IsNonLoopPhiField::decode(bits_); }
     765             :   void set_is_non_loop_phi(bool value) {
     766     2088240 :     bits_ = IsNonLoopPhiField::update(bits_, value);
     767             :   }
     768             : 
     769             :   enum SlotUseKind { kNoSlotUse, kDeferredSlotUse, kGeneralSlotUse };
     770             : 
     771             :   bool has_slot_use() const {
     772   106665741 :     return slot_use_kind() > SlotUseKind::kNoSlotUse;
     773             :   }
     774             : 
     775             :   bool has_non_deferred_slot_use() const {
     776             :     return slot_use_kind() == SlotUseKind::kGeneralSlotUse;
     777             :   }
     778             : 
     779             :   void reset_slot_use() {
     780     6822342 :     bits_ = HasSlotUseField::update(bits_, SlotUseKind::kNoSlotUse);
     781             :   }
     782             :   void register_slot_use(SlotUseKind value) {
     783    22016307 :     bits_ = HasSlotUseField::update(bits_, Max(slot_use_kind(), value));
     784             :   }
     785             :   SlotUseKind slot_use_kind() const { return HasSlotUseField::decode(bits_); }
     786             : 
     787             :   // Add a new interval or a new use position to this live range.
     788             :   void EnsureInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
     789             :   void AddUseInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
     790             :   void AddUsePosition(UsePosition* pos);
     791             : 
     792             :   // Shorten the most recently added interval by setting a new start.
     793             :   void ShortenTo(LifetimePosition start);
     794             : 
     795             :   // Detaches between start and end, and attributes the resulting range to
     796             :   // result.
     797             :   // The current range is pointed to as "splintered_from". No parent/child
     798             :   // relationship is established between this and result.
     799             :   void Splinter(LifetimePosition start, LifetimePosition end, Zone* zone);
     800             : 
     801             :   // Assuming other was splintered from this range, embeds other and its
     802             :   // children as part of the children sequence of this range.
     803             :   void Merge(TopLevelLiveRange* other, Zone* zone);
     804             : 
     805             :   // Spill range management.
     806             :   void SetSpillRange(SpillRange* spill_range);
     807             : 
      808             :   // Encodes whether a range is also available from a memory location:
      809             :   //   kNoSpillType: not available in a memory location.
     810             :   //   kSpillOperand: computed in a memory location at range start.
     811             :   //   kSpillRange: copied (spilled) to memory location at range start.
     812             :   //   kDeferredSpillRange: copied (spilled) to memory location at entry
     813             :   //                        to deferred blocks that have a use from memory.
     814             :   //
     815             :   // Ranges either start out at kSpillOperand, which is also their final
     816             :   // state, or kNoSpillType. When spilled only in deferred code, a range
     817             :   // ends up with kDeferredSpillRange, while when spilled in regular code,
     818             :   // a range will be tagged as kSpillRange.
     819             :   enum class SpillType {
     820             :     kNoSpillType,
     821             :     kSpillOperand,
     822             :     kSpillRange,
     823             :     kDeferredSpillRange
     824             :   };
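  // For example, a range spilled only inside deferred code moves from
  // kNoSpillType to kDeferredSpillRange, and may later be promoted to
  // kSpillRange by TransitionRangeToSpillAtDefinition() below.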
     825             :   void set_spill_type(SpillType value) {
     826    49711463 :     bits_ = SpillTypeField::update(bits_, value);
     827             :   }
     828             :   SpillType spill_type() const { return SpillTypeField::decode(bits_); }
     829             :   InstructionOperand* GetSpillOperand() const {
     830             :     DCHECK_EQ(SpillType::kSpillOperand, spill_type());
     831             :     return spill_operand_;
     832             :   }
     833             : 
     834             :   SpillRange* GetAllocatedSpillRange() const {
     835             :     DCHECK_NE(SpillType::kSpillOperand, spill_type());
     836             :     return spill_range_;
     837             :   }
     838             : 
     839             :   SpillRange* GetSpillRange() const {
     840             :     DCHECK_GE(spill_type(), SpillType::kSpillRange);
     841             :     return spill_range_;
     842             :   }
     843             :   bool HasNoSpillType() const {
     844             :     return spill_type() == SpillType::kNoSpillType;
     845             :   }
     846             :   bool HasSpillOperand() const {
     847             :     return spill_type() == SpillType::kSpillOperand;
     848             :   }
     849             :   bool HasSpillRange() const { return spill_type() >= SpillType::kSpillRange; }
     850             :   bool HasGeneralSpillRange() const {
     851             :     return spill_type() == SpillType::kSpillRange;
     852             :   }
     853             :   AllocatedOperand GetSpillRangeOperand() const;
     854             : 
     855             :   void RecordSpillLocation(Zone* zone, int gap_index,
     856             :                            InstructionOperand* operand);
     857             :   void SetSpillOperand(InstructionOperand* operand);
     858             :   void SetSpillStartIndex(int start) {
     859    82757504 :     spill_start_index_ = Min(start, spill_start_index_);
     860             :   }
     861             : 
     862             :   void CommitSpillMoves(RegisterAllocationData* data,
     863             :                         const InstructionOperand& operand,
     864             :                         bool might_be_duplicated);
     865             : 
     866             :   // If all the children of this range are spilled in deferred blocks, and if
     867             :   // for any non-spilled child with a use position requiring a slot, that range
     868             :   // is contained in a deferred block, mark the range as
     869             :   // IsSpilledOnlyInDeferredBlocks, so that we avoid spilling at definition,
     870             :   // and instead let the LiveRangeConnector perform the spills within the
      871             :   // deferred blocks. If so, we insert spills here for non-spilled ranges
     872             :   // with slot use positions.
     873     1074571 :   void TreatAsSpilledInDeferredBlock(Zone* zone, int total_block_count) {
     874     1074571 :     spill_start_index_ = -1;
     875     1074571 :     spilled_in_deferred_blocks_ = true;
     876     1074571 :     spill_move_insertion_locations_ = nullptr;
     877             :     list_of_blocks_requiring_spill_operands_ =
     878     1074599 :         new (zone) BitVector(total_block_count, zone);
     879     1074587 :   }
     880             : 
     881             :   // Updates internal data structures to reflect that this range is not
     882             :   // spilled at definition but instead spilled in some blocks only.
     883           0 :   void TransitionRangeToDeferredSpill(Zone* zone, int total_block_count) {
     884           0 :     spill_start_index_ = -1;
     885           0 :     spill_move_insertion_locations_ = nullptr;
     886             :     list_of_blocks_requiring_spill_operands_ =
     887           0 :         new (zone) BitVector(total_block_count, zone);
     888           0 :   }
     889             : 
     890             :   // Promotes this range to spill at definition if it was marked for spilling
     891             :   // in deferred blocks before.
     892             :   void TransitionRangeToSpillAtDefinition() {
     893             :     DCHECK_NOT_NULL(spill_move_insertion_locations_);
     894           0 :     if (spill_type() == SpillType::kDeferredSpillRange) {
     895             :       set_spill_type(SpillType::kSpillRange);
     896             :     }
     897             :   }
     898             : 
     899             :   TopLevelLiveRange* splintered_from() const { return splintered_from_; }
     900             :   bool IsSplinter() const { return splintered_from_ != nullptr; }
     901             :   bool MayRequireSpillRange() const {
     902             :     DCHECK(!IsSplinter());
     903    15974314 :     return !HasSpillOperand() && spill_range_ == nullptr;
     904             :   }
     905             :   void UpdateSpillRangePostMerge(TopLevelLiveRange* merged);
     906             :   int vreg() const { return vreg_; }
     907             : 
     908             : #if DEBUG
     909             :   int debug_virt_reg() const;
     910             : #endif
     911             : 
     912             :   void Verify() const;
     913             :   void VerifyChildrenInOrder() const;
     914             :   LiveRange* GetChildCovers(LifetimePosition pos);
     915             :   int GetNextChildId() {
     916             :     return IsSplinter() ? splintered_from()->GetNextChildId()
     917    60725700 :                         : ++last_child_id_;
     918             :   }
     919             : 
     920     6917486 :   int GetMaxChildCount() const { return last_child_id_ + 1; }
     921             : 
     922             :   bool IsSpilledOnlyInDeferredBlocks(const RegisterAllocationData* data) const {
     923   146213011 :     if (data->is_turbo_control_flow_aware_allocation()) {
     924        1051 :       return spill_type() == SpillType::kDeferredSpillRange;
     925             :     }
     926   146211960 :     return spilled_in_deferred_blocks_;
     927             :   }
     928             : 
     929             :   struct SpillMoveInsertionList;
     930             : 
     931             :   SpillMoveInsertionList* GetSpillMoveInsertionLocations(
     932             :       const RegisterAllocationData* data) const {
     933             :     DCHECK(!IsSpilledOnlyInDeferredBlocks(data));
     934             :     return spill_move_insertion_locations_;
     935             :   }
     936             :   TopLevelLiveRange* splinter() const { return splinter_; }
     937     5663511 :   void SetSplinter(TopLevelLiveRange* splinter) {
     938             :     DCHECK_NULL(splinter_);
     939             :     DCHECK_NOT_NULL(splinter);
     940             : 
     941     5663511 :     splinter_ = splinter;
     942     5663511 :     splinter->relative_id_ = GetNextChildId();
     943             :     splinter->set_spill_type(spill_type());
     944     5663511 :     splinter->SetSplinteredFrom(this);
     945     5663448 :     if (bundle_ != nullptr) splinter->set_bundle(bundle_);
     946     5663448 :   }
     947             : 
     948      554416 :   void MarkHasPreassignedSlot() { has_preassigned_slot_ = true; }
     949             :   bool has_preassigned_slot() const { return has_preassigned_slot_; }
     950             : 
     951             :   void AddBlockRequiringSpillOperand(RpoNumber block_id,
     952             :                                      const RegisterAllocationData* data) {
     953             :     DCHECK(IsSpilledOnlyInDeferredBlocks(data));
     954             :     GetListOfBlocksRequiringSpillOperands(data)->Add(block_id.ToInt());
     955             :   }
     956             : 
     957             :   BitVector* GetListOfBlocksRequiringSpillOperands(
     958             :       const RegisterAllocationData* data) const {
     959             :     DCHECK(IsSpilledOnlyInDeferredBlocks(data));
     960             :     return list_of_blocks_requiring_spill_operands_;
     961             :   }
     962             : 
     963             :  private:
     964             :   friend class LiveRange;
     965             :   void SetSplinteredFrom(TopLevelLiveRange* splinter_parent);
     966             : 
     967             :   using HasSlotUseField = BitField<SlotUseKind, 1, 2>;
     968             :   using IsPhiField = BitField<bool, 3, 1>;
     969             :   using IsNonLoopPhiField = BitField<bool, 4, 1>;
     970             :   using SpillTypeField = BitField<SpillType, 5, 2>;
     971             :   using DeferredFixedField = BitField<bool, 28, 1>;
     972             : 
     973             :   int vreg_;
     974             :   int last_child_id_;
     975             :   TopLevelLiveRange* splintered_from_;
     976             :   union {
     977             :     // Correct value determined by spill_type()
     978             :     InstructionOperand* spill_operand_;
     979             :     SpillRange* spill_range_;
     980             :   };
     981             : 
     982             :   union {
     983             :     SpillMoveInsertionList* spill_move_insertion_locations_;
     984             :     BitVector* list_of_blocks_requiring_spill_operands_;
     985             :   };
     986             : 
     987             :   // TODO(mtrofin): generalize spilling after definition, currently specialized
     988             :   // just for spill in a single deferred block.
     989             :   bool spilled_in_deferred_blocks_;
     990             :   int spill_start_index_;
     991             :   UsePosition* last_pos_;
     992             :   LiveRange* last_child_covers_;
     993             :   TopLevelLiveRange* splinter_;
     994             :   bool has_preassigned_slot_;
     995             : 
     996             :   DISALLOW_COPY_AND_ASSIGN(TopLevelLiveRange);
     997             : };
     998             : 
     999             : struct PrintableLiveRange {
    1000             :   const RegisterConfiguration* register_configuration_;
    1001             :   const LiveRange* range_;
    1002             : };
    1003             : 
    1004             : std::ostream& operator<<(std::ostream& os,
    1005             :                          const PrintableLiveRange& printable_range);
    1006             : 
    1007             : class SpillRange final : public ZoneObject {
    1008             :  public:
    1009             :   static const int kUnassignedSlot = -1;
    1010             :   SpillRange(TopLevelLiveRange* range, Zone* zone);
    1011             : 
    1012             :   UseInterval* interval() const { return use_interval_; }
    1013             : 
    1014             :   bool IsEmpty() const { return live_ranges_.empty(); }
    1015             :   bool TryMerge(SpillRange* other);
    1016             :   bool HasSlot() const { return assigned_slot_ != kUnassignedSlot; }
    1017             : 
    1018             :   void set_assigned_slot(int index) {
    1019             :     DCHECK_EQ(kUnassignedSlot, assigned_slot_);
    1020     3706429 :     assigned_slot_ = index;
    1021             :   }
    1022             :   int assigned_slot() {
    1023             :     DCHECK_NE(kUnassignedSlot, assigned_slot_);
    1024             :     return assigned_slot_;
    1025             :   }
    1026             :   const ZoneVector<TopLevelLiveRange*>& live_ranges() const {
    1027             :     return live_ranges_;
    1028             :   }
    1029             :   ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; }
    1030             :   // Spill slots can be 4, 8, or 16 bytes wide.
    1031             :   int byte_width() const { return byte_width_; }
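                      :   // (Illustrative mapping, not spelled out in this header: a word32 value
                      :   // would take a 4-byte slot, word64/float64 values 8 bytes, and simd128
                      :   // values 16 bytes; the actual width is derived from the value's
                      :   // MachineRepresentation.)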
    1032             :   void Print() const;
    1033             : 
    1034             :  private:
    1035             :   LifetimePosition End() const { return end_position_; }
    1036             :   bool IsIntersectingWith(SpillRange* other) const;
    1037             :   // Merge intervals, making sure the use intervals are sorted
    1038             :   void MergeDisjointIntervals(UseInterval* other);
    1039             : 
    1040             :   ZoneVector<TopLevelLiveRange*> live_ranges_;
    1041             :   UseInterval* use_interval_;
    1042             :   LifetimePosition end_position_;
    1043             :   int assigned_slot_;
    1044             :   int byte_width_;
    1045             : 
    1046             :   DISALLOW_COPY_AND_ASSIGN(SpillRange);
    1047             : };
    1048             : 
    1049             : class ConstraintBuilder final : public ZoneObject {
    1050             :  public:
    1051             :   explicit ConstraintBuilder(RegisterAllocationData* data);
    1052             : 
     1053             :   // Phase 1: insert moves to account for fixed register operands.
    1054             :   void MeetRegisterConstraints();
    1055             : 
    1056             :   // Phase 2: deconstruct SSA by inserting moves in successors and the headers
    1057             :   // of blocks containing phis.
    1058             :   void ResolvePhis();
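                      :
                      :   // Illustrative sketch (not part of the original header; operand names are
                      :   // hypothetical): if an instruction needs its input v1 in a fixed register,
                      :   // phase 1 records a parallel move in the preceding gap, conceptually
                      :   //
                      :   //   gap: fixed_reg = v1;   // the instruction then reads fixed_reg
                      :   //
                      :   // Phase 2 connects each phi input to the phi's virtual register with a
                      :   // move on the corresponding control-flow edge, so later phases see plain
                      :   // live ranges instead of phis.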
    1059             : 
    1060             :  private:
    1061   356214595 :   RegisterAllocationData* data() const { return data_; }
    1062             :   InstructionSequence* code() const { return data()->code(); }
    1063             :   Zone* allocation_zone() const { return data()->allocation_zone(); }
    1064             : 
    1065             :   InstructionOperand* AllocateFixed(UnallocatedOperand* operand, int pos,
    1066             :                                     bool is_tagged, bool is_input);
    1067             :   void MeetRegisterConstraints(const InstructionBlock* block);
    1068             :   void MeetConstraintsBefore(int index);
    1069             :   void MeetConstraintsAfter(int index);
    1070             :   void MeetRegisterConstraintsForLastInstructionInBlock(
    1071             :       const InstructionBlock* block);
    1072             :   void ResolvePhis(const InstructionBlock* block);
    1073             : 
    1074             :   RegisterAllocationData* const data_;
    1075             : 
    1076             :   DISALLOW_COPY_AND_ASSIGN(ConstraintBuilder);
    1077             : };
    1078             : 
    1079     2642380 : class LiveRangeBuilder final : public ZoneObject {
    1080             :  public:
    1081             :   explicit LiveRangeBuilder(RegisterAllocationData* data, Zone* local_zone);
    1082             : 
     1083             :   // Phase 3: compute liveness of all virtual registers.
    1084             :   void BuildLiveRanges();
    1085             :   static BitVector* ComputeLiveOut(const InstructionBlock* block,
    1086             :                                    RegisterAllocationData* data);
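                      :
                      :   // Rough shape of the analysis (a sketch, not the exact implementation):
                      :   // blocks are visited in reverse order; a block's live-out set is derived
                      :   // from the live-in sets of its successors (plus the operands feeding their
                      :   // phis), and instructions are then walked backwards so that definitions
                      :   // end a live range and uses extend it, producing the use intervals and
                      :   // use positions the allocator consumes.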
    1087             : 
    1088             :  private:
    1089             :   using SpillMode = RegisterAllocationData::SpillMode;
    1090             :   static constexpr int kNumberOfFixedRangesPerRegister =
    1091             :       RegisterAllocationData::kNumberOfFixedRangesPerRegister;
    1092             : 
    1093             :   RegisterAllocationData* data() const { return data_; }
    1094             :   InstructionSequence* code() const { return data()->code(); }
    1095             :   Zone* allocation_zone() const { return data()->allocation_zone(); }
    1096             :   Zone* code_zone() const { return code()->zone(); }
    1097             :   const RegisterConfiguration* config() const { return data()->config(); }
    1098             :   ZoneVector<BitVector*>& live_in_sets() const {
    1099             :     return data()->live_in_sets();
    1100             :   }
    1101             : 
    1102             :   // Verification.
    1103             :   void Verify() const;
    1104             :   bool IntervalStartsAtBlockBoundary(const UseInterval* interval) const;
    1105             :   bool IntervalPredecessorsCoveredByRange(const UseInterval* interval,
    1106             :                                           const TopLevelLiveRange* range) const;
    1107             :   bool NextIntervalStartsInDifferentBlocks(const UseInterval* interval) const;
    1108             : 
    1109             :   // Liveness analysis support.
    1110             :   void AddInitialIntervals(const InstructionBlock* block, BitVector* live_out);
    1111             :   void ProcessInstructions(const InstructionBlock* block, BitVector* live);
    1112             :   void ProcessPhis(const InstructionBlock* block, BitVector* live);
    1113             :   void ProcessLoopHeader(const InstructionBlock* block, BitVector* live);
    1114             : 
    1115    24400047 :   static int FixedLiveRangeID(int index) { return -index - 1; }
    1116             :   int FixedFPLiveRangeID(int index, MachineRepresentation rep);
    1117             :   TopLevelLiveRange* FixedLiveRangeFor(int index, SpillMode spill_mode);
    1118             :   TopLevelLiveRange* FixedFPLiveRangeFor(int index, MachineRepresentation rep,
    1119             :                                          SpillMode spill_mode);
    1120             : 
    1121             :   void MapPhiHint(InstructionOperand* operand, UsePosition* use_pos);
    1122             :   void ResolvePhiHint(InstructionOperand* operand, UsePosition* use_pos);
    1123             : 
    1124             :   UsePosition* NewUsePosition(LifetimePosition pos, InstructionOperand* operand,
    1125             :                               void* hint, UsePositionHintType hint_type);
    1126             :   UsePosition* NewUsePosition(LifetimePosition pos) {
    1127     2711188 :     return NewUsePosition(pos, nullptr, nullptr, UsePositionHintType::kNone);
    1128             :   }
    1129             :   TopLevelLiveRange* LiveRangeFor(InstructionOperand* operand,
    1130             :                                   SpillMode spill_mode);
    1131             :   // Helper methods for building intervals.
    1132             :   UsePosition* Define(LifetimePosition position, InstructionOperand* operand,
    1133             :                       void* hint, UsePositionHintType hint_type,
    1134             :                       SpillMode spill_mode);
    1135             :   void Define(LifetimePosition position, InstructionOperand* operand,
    1136             :               SpillMode spill_mode) {
    1137    20797507 :     Define(position, operand, nullptr, UsePositionHintType::kNone, spill_mode);
    1138             :   }
    1139             :   UsePosition* Use(LifetimePosition block_start, LifetimePosition position,
    1140             :                    InstructionOperand* operand, void* hint,
    1141             :                    UsePositionHintType hint_type, SpillMode spill_mode);
    1142             :   void Use(LifetimePosition block_start, LifetimePosition position,
    1143             :            InstructionOperand* operand, SpillMode spill_mode) {
    1144             :     Use(block_start, position, operand, nullptr, UsePositionHintType::kNone,
    1145    72442067 :         spill_mode);
    1146             :   }
    1147             :   SpillMode SpillModeForBlock(const InstructionBlock* block) const {
    1148    22560356 :     if (data()->is_turbo_control_flow_aware_allocation()) {
    1149             :       return block->IsDeferred() ? SpillMode::kSpillDeferred
    1150           0 :                                  : SpillMode::kSpillAtDefinition;
    1151             :     }
    1152             :     return SpillMode::kSpillAtDefinition;
    1153             :   }
    1154             :   RegisterAllocationData* const data_;
    1155             :   ZoneMap<InstructionOperand*, UsePosition*> phi_hints_;
    1156             : 
    1157             :   DISALLOW_COPY_AND_ASSIGN(LiveRangeBuilder);
    1158             : };
    1159             : 
    1160             : class BundleBuilder final : public ZoneObject {
    1161             :  public:
    1162     2642647 :   explicit BundleBuilder(RegisterAllocationData* data) : data_(data) {}
    1163             : 
    1164             :   void BuildBundles();
    1165             : 
    1166             :  private:
    1167             :   RegisterAllocationData* data() const { return data_; }
    1168             :   InstructionSequence* code() const { return data_->code(); }
    1169             :   RegisterAllocationData* data_;
    1170             :   int next_bundle_id_ = 0;
    1171             : };
    1172             : 
    1173             : class RegisterAllocator : public ZoneObject {
    1174             :  public:
    1175             :   RegisterAllocator(RegisterAllocationData* data, RegisterKind kind);
    1176             : 
    1177             :  protected:
    1178             :   using SpillMode = RegisterAllocationData::SpillMode;
    1179             :   RegisterAllocationData* data() const { return data_; }
    1180             :   InstructionSequence* code() const { return data()->code(); }
    1181             :   RegisterKind mode() const { return mode_; }
    1182             :   int num_registers() const { return num_registers_; }
    1183             :   int num_allocatable_registers() const { return num_allocatable_registers_; }
    1184             :   const int* allocatable_register_codes() const {
    1185             :     return allocatable_register_codes_;
    1186             :   }
    1187             :   // Returns true iff. we must check float register aliasing.
    1188             :   bool check_fp_aliasing() const { return check_fp_aliasing_; }
    1189             : 
    1190             :   // TODO(mtrofin): explain why splitting in gap START is always OK.
    1191             :   LifetimePosition GetSplitPositionForInstruction(const LiveRange* range,
    1192             :                                                   int instruction_index);
    1193             : 
    1194             :   Zone* allocation_zone() const { return data()->allocation_zone(); }
    1195             : 
    1196             :   // Find the optimal split for ranges defined by a memory operand, e.g.
    1197             :   // constants or function parameters passed on the stack.
    1198             :   void SplitAndSpillRangesDefinedByMemoryOperand();
    1199             : 
    1200             :   // Split the given range at the given position.
    1201             :   // If range starts at or after the given position then the
    1202             :   // original range is returned.
    1203             :   // Otherwise returns the live range that starts at pos and contains
    1204             :   // all uses from the original range that follow pos. Uses at pos will
    1205             :   // still be owned by the original range after splitting.
    1206             :   LiveRange* SplitRangeAt(LiveRange* range, LifetimePosition pos);
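                      :   //
                      :   // Worked example (hypothetical indices, not from the original header):
                      :   // for a range covering [10, 40) with uses at 12, 20 and 34, something like
                      :   //
                      :   //   LiveRange* tail = SplitRangeAt(range, pos_20);  // pos_20 ~ position 20
                      :   //
                      :   // leaves [10, 20) and the uses at 12 and 20 in |range|, while |tail|
                      :   // covers [20, 40) and owns the use at 34.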
    1207             : 
    1208             :   bool CanProcessRange(LiveRange* range) const {
    1209   384842267 :     return range != nullptr && !range->IsEmpty() && range->kind() == mode();
    1210             :   }
    1211             : 
    1212             :   // Split the given range in a position from the interval [start, end].
    1213             :   LiveRange* SplitBetween(LiveRange* range, LifetimePosition start,
    1214             :                           LifetimePosition end);
    1215             : 
    1216             :   // Find a lifetime position in the interval [start, end] which
    1217             :   // is optimal for splitting: it is either header of the outermost
    1218             :   // loop covered by this interval or the latest possible position.
    1219             :   LifetimePosition FindOptimalSplitPos(LifetimePosition start,
    1220             :                                        LifetimePosition end);
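                      :   //
                      :   // Example (hypothetical layout): if the interval covers a loop whose
                      :   // header gap sits at position 24, FindOptimalSplitPos(18, 40) would prefer
                      :   // 24 over 40, keeping the split boundary out of the loop body; with no
                      :   // covered loop header it falls back to the latest position, here 40.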
    1221             : 
    1222             :   void Spill(LiveRange* range, SpillMode spill_mode);
    1223             : 
    1224             :   // If we are trying to spill a range inside the loop try to
    1225             :   // hoist spill position out to the point just before the loop.
    1226             :   LifetimePosition FindOptimalSpillingPos(LiveRange* range,
    1227             :                                           LifetimePosition pos);
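                      :   //
                      :   // Example (hypothetical positions): asking to spill at position 30 inside
                      :   // a loop whose header gap is at 20 may yield 20 instead, so the spill
                      :   // store executes once before the loop rather than on every iteration.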
    1228             : 
    1229             :   const ZoneVector<TopLevelLiveRange*>& GetFixedRegisters() const;
    1230             :   const char* RegisterName(int allocation_index) const;
    1231             : 
    1232             :  private:
    1233             :   RegisterAllocationData* const data_;
    1234             :   const RegisterKind mode_;
    1235             :   const int num_registers_;
    1236             :   int num_allocatable_registers_;
    1237             :   const int* allocatable_register_codes_;
    1238             :   bool check_fp_aliasing_;
    1239             : 
    1240             :  private:
    1241             :   bool no_combining_;
    1242             : 
    1243             :   DISALLOW_COPY_AND_ASSIGN(RegisterAllocator);
    1244             : };
    1245             : 
    1246     2937646 : class LinearScanAllocator final : public RegisterAllocator {
    1247             :  public:
    1248             :   LinearScanAllocator(RegisterAllocationData* data, RegisterKind kind,
    1249             :                       Zone* local_zone);
    1250             : 
    1251             :   // Phase 4: compute register assignments.
    1252             :   void AllocateRegisters();
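                      :
                      :   // Rough shape of the algorithm (a sketch, not the exact control flow):
                      :   // ranges are pulled from unhandled_live_ranges() in order of increasing
                      :   // start position; ForwardStateTo() retires or reactivates entries of the
                      :   // active/inactive sets at that position, and the current range then either
                      :   // receives a free register (TryAllocatePreferredReg/TryAllocateFreeReg) or
                      :   // competes with blocking ranges (AllocateBlockedReg), splitting and
                      :   // spilling whatever cannot be kept in a register.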
    1253             : 
    1254             :  private:
    1255             :   struct RangeWithRegister {
    1256             :     TopLevelLiveRange* range;
    1257             :     int expected_register;
    1258             :     struct Hash {
    1259             :       size_t operator()(const RangeWithRegister item) const {
    1260           0 :         return item.range->vreg();
    1261             :       }
    1262             :     };
    1263             :     struct Equals {
    1264             :       bool operator()(const RangeWithRegister one,
    1265             :                       const RangeWithRegister two) const {
    1266             :         return one.range == two.range;
    1267             :       }
    1268             :     };
    1269             : 
    1270             :     explicit RangeWithRegister(LiveRange* a_range)
    1271             :         : range(a_range->TopLevel()),
    1272           0 :           expected_register(a_range->assigned_register()) {}
    1273             :     RangeWithRegister(TopLevelLiveRange* toplevel, int reg)
    1274           0 :         : range(toplevel), expected_register(reg) {}
    1275             :   };
    1276             : 
    1277             :   using RangeWithRegisterSet =
    1278             :       ZoneUnorderedSet<RangeWithRegister, RangeWithRegister::Hash,
    1279             :                        RangeWithRegister::Equals>;
    1280             : 
    1281             :   void MaybeUndoPreviousSplit(LiveRange* range);
    1282             :   void SpillNotLiveRanges(RangeWithRegisterSet& to_be_live,
    1283             :                           LifetimePosition position, SpillMode spill_mode);
    1284             :   LiveRange* AssignRegisterOnReload(LiveRange* range, int reg);
    1285             :   void ReloadLiveRanges(RangeWithRegisterSet& to_be_live,
    1286             :                         LifetimePosition position);
    1287             : 
    1288             :   void UpdateDeferredFixedRanges(SpillMode spill_mode, InstructionBlock* block);
    1289             :   bool BlockIsDeferredOrImmediatePredecessorIsNotDeferred(
    1290             :       const InstructionBlock* block);
    1291             :   bool HasNonDeferredPredecessor(InstructionBlock* block);
    1292             : 
    1293             :   struct LiveRangeOrdering {
    1294             :     bool operator()(const LiveRange* a, const LiveRange* b) const {
    1295   369109718 :       return a->ShouldBeAllocatedBefore(b);
    1296             :     }
    1297             :   };
    1298             :   using LiveRangeQueue = ZoneMultiset<LiveRange*, LiveRangeOrdering>;
    1299             :   LiveRangeQueue& unhandled_live_ranges() { return unhandled_live_ranges_; }
    1300             :   ZoneVector<LiveRange*>& active_live_ranges() { return active_live_ranges_; }
    1301             :   ZoneVector<LiveRange*>& inactive_live_ranges() {
    1302             :     return inactive_live_ranges_;
    1303             :   }
    1304             : 
    1305             :   void SetLiveRangeAssignedRegister(LiveRange* range, int reg);
    1306             : 
     1307             :   // Helper methods for updating the live range lists.
    1308             :   void AddToActive(LiveRange* range);
    1309             :   void AddToInactive(LiveRange* range);
    1310             :   void AddToUnhandled(LiveRange* range);
    1311             :   ZoneVector<LiveRange*>::iterator ActiveToHandled(
    1312             :       ZoneVector<LiveRange*>::iterator it);
    1313             :   ZoneVector<LiveRange*>::iterator ActiveToInactive(
    1314             :       ZoneVector<LiveRange*>::iterator it, LifetimePosition position);
    1315             :   ZoneVector<LiveRange*>::iterator InactiveToHandled(
    1316             :       ZoneVector<LiveRange*>::iterator it);
    1317             :   ZoneVector<LiveRange*>::iterator InactiveToActive(
    1318             :       ZoneVector<LiveRange*>::iterator it, LifetimePosition position);
    1319             : 
    1320             :   void ForwardStateTo(LifetimePosition position);
    1321             : 
    1322             :   int LastDeferredInstructionIndex(InstructionBlock* start);
    1323             : 
    1324             :   // Helper methods for choosing state after control flow events.
    1325             : 
    1326             :   bool ConsiderBlockForControlFlow(InstructionBlock* current_block,
    1327             :                                    RpoNumber predecessor);
    1328             :   RpoNumber ChooseOneOfTwoPredecessorStates(InstructionBlock* current_block,
    1329             :                                             LifetimePosition boundary);
    1330             :   void ComputeStateFromManyPredecessors(InstructionBlock* current_block,
    1331             :                                         RangeWithRegisterSet* to_be_live);
    1332             : 
    1333             :   // Helper methods for allocating registers.
    1334             :   bool TryReuseSpillForPhi(TopLevelLiveRange* range);
    1335             :   int PickRegisterThatIsAvailableLongest(
    1336             :       LiveRange* current, int hint_reg,
    1337             :       const Vector<LifetimePosition>& free_until_pos);
    1338             :   bool TryAllocateFreeReg(LiveRange* range,
    1339             :                           const Vector<LifetimePosition>& free_until_pos);
    1340             :   bool TryAllocatePreferredReg(LiveRange* range,
    1341             :                                const Vector<LifetimePosition>& free_until_pos);
    1342             :   void GetFPRegisterSet(MachineRepresentation rep, int* num_regs,
    1343             :                         int* num_codes, const int** codes) const;
    1344             :   void FindFreeRegistersForRange(LiveRange* range,
    1345             :                                  Vector<LifetimePosition> free_until_pos);
    1346             :   void ProcessCurrentRange(LiveRange* current, SpillMode spill_mode);
    1347             :   void AllocateBlockedReg(LiveRange* range, SpillMode spill_mode);
    1348             :   bool TrySplitAndSpillSplinter(LiveRange* range);
    1349             : 
     1350             :   // Spill the given live range after position pos.
    1351             :   void SpillAfter(LiveRange* range, LifetimePosition pos, SpillMode spill_mode);
    1352             : 
     1353             :   // Spill the given live range after position [start] and up to position [end].
    1354             :   void SpillBetween(LiveRange* range, LifetimePosition start,
    1355             :                     LifetimePosition end, SpillMode spill_mode);
    1356             : 
     1357             :   // Spill the given live range after position [start] and up to position [end].
    1358             :   // Range is guaranteed to be spilled at least until position [until].
    1359             :   void SpillBetweenUntil(LiveRange* range, LifetimePosition start,
    1360             :                          LifetimePosition until, LifetimePosition end,
    1361             :                          SpillMode spill_mode);
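                      :   //
                      :   // How the three spill helpers relate (a sketch; the exact behavior lives
                      :   // in the .cc file): SpillAfter spills the part of the range following
                      :   // [pos]; SpillBetween is the common case of SpillBetweenUntil where no
                      :   // spilled state beyond [start] has to be guaranteed; SpillBetweenUntil
                      :   // keeps the range spilled at least up to [until] and hands the part after
                      :   // [end] back to the allocator for another attempt at a register.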
    1362             :   void SplitAndSpillIntersecting(LiveRange* range, SpillMode spill_mode);
    1363             : 
    1364             :   void PrintRangeRow(std::ostream& os, const TopLevelLiveRange* toplevel);
    1365             : 
    1366             :   void PrintRangeOverview(std::ostream& os);
    1367             : 
    1368             :   LiveRangeQueue unhandled_live_ranges_;
    1369             :   ZoneVector<LiveRange*> active_live_ranges_;
    1370             :   ZoneVector<LiveRange*> inactive_live_ranges_;
    1371             : 
     1372             :   // Approximation of the next position at which the set of active/inactive
     1373             :   // ranges will change. Used to avoid scanning for updates even if none are present.
    1374             :   LifetimePosition next_active_ranges_change_;
    1375             :   LifetimePosition next_inactive_ranges_change_;
    1376             : 
    1377             : #ifdef DEBUG
    1378             :   LifetimePosition allocation_finger_;
    1379             : #endif
    1380             : 
    1381             :   DISALLOW_COPY_AND_ASSIGN(LinearScanAllocator);
    1382             : };
    1383             : 
    1384             : class SpillSlotLocator final : public ZoneObject {
    1385             :  public:
    1386             :   explicit SpillSlotLocator(RegisterAllocationData* data);
    1387             : 
    1388             :   void LocateSpillSlots();
    1389             : 
    1390             :  private:
    1391    91531029 :   RegisterAllocationData* data() const { return data_; }
    1392             : 
    1393             :   RegisterAllocationData* const data_;
    1394             : 
    1395             :   DISALLOW_COPY_AND_ASSIGN(SpillSlotLocator);
    1396             : };
    1397             : 
    1398             : class OperandAssigner final : public ZoneObject {
    1399             :  public:
    1400             :   explicit OperandAssigner(RegisterAllocationData* data);
    1401             : 
    1402             :   // Phase 5: final decision on spilling mode.
    1403             :   void DecideSpillingMode();
    1404             : 
     1405             :   // Phase 6: assign spill slots.
    1406             :   void AssignSpillSlots();
    1407             : 
    1408             :   // Phase 7: commit assignment.
    1409             :   void CommitAssignment();
    1410             : 
    1411             :  private:
    1412   125750788 :   RegisterAllocationData* data() const { return data_; }
    1413             : 
    1414             :   RegisterAllocationData* const data_;
    1415             : 
    1416             :   DISALLOW_COPY_AND_ASSIGN(OperandAssigner);
    1417             : };
    1418             : 
    1419             : class ReferenceMapPopulator final : public ZoneObject {
    1420             :  public:
    1421             :   explicit ReferenceMapPopulator(RegisterAllocationData* data);
    1422             : 
    1423             :   // Phase 8: compute values for pointer maps.
    1424             :   void PopulateReferenceMaps();
    1425             : 
    1426             :  private:
    1427   129623628 :   RegisterAllocationData* data() const { return data_; }
    1428             : 
    1429             :   bool SafePointsAreInOrder() const;
    1430             : 
    1431             :   RegisterAllocationData* const data_;
    1432             : 
    1433             :   DISALLOW_COPY_AND_ASSIGN(ReferenceMapPopulator);
    1434             : };
    1435             : 
    1436             : class LiveRangeBoundArray;
    1437             : // Insert moves of the form
    1438             : //
    1439             : //          Operand(child_(k+1)) = Operand(child_k)
    1440             : //
    1441             : // where child_k and child_(k+1) are consecutive children of a range (so
    1442             : // child_k->next() == child_(k+1)), and Operand(...) refers to the
    1443             : // assigned operand, be it a register or a slot.
    1444             : class LiveRangeConnector final : public ZoneObject {
    1445             :  public:
    1446             :   explicit LiveRangeConnector(RegisterAllocationData* data);
    1447             : 
    1448             :   // Phase 9: reconnect split ranges with moves, when the control flow
    1449             :   // between the ranges is trivial (no branches).
    1450             :   void ConnectRanges(Zone* local_zone);
    1451             : 
    1452             :   // Phase 10: insert moves to connect ranges across basic blocks, when the
    1453             :   // control flow between them cannot be trivially resolved, such as joining
    1454             :   // branches.
    1455             :   void ResolveControlFlow(Zone* local_zone);
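                      :
                      :   // Illustrative example (hypothetical assignment): if child_k ended up in
                      :   // register r1 and child_(k+1) in stack slot s0, ConnectRanges() records
                      :   // the gap move s0 = r1 where the two children meet; ResolveControlFlow()
                      :   // does the analogous thing across block boundaries, placing the move at
                      :   // the end of the predecessor or the start of the successor block.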
    1456             : 
    1457             :  private:
    1458   363525513 :   RegisterAllocationData* data() const { return data_; }
    1459             :   InstructionSequence* code() const { return data()->code(); }
    1460             :   Zone* code_zone() const { return code()->zone(); }
    1461             : 
    1462             :   bool CanEagerlyResolveControlFlow(const InstructionBlock* block) const;
    1463             : 
    1464             :   int ResolveControlFlow(const InstructionBlock* block,
    1465             :                          const InstructionOperand& cur_op,
    1466             :                          const InstructionBlock* pred,
    1467             :                          const InstructionOperand& pred_op);
    1468             : 
    1469             :   void CommitSpillsInDeferredBlocks(TopLevelLiveRange* range,
    1470             :                                     LiveRangeBoundArray* array,
    1471             :                                     Zone* temp_zone);
    1472             : 
    1473             :   RegisterAllocationData* const data_;
    1474             : 
    1475             :   DISALLOW_COPY_AND_ASSIGN(LiveRangeConnector);
    1476             : };
    1477             : 
    1478             : }  // namespace compiler
    1479             : }  // namespace internal
    1480             : }  // namespace v8
    1481             : 
    1482             : #endif  // V8_COMPILER_BACKEND_REGISTER_ALLOCATOR_H_

Generated by: LCOV version 1.10