Line data Source code
1 : // Copyright 2014 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #ifndef V8_COMPILER_BACKEND_REGISTER_ALLOCATOR_H_
6 : #define V8_COMPILER_BACKEND_REGISTER_ALLOCATOR_H_
7 :
8 : #include "src/base/bits.h"
9 : #include "src/base/compiler-specific.h"
10 : #include "src/compiler/backend/instruction.h"
11 : #include "src/globals.h"
12 : #include "src/ostreams.h"
13 : #include "src/register-configuration.h"
14 : #include "src/zone/zone-containers.h"
15 :
16 : namespace v8 {
17 : namespace internal {
18 : namespace compiler {
19 :
// The two register classes the allocator tracks separately: general-purpose
// and floating-point registers.
enum RegisterKind { GENERAL_REGISTERS, FP_REGISTERS };
21 :
22 : // This class represents a single point of a InstructionOperand's lifetime. For
23 : // each instruction there are four lifetime positions:
24 : //
25 : // [[START, END], [START, END]]
26 : //
27 : // Where the first half position corresponds to
28 : //
29 : // [GapPosition::START, GapPosition::END]
30 : //
31 : // and the second half position corresponds to
32 : //
33 : // [Lifetime::USED_AT_START, Lifetime::USED_AT_END]
34 : //
35 : class LifetimePosition final {
36 : public:
37 : // Return the lifetime position that corresponds to the beginning of
38 : // the gap with the given index.
39 : static LifetimePosition GapFromInstructionIndex(int index) {
40 232997957 : return LifetimePosition(index * kStep);
41 : }
42 : // Return the lifetime position that corresponds to the beginning of
43 : // the instruction with the given index.
44 : static LifetimePosition InstructionFromInstructionIndex(int index) {
45 244323296 : return LifetimePosition(index * kStep + kHalfStep);
46 : }
47 :
48 : static bool ExistsGapPositionBetween(LifetimePosition pos1,
49 : LifetimePosition pos2) {
50 2793291 : if (pos1 > pos2) std::swap(pos1, pos2);
51 2793291 : LifetimePosition next(pos1.value_ + 1);
52 2793291 : if (next.IsGapPosition()) return next < pos2;
53 : return next.NextFullStart() < pos2;
54 : }
55 :
56 : // Returns a numeric representation of this lifetime position.
57 0 : int value() const { return value_; }
58 :
59 : // Returns the index of the instruction to which this lifetime position
60 : // corresponds.
61 : int ToInstructionIndex() const {
62 : DCHECK(IsValid());
63 865414454 : return value_ / kStep;
64 : }
65 :
66 : // Returns true if this lifetime position corresponds to a START value
67 40366814 : bool IsStart() const { return (value_ & (kHalfStep - 1)) == 0; }
68 : // Returns true if this lifetime position corresponds to an END value
69 : bool IsEnd() const { return (value_ & (kHalfStep - 1)) == 1; }
70 : // Returns true if this lifetime position corresponds to a gap START value
71 23387394 : bool IsFullStart() const { return (value_ & (kStep - 1)) == 0; }
72 :
73 175224746 : bool IsGapPosition() const { return (value_ & 0x2) == 0; }
74 : bool IsInstructionPosition() const { return !IsGapPosition(); }
75 :
76 : // Returns the lifetime position for the current START.
77 : LifetimePosition Start() const {
78 : DCHECK(IsValid());
79 329183314 : return LifetimePosition(value_ & ~(kHalfStep - 1));
80 : }
81 :
82 : // Returns the lifetime position for the current gap START.
83 : LifetimePosition FullStart() const {
84 : DCHECK(IsValid());
85 31949247 : return LifetimePosition(value_ & ~(kStep - 1));
86 : }
87 :
88 : // Returns the lifetime position for the current END.
89 : LifetimePosition End() const {
90 : DCHECK(IsValid());
91 205603373 : return LifetimePosition(Start().value_ + kHalfStep / 2);
92 : }
93 :
94 : // Returns the lifetime position for the beginning of the next START.
95 : LifetimePosition NextStart() const {
96 : DCHECK(IsValid());
97 45496066 : return LifetimePosition(Start().value_ + kHalfStep);
98 : }
99 :
100 : // Returns the lifetime position for the beginning of the next gap START.
101 : LifetimePosition NextFullStart() const {
102 : DCHECK(IsValid());
103 32192650 : return LifetimePosition(FullStart().value_ + kStep);
104 : }
105 :
106 : // Returns the lifetime position for the beginning of the previous START.
107 : LifetimePosition PrevStart() const {
108 : DCHECK(IsValid());
109 : DCHECK_LE(kHalfStep, value_);
110 68428839 : return LifetimePosition(Start().value_ - kHalfStep);
111 : }
112 :
113 : // Constructs the lifetime position which does not correspond to any
114 : // instruction.
115 1685709696 : LifetimePosition() : value_(-1) {}
116 :
117 : // Returns true if this lifetime positions corrensponds to some
118 : // instruction.
119 892239619 : bool IsValid() const { return value_ != -1; }
120 :
121 : bool operator<(const LifetimePosition& that) const {
122 2936837560 : return this->value_ < that.value_;
123 : }
124 :
125 : bool operator<=(const LifetimePosition& that) const {
126 1108714882 : return this->value_ <= that.value_;
127 : }
128 :
129 : bool operator==(const LifetimePosition& that) const {
130 18051435 : return this->value_ == that.value_;
131 : }
132 :
133 : bool operator!=(const LifetimePosition& that) const {
134 : return this->value_ != that.value_;
135 : }
136 :
137 : bool operator>(const LifetimePosition& that) const {
138 44934031 : return this->value_ > that.value_;
139 : }
140 :
141 : bool operator>=(const LifetimePosition& that) const {
142 133014277 : return this->value_ >= that.value_;
143 : }
144 :
145 : void Print() const;
146 :
147 : static inline LifetimePosition Invalid() { return LifetimePosition(); }
148 :
149 : static inline LifetimePosition MaxPosition() {
150 : // We have to use this kind of getter instead of static member due to
151 : // crash bug in GDB.
152 : return LifetimePosition(kMaxInt);
153 : }
154 :
155 : static inline LifetimePosition FromInt(int value) {
156 : return LifetimePosition(value);
157 : }
158 :
159 : private:
160 : static const int kHalfStep = 2;
161 : static const int kStep = 2 * kHalfStep;
162 :
163 : static_assert(base::bits::IsPowerOfTwo(kHalfStep),
164 : "Code relies on kStep and kHalfStep being a power of two");
165 :
166 : explicit LifetimePosition(int value) : value_(value) {}
167 :
168 : int value_;
169 : };
170 :
171 : std::ostream& operator<<(std::ostream& os, const LifetimePosition pos);
172 :
173 : // Representation of the non-empty interval [start,end[.
174 : class UseInterval final : public ZoneObject {
175 : public:
176 : UseInterval(LifetimePosition start, LifetimePosition end)
177 254389303 : : start_(start), end_(end), next_(nullptr) {
178 : DCHECK(start < end);
179 : }
180 :
181 : LifetimePosition start() const { return start_; }
182 229139745 : void set_start(LifetimePosition start) { start_ = start; }
183 : LifetimePosition end() const { return end_; }
184 56585376 : void set_end(LifetimePosition end) { end_ = end; }
185 : UseInterval* next() const { return next_; }
186 162146271 : void set_next(UseInterval* next) { next_ = next; }
187 :
188 : // Split this interval at the given position without effecting the
189 : // live range that owns it. The interval must contain the position.
190 : UseInterval* SplitAt(LifetimePosition pos, Zone* zone);
191 :
192 : // If this interval intersects with other return smallest position
193 : // that belongs to both of them.
194 712318702 : LifetimePosition Intersect(const UseInterval* other) const {
195 712318702 : if (other->start() < start_) return other->Intersect(this);
196 531381611 : if (other->start() < end_) return other->start();
197 : return LifetimePosition::Invalid();
198 : }
199 :
200 : bool Contains(LifetimePosition point) const {
201 757508392 : return start_ <= point && point < end_;
202 : }
203 :
204 : // Returns the index of the first gap covered by this interval.
205 : int FirstGapIndex() const {
206 : int ret = start_.ToInstructionIndex();
207 76304772 : if (start_.IsInstructionPosition()) {
208 45765922 : ++ret;
209 : }
210 : return ret;
211 : }
212 :
213 : // Returns the index of the last gap covered by this interval.
214 : int LastGapIndex() const {
215 : int ret = end_.ToInstructionIndex();
216 60449561 : if (end_.IsGapPosition() && end_.IsStart()) {
217 8364551 : --ret;
218 : }
219 : return ret;
220 : }
221 :
222 : private:
223 : LifetimePosition start_;
224 : LifetimePosition end_;
225 : UseInterval* next_;
226 :
227 : DISALLOW_COPY_AND_ASSIGN(UseInterval);
228 : };
229 :
// Location constraint a use position places on its operand.
enum class UsePositionType : uint8_t {
  kRegisterOrSlot,
  kRegisterOrSlotOrConstant,
  kRequiresRegister,
  kRequiresSlot
};
236 :
// Tags the kind of object a UsePosition's hint pointer (stored as a void*)
// refers to; kUnresolved marks hints not yet resolved (see
// UsePosition::IsResolved / ResolveHint).
enum class UsePositionHintType : uint8_t {
  kNone,
  kOperand,
  kUsePos,
  kPhi,
  kUnresolved
};
244 :
// Sentinel register code meaning "no register assigned": a value outside the
// valid register code range (RegisterConfiguration::kMaxRegisters).
static const int32_t kUnassignedRegister = RegisterConfiguration::kMaxRegisters;
246 :
// Representation of a use position: a point in the instruction ordering where
// a value is used, together with the constraint the use places on its
// location (flags_) and an optional allocation hint.
class V8_EXPORT_PRIVATE UsePosition final
    : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  UsePosition(LifetimePosition pos, InstructionOperand* operand, void* hint,
              UsePositionHintType hint_type);

  // The operand being used; may be null (see HasOperand).
  InstructionOperand* operand() const { return operand_; }
  bool HasOperand() const { return operand_ != nullptr; }

  // True when a register would be beneficial at this use, even if one is not
  // strictly required.
  bool RegisterIsBeneficial() const {
    return RegisterBeneficialField::decode(flags_);
  }
  UsePositionType type() const { return TypeField::decode(flags_); }
  void set_type(UsePositionType type, bool register_beneficial);

  LifetimePosition pos() const { return pos_; }

  // Next use position in the owning live range's singly linked list.
  UsePosition* next() const { return next_; }
  void set_next(UsePosition* next) { next_ = next; }

  // For hinting only.
  void set_assigned_register(int register_code) {
    flags_ = AssignedRegisterField::update(flags_, register_code);
  }

  UsePositionHintType hint_type() const {
    return HintTypeField::decode(flags_);
  }
  bool HasHint() const;
  // Presumably writes the hinted register code to *register_code and reports
  // success — defined in the .cc; confirm there.
  bool HintRegister(int* register_code) const;
  void SetHint(UsePosition* use_pos);
  void ResolveHint(UsePosition* use_pos);
  // A hint is resolved once it no longer carries the kUnresolved tag.
  bool IsResolved() const {
    return hint_type() != UsePositionHintType::kUnresolved;
  }
  static UsePositionHintType HintTypeForOperand(const InstructionOperand& op);

 private:
  // flags_ bit layout: use type (bits 0-1), hint type (2-4),
  // register-beneficial flag (5), assigned register code (6-11).
  typedef BitField<UsePositionType, 0, 2> TypeField;
  typedef BitField<UsePositionHintType, 2, 3> HintTypeField;
  typedef BitField<bool, 5, 1> RegisterBeneficialField;
  typedef BitField<int32_t, 6, 6> AssignedRegisterField;

  InstructionOperand* const operand_;
  void* hint_;
  UsePosition* next_;
  LifetimePosition const pos_;
  uint32_t flags_;

  DISALLOW_COPY_AND_ASSIGN(UsePosition);
};
299 :
300 : class SpillRange;
301 : class RegisterAllocationData;
302 : class TopLevelLiveRange;
303 : class LiveRangeBundle;
304 :
// Representation of SSA values' live ranges as a collection of (continuous)
// intervals over the instruction ordering. A LiveRange is either the range of
// a whole virtual register (see TopLevelLiveRange) or a child produced by
// splitting; all children point back at the same top_level_.
class V8_EXPORT_PRIVATE LiveRange : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  // Head of the singly linked list of use intervals.
  UseInterval* first_interval() const { return first_interval_; }
  // Head of the singly linked list of use positions.
  UsePosition* first_pos() const { return first_pos_; }
  TopLevelLiveRange* TopLevel() { return top_level_; }
  const TopLevelLiveRange* TopLevel() const { return top_level_; }

  bool IsTopLevel() const;

  // Next child range of the same top-level range.
  LiveRange* next() const { return next_; }

  // Unique among children and splinters of the same virtual register.
  int relative_id() const { return relative_id_; }

  bool IsEmpty() const { return first_interval() == nullptr; }

  InstructionOperand GetAssignedOperand() const;

  MachineRepresentation representation() const {
    return RepresentationField::decode(bits_);
  }

  int assigned_register() const { return AssignedRegisterField::decode(bits_); }
  bool HasRegisterAssigned() const {
    return assigned_register() != kUnassignedRegister;
  }
  void set_assigned_register(int reg);
  void UnsetAssignedRegister();

  bool spilled() const { return SpilledField::decode(bits_); }
  void Spill();

  RegisterKind kind() const;

  // Returns use position in this live range that follows both start
  // and last processed use position.
  UsePosition* NextUsePosition(LifetimePosition start) const;

  // Returns use position for which register is required in this live
  // range and which follows both start and last processed use position.
  UsePosition* NextRegisterPosition(LifetimePosition start) const;

  // Returns the first use position requiring stack slot, or nullptr.
  UsePosition* NextSlotPosition(LifetimePosition start) const;

  // Returns use position for which register is beneficial in this live
  // range and which follows both start and last processed use position.
  UsePosition* NextUsePositionRegisterIsBeneficial(
      LifetimePosition start) const;

  // Returns lifetime position for which register is beneficial in this live
  // range and which follows both start and last processed use position.
  LifetimePosition NextLifetimePositionRegisterIsBeneficial(
      const LifetimePosition& start) const;

  // Returns use position for which register is beneficial in this live
  // range and which precedes start.
  UsePosition* PreviousUsePositionRegisterIsBeneficial(
      LifetimePosition start) const;

  // Can this live range be spilled at this position.
  bool CanBeSpilled(LifetimePosition pos) const;

  // Splitting primitive used by both splitting and splintering members.
  // Performs the split, but does not link the resulting ranges.
  // The given position must follow the start of the range.
  // All uses following the given position will be moved from this
  // live range to the result live range.
  // The current range will terminate at position, while result will start from
  // position.
  enum HintConnectionOption : bool {
    DoNotConnectHints = false,
    ConnectHints = true
  };
  UsePosition* DetachAt(LifetimePosition position, LiveRange* result,
                        Zone* zone, HintConnectionOption connect_hints);

  // Detaches at position, and then links the resulting ranges. Returns the
  // child, which starts at position.
  LiveRange* SplitAt(LifetimePosition position, Zone* zone);

  // Returns nullptr when no register is hinted, otherwise sets register_index.
  UsePosition* FirstHintPosition(int* register_index) const;
  UsePosition* FirstHintPosition() const {
    int register_index;
    return FirstHintPosition(&register_index);
  }

  UsePosition* current_hint_position() const {
    DCHECK(current_hint_position_ == FirstHintPosition());
    return current_hint_position_;
  }

  // Start of the first interval; the range must be non-empty.
  LifetimePosition Start() const {
    DCHECK(!IsEmpty());
    return first_interval()->start();
  }

  // End of the last interval; the range must be non-empty.
  LifetimePosition End() const {
    DCHECK(!IsEmpty());
    return last_interval_->end();
  }

  bool ShouldBeAllocatedBefore(const LiveRange* other) const;
  bool CanCover(LifetimePosition position) const;
  bool Covers(LifetimePosition position) const;
  LifetimePosition NextStartAfter(LifetimePosition position) const;
  LifetimePosition NextEndAfter(LifetimePosition position) const;
  LifetimePosition FirstIntersection(LiveRange* other) const;

  // Debug-only structural validation of intervals and use positions.
  void VerifyChildStructure() const {
    VerifyIntervals();
    VerifyPositions();
  }

  void ConvertUsesToOperand(const InstructionOperand& op,
                            const InstructionOperand& spill_op);
  void SetUseHints(int register_index);
  void UnsetUseHints() { SetUseHints(kUnassignedRegister); }

  void Print(const RegisterConfiguration* config, bool with_children) const;
  void Print(bool with_children) const;

  // Bundle this range currently belongs to, if any (see LiveRangeBundle).
  void set_bundle(LiveRangeBundle* bundle) { bundle_ = bundle; }
  LiveRangeBundle* get_bundle() const { return bundle_; }
  bool RegisterFromBundle(int* hint) const;
  void UpdateBundleRegister(int reg) const;

 private:
  friend class TopLevelLiveRange;
  explicit LiveRange(int relative_id, MachineRepresentation rep,
                     TopLevelLiveRange* top_level);

  void UpdateParentForAllChildren(TopLevelLiveRange* new_top_level);

  void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); }

  UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
  void AdvanceLastProcessedMarker(UseInterval* to_start_of,
                                  LifetimePosition but_not_past) const;

  void VerifyPositions() const;
  void VerifyIntervals() const;

  // bits_ layout: spilled flag (bit 0), assigned register code (bits 6-11),
  // machine representation (bits 12-19). TopLevelLiveRange packs additional
  // fields into bits 1-5.
  typedef BitField<bool, 0, 1> SpilledField;
  typedef BitField<int32_t, 6, 6> AssignedRegisterField;
  typedef BitField<MachineRepresentation, 12, 8> RepresentationField;

  // Unique among children and splinters of the same virtual register.
  int relative_id_;
  uint32_t bits_;
  UseInterval* last_interval_;
  UseInterval* first_interval_;
  UsePosition* first_pos_;
  TopLevelLiveRange* top_level_;
  LiveRange* next_;
  // This is used as a cache, it doesn't affect correctness.
  mutable UseInterval* current_interval_;
  // This is used as a cache, it doesn't affect correctness.
  mutable UsePosition* last_processed_use_;
  // This is used as a cache, it's invalid outside of BuildLiveRanges.
  mutable UsePosition* current_hint_position_;
  // Cache the last position splintering stopped at.
  mutable UsePosition* splitting_pointer_;
  LiveRangeBundle* bundle_ = nullptr;

  DISALLOW_COPY_AND_ASSIGN(LiveRange);
};
474 :
// Orders live ranges by ascending start position; used as the comparator for
// sets of live ranges (see LiveRangeBundle::ranges_).
struct LiveRangeOrdering {
  bool operator()(const LiveRange* left, const LiveRange* right) const {
    return left->Start() < right->Start();
  }
};
480 : class LiveRangeBundle : public ZoneObject {
481 : public:
482 : void MergeSpillRanges();
483 :
484 : int id() { return id_; }
485 :
486 : int reg() { return reg_; }
487 :
488 : void set_reg(int reg) {
489 : DCHECK_EQ(reg_, kUnassignedRegister);
490 1617022 : reg_ = reg;
491 : }
492 :
493 : private:
494 : friend class BundleBuilder;
495 :
496 : class Range {
497 : public:
498 : Range(int s, int e) : start(s), end(e) {}
499 : Range(LifetimePosition s, LifetimePosition e)
500 7850460 : : start(s.value()), end(e.value()) {}
501 : int start;
502 : int end;
503 : };
504 :
505 : struct RangeOrdering {
506 : bool operator()(const Range left, const Range right) const {
507 15530292 : return left.start < right.start;
508 : }
509 : };
510 11880423 : bool UsesOverlap(UseInterval* interval) {
511 : auto use = uses_.begin();
512 37158532 : while (interval != nullptr && use != uses_.end()) {
513 11689264 : if (use->end <= interval->start().value()) {
514 : ++use;
515 6123997 : } else if (interval->end().value() <= use->start) {
516 : interval = interval->next();
517 : } else {
518 : return true;
519 : }
520 : }
521 : return false;
522 : }
523 13751826 : void InsertUses(UseInterval* interval) {
524 19653194 : while (interval != nullptr) {
525 7850458 : auto done = uses_.insert({interval->start(), interval->end()});
526 : USE(done);
527 : DCHECK_EQ(done.second, 1);
528 : interval = interval->next();
529 : }
530 5901366 : }
531 : explicit LiveRangeBundle(Zone* zone, int id)
532 1648366 : : ranges_(zone), uses_(zone), id_(id) {}
533 :
534 : bool TryAddRange(LiveRange* range);
535 : bool TryMerge(LiveRangeBundle* other);
536 :
537 : ZoneSet<LiveRange*, LiveRangeOrdering> ranges_;
538 : ZoneSet<Range, RangeOrdering> uses_;
539 : int id_;
540 : int reg_ = kUnassignedRegister;
541 : };
542 :
// The live range of a whole virtual register. Owns the spilling state and is
// the root of the chain of child LiveRanges produced by splitting and
// splintering. Fixed (pre-assigned) registers use negative vreg numbers.
class V8_EXPORT_PRIVATE TopLevelLiveRange final : public LiveRange {
 public:
  explicit TopLevelLiveRange(int vreg, MachineRepresentation rep);
  int spill_start_index() const { return spill_start_index_; }

  // Fixed-register ranges are identified by a negative virtual register.
  bool IsFixed() const { return vreg_ < 0; }

  bool is_phi() const { return IsPhiField::decode(bits_); }
  void set_is_phi(bool value) { bits_ = IsPhiField::update(bits_, value); }

  bool is_non_loop_phi() const { return IsNonLoopPhiField::decode(bits_); }
  void set_is_non_loop_phi(bool value) {
    bits_ = IsNonLoopPhiField::update(bits_, value);
  }

  bool has_slot_use() const { return HasSlotUseField::decode(bits_); }
  void set_has_slot_use(bool value) {
    bits_ = HasSlotUseField::update(bits_, value);
  }

  // Add a new interval or a new use position to this live range.
  void EnsureInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
  void AddUseInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
  void AddUsePosition(UsePosition* pos);

  // Shorten the most recently added interval by setting a new start.
  void ShortenTo(LifetimePosition start);

  // Detaches between start and end, and attributes the resulting range to
  // result.
  // The current range is pointed to as "splintered_from". No parent/child
  // relationship is established between this and result.
  void Splinter(LifetimePosition start, LifetimePosition end, Zone* zone);

  // Assuming other was splintered from this range, embeds other and its
  // children as part of the children sequence of this range.
  void Merge(TopLevelLiveRange* other, Zone* zone);

  // Spill range management.
  void SetSpillRange(SpillRange* spill_range);
  enum class SpillType { kNoSpillType, kSpillOperand, kSpillRange };
  void set_spill_type(SpillType value) {
    bits_ = SpillTypeField::update(bits_, value);
  }
  SpillType spill_type() const { return SpillTypeField::decode(bits_); }
  // Valid only while spill_type() == kSpillOperand (union member).
  InstructionOperand* GetSpillOperand() const {
    DCHECK_EQ(SpillType::kSpillOperand, spill_type());
    return spill_operand_;
  }

  SpillRange* GetAllocatedSpillRange() const {
    DCHECK_NE(SpillType::kSpillOperand, spill_type());
    return spill_range_;
  }

  // Valid only while spill_type() == kSpillRange (union member).
  SpillRange* GetSpillRange() const {
    DCHECK_EQ(SpillType::kSpillRange, spill_type());
    return spill_range_;
  }
  bool HasNoSpillType() const {
    return spill_type() == SpillType::kNoSpillType;
  }
  bool HasSpillOperand() const {
    return spill_type() == SpillType::kSpillOperand;
  }
  bool HasSpillRange() const { return spill_type() == SpillType::kSpillRange; }

  AllocatedOperand GetSpillRangeOperand() const;

  void RecordSpillLocation(Zone* zone, int gap_index,
                           InstructionOperand* operand);
  void SetSpillOperand(InstructionOperand* operand);
  // Only moves the spill start earlier, never later.
  void SetSpillStartIndex(int start) {
    spill_start_index_ = Min(start, spill_start_index_);
  }

  void CommitSpillMoves(InstructionSequence* sequence,
                        const InstructionOperand& operand,
                        bool might_be_duplicated);

  // If all the children of this range are spilled in deferred blocks, and if
  // for any non-spilled child with a use position requiring a slot, that range
  // is contained in a deferred block, mark the range as
  // IsSpilledOnlyInDeferredBlocks, so that we avoid spilling at definition,
  // and instead let the LiveRangeConnector perform the spills within the
  // deferred blocks. If so, we insert here spills for non-spilled ranges
  // with slot use positions.
  void TreatAsSpilledInDeferredBlock(Zone* zone, int total_block_count) {
    spill_start_index_ = -1;
    spilled_in_deferred_blocks_ = true;
    // Switch the union from the spill-move list to the block bit vector.
    spill_move_insertion_locations_ = nullptr;
    list_of_blocks_requiring_spill_operands_ =
        new (zone) BitVector(total_block_count, zone);
  }

  void CommitSpillInDeferredBlocks(RegisterAllocationData* data,
                                   const InstructionOperand& spill_operand,
                                   BitVector* necessary_spill_points);

  TopLevelLiveRange* splintered_from() const { return splintered_from_; }
  bool IsSplinter() const { return splintered_from_ != nullptr; }
  bool MayRequireSpillRange() const {
    DCHECK(!IsSplinter());
    return !HasSpillOperand() && spill_range_ == nullptr;
  }
  void UpdateSpillRangePostMerge(TopLevelLiveRange* merged);
  int vreg() const { return vreg_; }

#if DEBUG
  int debug_virt_reg() const;
#endif

  void Verify() const;
  void VerifyChildrenInOrder() const;

  // Child ids are handed out by the splinter root so they stay unique across
  // the whole splinter family.
  int GetNextChildId() {
    return IsSplinter() ? splintered_from()->GetNextChildId()
                        : ++last_child_id_;
  }

  int GetChildCount() const { return last_child_id_ + 1; }

  bool IsSpilledOnlyInDeferredBlocks() const {
    return spilled_in_deferred_blocks_;
  }

  struct SpillMoveInsertionList;

  SpillMoveInsertionList* GetSpillMoveInsertionLocations() const {
    DCHECK(!IsSpilledOnlyInDeferredBlocks());
    return spill_move_insertion_locations_;
  }
  TopLevelLiveRange* splinter() const { return splinter_; }
  // Registers |splinter| as this range's (single) splinter, inheriting the
  // spill type and bundle.
  void SetSplinter(TopLevelLiveRange* splinter) {
    DCHECK_NULL(splinter_);
    DCHECK_NOT_NULL(splinter);

    splinter_ = splinter;
    splinter->relative_id_ = GetNextChildId();
    splinter->set_spill_type(spill_type());
    splinter->SetSplinteredFrom(this);
    if (bundle_ != nullptr) splinter->set_bundle(bundle_);
  }

  void MarkHasPreassignedSlot() { has_preassigned_slot_ = true; }
  bool has_preassigned_slot() const { return has_preassigned_slot_; }

  void AddBlockRequiringSpillOperand(RpoNumber block_id) {
    DCHECK(IsSpilledOnlyInDeferredBlocks());
    GetListOfBlocksRequiringSpillOperands()->Add(block_id.ToInt());
  }

  BitVector* GetListOfBlocksRequiringSpillOperands() const {
    DCHECK(IsSpilledOnlyInDeferredBlocks());
    return list_of_blocks_requiring_spill_operands_;
  }

 private:
  void SetSplinteredFrom(TopLevelLiveRange* splinter_parent);

  // Continues LiveRange's bits_ layout (bit 0 is SpilledField there).
  typedef BitField<bool, 1, 1> HasSlotUseField;
  typedef BitField<bool, 2, 1> IsPhiField;
  typedef BitField<bool, 3, 1> IsNonLoopPhiField;
  typedef BitField<SpillType, 4, 2> SpillTypeField;

  int vreg_;
  int last_child_id_;
  TopLevelLiveRange* splintered_from_;
  union {
    // Correct value determined by spill_type()
    InstructionOperand* spill_operand_;
    SpillRange* spill_range_;
  };

  union {
    // Active member selected by spilled_in_deferred_blocks_; see
    // TreatAsSpilledInDeferredBlock.
    SpillMoveInsertionList* spill_move_insertion_locations_;
    BitVector* list_of_blocks_requiring_spill_operands_;
  };

  // TODO(mtrofin): generalize spilling after definition, currently specialized
  // just for spill in a single deferred block.
  bool spilled_in_deferred_blocks_;
  int spill_start_index_;
  UsePosition* last_pos_;
  TopLevelLiveRange* splinter_;
  bool has_preassigned_slot_;

  DISALLOW_COPY_AND_ASSIGN(TopLevelLiveRange);
};
732 :
// Pairs a live range with the register configuration needed to print its
// assigned registers symbolically (see the operator<< declared below).
struct PrintableLiveRange {
  const RegisterConfiguration* register_configuration_;
  const LiveRange* range_;
};
737 :
738 : std::ostream& operator<<(std::ostream& os,
739 : const PrintableLiveRange& printable_range);
740 :
// The stack-slot backing for one or more live ranges. Tracks the merged use
// intervals of its members so that non-intersecting spill ranges can be
// coalesced into a single slot (see TryMerge / IsIntersectingWith).
class SpillRange final : public ZoneObject {
 public:
  static const int kUnassignedSlot = -1;
  SpillRange(TopLevelLiveRange* range, Zone* zone);

  // Head of the merged use-interval list.
  UseInterval* interval() const { return use_interval_; }

  bool IsEmpty() const { return live_ranges_.empty(); }
  bool TryMerge(SpillRange* other);
  bool HasSlot() const { return assigned_slot_ != kUnassignedSlot; }

  // The slot index may be assigned exactly once.
  void set_assigned_slot(int index) {
    DCHECK_EQ(kUnassignedSlot, assigned_slot_);
    assigned_slot_ = index;
  }
  int assigned_slot() {
    DCHECK_NE(kUnassignedSlot, assigned_slot_);
    return assigned_slot_;
  }
  const ZoneVector<TopLevelLiveRange*>& live_ranges() const {
    return live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; }
  // Spill slots can be 4, 8, or 16 bytes wide.
  int byte_width() const { return byte_width_; }
  void Print() const;

 private:
  LifetimePosition End() const { return end_position_; }
  bool IsIntersectingWith(SpillRange* other) const;
  // Merge intervals, making sure the use intervals are sorted
  void MergeDisjointIntervals(UseInterval* other);

  ZoneVector<TopLevelLiveRange*> live_ranges_;
  UseInterval* use_interval_;
  LifetimePosition end_position_;
  int assigned_slot_;
  int byte_width_;

  DISALLOW_COPY_AND_ASSIGN(SpillRange);
};
782 :
783 : class RegisterAllocationData final : public ZoneObject {
784 : public:
  // Per-phi bookkeeping: the phi instruction, its block, the incoming
  // operands, and the register eventually assigned (used for hinting).
  class PhiMapValue : public ZoneObject {
   public:
    PhiMapValue(PhiInstruction* phi, const InstructionBlock* block, Zone* zone);

    const PhiInstruction* phi() const { return phi_; }
    const InstructionBlock* block() const { return block_; }

    // For hinting.
    int assigned_register() const { return assigned_register_; }
    // The register may be set only while still unassigned.
    void set_assigned_register(int register_code) {
      DCHECK_EQ(assigned_register_, kUnassignedRegister);
      assigned_register_ = register_code;
    }
    void UnsetAssignedRegister() { assigned_register_ = kUnassignedRegister; }

    void AddOperand(InstructionOperand* operand);
    void CommitAssignment(const InstructionOperand& operand);

   private:
    PhiInstruction* const phi_;
    const InstructionBlock* const block_;
    ZoneVector<InstructionOperand*> incoming_operands_;
    int assigned_register_;
  };
809 : typedef ZoneMap<int, PhiMapValue*> PhiMap;
810 :
811 : struct DelayedReference {
812 : ReferenceMap* map;
813 : InstructionOperand* operand;
814 : };
815 : typedef ZoneVector<DelayedReference> DelayedReferences;
816 : typedef ZoneVector<std::pair<TopLevelLiveRange*, int>>
817 : RangesWithPreassignedSlots;
818 :
  RegisterAllocationData(const RegisterConfiguration* config,
                         Zone* allocation_zone, Frame* frame,
                         InstructionSequence* code,
                         const char* debug_name = nullptr);

  // Live ranges, plus the fixed ranges that model preassigned physical
  // registers — one vector per floating-point representation.
  const ZoneVector<TopLevelLiveRange*>& live_ranges() const {
    return live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; }
  const ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() const {
    return fixed_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() {
    return fixed_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_float_live_ranges() {
    return fixed_float_live_ranges_;
  }
  const ZoneVector<TopLevelLiveRange*>& fixed_float_live_ranges() const {
    return fixed_float_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() {
    return fixed_double_live_ranges_;
  }
  const ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() const {
    return fixed_double_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_simd128_live_ranges() {
    return fixed_simd128_live_ranges_;
  }
  const ZoneVector<TopLevelLiveRange*>& fixed_simd128_live_ranges() const {
    return fixed_simd128_live_ranges_;
  }
  ZoneVector<BitVector*>& live_in_sets() { return live_in_sets_; }
  ZoneVector<BitVector*>& live_out_sets() { return live_out_sets_; }
  ZoneVector<SpillRange*>& spill_ranges() { return spill_ranges_; }
  DelayedReferences& delayed_references() { return delayed_references_; }
  InstructionSequence* code() const { return code_; }
  // This zone is for data structures only needed during register allocation
  // phases.
  Zone* allocation_zone() const { return allocation_zone_; }
  // This zone is for InstructionOperands and moves that live beyond register
  // allocation.
  Zone* code_zone() const { return code()->zone(); }
  Frame* frame() const { return frame_; }
  const char* debug_name() const { return debug_name_; }
  const RegisterConfiguration* config() const { return config_; }

  MachineRepresentation RepresentationFor(int virtual_register);

  // Returns the live range for the given virtual register, creating it first
  // if necessary.
  TopLevelLiveRange* GetOrCreateLiveRangeFor(int index);
  // Creates a new live range.
  TopLevelLiveRange* NewLiveRange(int index, MachineRepresentation rep);
  TopLevelLiveRange* NextLiveRange(MachineRepresentation rep);

  SpillRange* AssignSpillRangeToLiveRange(TopLevelLiveRange* range);
  SpillRange* CreateSpillRangeForLiveRange(TopLevelLiveRange* range);

  MoveOperands* AddGapMove(int index, Instruction::GapPosition position,
                           const InstructionOperand& from,
                           const InstructionOperand& to);

  // True iff the range's virtual register holds a tagged (GC-visible) value,
  // as recorded in the instruction sequence.
  bool IsReference(TopLevelLiveRange* top_range) const {
    return code()->IsReference(top_range->vreg());
  }

  // Debug-style sanity checks over the built ranges.
  bool ExistsUseWithoutDefinition();
  bool RangesDefinedInDeferredStayInDeferred();

  // Track uses/assignments of fixed (physical) registers by register index
  // and representation.
  void MarkFixedUse(MachineRepresentation rep, int index);
  bool HasFixedUse(MachineRepresentation rep, int index);

  void MarkAllocated(MachineRepresentation rep, int index);

  PhiMapValue* InitializePhiMap(const InstructionBlock* block,
                                PhiInstruction* phi);
  PhiMapValue* GetPhiMapValueFor(TopLevelLiveRange* top_range);
  PhiMapValue* GetPhiMapValueFor(int virtual_register);
  bool IsBlockBoundary(LifetimePosition pos) const;

  RangesWithPreassignedSlots& preassigned_slot_ranges() {
    return preassigned_slot_ranges_;
  }
902 :
 private:
  int GetNextLiveRangeId();

  Zone* const allocation_zone_;
  Frame* const frame_;
  InstructionSequence* const code_;
  const char* const debug_name_;
  const RegisterConfiguration* const config_;
  PhiMap phi_map_;
  // Per-block liveness sets, indexed by block (see live_in_sets()).
  ZoneVector<BitVector*> live_in_sets_;
  ZoneVector<BitVector*> live_out_sets_;
  ZoneVector<TopLevelLiveRange*> live_ranges_;
  // Fixed ranges model preassigned physical registers, one vector per
  // representation (general, float32, float64, simd128).
  ZoneVector<TopLevelLiveRange*> fixed_live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_float_live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_double_live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_simd128_live_ranges_;
  ZoneVector<SpillRange*> spill_ranges_;
  DelayedReferences delayed_references_;
  // Bit i set => register i was assigned/used; separate sets for general and
  // double registers, and for fixed-register uses.
  BitVector* assigned_registers_;
  BitVector* assigned_double_registers_;
  BitVector* fixed_register_use_;
  BitVector* fixed_fp_register_use_;
  int virtual_register_count_;
  RangesWithPreassignedSlots preassigned_slot_ranges_;

  DISALLOW_COPY_AND_ASSIGN(RegisterAllocationData);
929 : };
930 :
// Inserts the moves required by fixed-register operand constraints and
// deconstructs SSA form (phases 1 and 2 of register allocation).
class ConstraintBuilder final : public ZoneObject {
 public:
  explicit ConstraintBuilder(RegisterAllocationData* data);

  // Phase 1: insert moves to account for fixed register operands.
  void MeetRegisterConstraints();

  // Phase 2: deconstruct SSA by inserting moves in successors and the headers
  // of blocks containing phis.
  void ResolvePhis();

 private:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* allocation_zone() const { return data()->allocation_zone(); }

  InstructionOperand* AllocateFixed(UnallocatedOperand* operand, int pos,
                                    bool is_tagged, bool is_input);
  // Per-block / per-instruction workers for MeetRegisterConstraints().
  void MeetRegisterConstraints(const InstructionBlock* block);
  void MeetConstraintsBefore(int index);
  void MeetConstraintsAfter(int index);
  void MeetRegisterConstraintsForLastInstructionInBlock(
      const InstructionBlock* block);
  void ResolvePhis(const InstructionBlock* block);

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(ConstraintBuilder);
};
960 :
// Computes live ranges for all virtual registers (phase 3).
class LiveRangeBuilder final : public ZoneObject {
 public:
  explicit LiveRangeBuilder(RegisterAllocationData* data, Zone* local_zone);

  // Phase 3: compute liveness of all virtual registers.
  void BuildLiveRanges();
  static BitVector* ComputeLiveOut(const InstructionBlock* block,
                                   RegisterAllocationData* data);

 private:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* allocation_zone() const { return data()->allocation_zone(); }
  Zone* code_zone() const { return code()->zone(); }
  const RegisterConfiguration* config() const { return data()->config(); }
  ZoneVector<BitVector*>& live_in_sets() const {
    return data()->live_in_sets();
  }

  // Verification.
  void Verify() const;
  bool IntervalStartsAtBlockBoundary(const UseInterval* interval) const;
  bool IntervalPredecessorsCoveredByRange(const UseInterval* interval,
                                          const TopLevelLiveRange* range) const;
  bool NextIntervalStartsInDifferentBlocks(const UseInterval* interval) const;

  // Liveness analysis support.
  void AddInitialIntervals(const InstructionBlock* block, BitVector* live_out);
  void ProcessInstructions(const InstructionBlock* block, BitVector* live);
  void ProcessPhis(const InstructionBlock* block, BitVector* live);
  void ProcessLoopHeader(const InstructionBlock* block, BitVector* live);

  // Fixed (physical) register ranges use negative ids so they never collide
  // with virtual register numbers: register index i maps to -i - 1.
  static int FixedLiveRangeID(int index) { return -index - 1; }
  int FixedFPLiveRangeID(int index, MachineRepresentation rep);
  TopLevelLiveRange* FixedLiveRangeFor(int index);
  TopLevelLiveRange* FixedFPLiveRangeFor(int index, MachineRepresentation rep);

  void MapPhiHint(InstructionOperand* operand, UsePosition* use_pos);
  void ResolvePhiHint(InstructionOperand* operand, UsePosition* use_pos);

  UsePosition* NewUsePosition(LifetimePosition pos, InstructionOperand* operand,
                              void* hint, UsePositionHintType hint_type);
  // Convenience overload: a use position with no operand and no hint.
  UsePosition* NewUsePosition(LifetimePosition pos) {
    return NewUsePosition(pos, nullptr, nullptr, UsePositionHintType::kNone);
  }
  TopLevelLiveRange* LiveRangeFor(InstructionOperand* operand);
  // Helper methods for building intervals.
  UsePosition* Define(LifetimePosition position, InstructionOperand* operand,
                      void* hint, UsePositionHintType hint_type);
  void Define(LifetimePosition position, InstructionOperand* operand) {
    Define(position, operand, nullptr, UsePositionHintType::kNone);
  }
  UsePosition* Use(LifetimePosition block_start, LifetimePosition position,
                   InstructionOperand* operand, void* hint,
                   UsePositionHintType hint_type);
  void Use(LifetimePosition block_start, LifetimePosition position,
           InstructionOperand* operand) {
    Use(block_start, position, operand, nullptr, UsePositionHintType::kNone);
  }

  RegisterAllocationData* const data_;
  // Maps phi input operands to the use positions that should receive their
  // resolved hints (see MapPhiHint / ResolvePhiHint).
  ZoneMap<InstructionOperand*, UsePosition*> phi_hints_;

  DISALLOW_COPY_AND_ASSIGN(LiveRangeBuilder);
};
1026 :
1027 : class BundleBuilder final : public ZoneObject {
1028 : public:
1029 2949658 : explicit BundleBuilder(RegisterAllocationData* data) : data_(data) {}
1030 :
1031 : void BuildBundles();
1032 :
1033 : private:
1034 : RegisterAllocationData* data() const { return data_; }
1035 23534558 : InstructionSequence* code() const { return data_->code(); }
1036 : RegisterAllocationData* data_;
1037 : int next_bundle_id_ = 0;
1038 : };
1039 :
1040 : class RegisterAllocator : public ZoneObject {
1041 : public:
1042 : RegisterAllocator(RegisterAllocationData* data, RegisterKind kind);
1043 :
1044 : protected:
1045 : RegisterAllocationData* data() const { return data_; }
1046 22294132 : InstructionSequence* code() const { return data()->code(); }
1047 : RegisterKind mode() const { return mode_; }
1048 : int num_registers() const { return num_registers_; }
1049 : int num_allocatable_registers() const { return num_allocatable_registers_; }
1050 : const int* allocatable_register_codes() const {
1051 : return allocatable_register_codes_;
1052 : }
1053 : // Returns true iff. we must check float register aliasing.
1054 : bool check_fp_aliasing() const { return check_fp_aliasing_; }
1055 :
1056 : // TODO(mtrofin): explain why splitting in gap START is always OK.
1057 : LifetimePosition GetSplitPositionForInstruction(const LiveRange* range,
1058 : int instruction_index);
1059 :
1060 22707933 : Zone* allocation_zone() const { return data()->allocation_zone(); }
1061 :
1062 : // Find the optimal split for ranges defined by a memory operand, e.g.
1063 : // constants or function parameters passed on the stack.
1064 : void SplitAndSpillRangesDefinedByMemoryOperand();
1065 :
1066 : // Split the given range at the given position.
1067 : // If range starts at or after the given position then the
1068 : // original range is returned.
1069 : // Otherwise returns the live range that starts at pos and contains
1070 : // all uses from the original range that follow pos. Uses at pos will
1071 : // still be owned by the original range after splitting.
1072 : LiveRange* SplitRangeAt(LiveRange* range, LifetimePosition pos);
1073 :
1074 118184088 : bool CanProcessRange(LiveRange* range) const {
1075 478108892 : return range != nullptr && !range->IsEmpty() && range->kind() == mode();
1076 : }
1077 :
1078 : // Split the given range in a position from the interval [start, end].
1079 : LiveRange* SplitBetween(LiveRange* range, LifetimePosition start,
1080 : LifetimePosition end);
1081 :
1082 : // Find a lifetime position in the interval [start, end] which
1083 : // is optimal for splitting: it is either header of the outermost
1084 : // loop covered by this interval or the latest possible position.
1085 : LifetimePosition FindOptimalSplitPos(LifetimePosition start,
1086 : LifetimePosition end);
1087 :
1088 : void Spill(LiveRange* range);
1089 :
1090 : // If we are trying to spill a range inside the loop try to
1091 : // hoist spill position out to the point just before the loop.
1092 : LifetimePosition FindOptimalSpillingPos(LiveRange* range,
1093 : LifetimePosition pos);
1094 :
1095 : const ZoneVector<TopLevelLiveRange*>& GetFixedRegisters() const;
1096 : const char* RegisterName(int allocation_index) const;
1097 :
1098 : private:
1099 : RegisterAllocationData* const data_;
1100 : const RegisterKind mode_;
1101 : const int num_registers_;
1102 : int num_allocatable_registers_;
1103 : const int* allocatable_register_codes_;
1104 : bool check_fp_aliasing_;
1105 :
1106 : private:
1107 : bool no_combining_;
1108 :
1109 : DISALLOW_COPY_AND_ASSIGN(RegisterAllocator);
1110 : };
1111 :
// Linear-scan register assignment (phase 4): walks ranges ordered by start
// position, keeping active/inactive/unhandled sets in the classic fashion.
class LinearScanAllocator final : public RegisterAllocator {
 public:
  LinearScanAllocator(RegisterAllocationData* data, RegisterKind kind,
                      Zone* local_zone);

  // Phase 4: compute register assignments.
  void AllocateRegisters();

 private:
  // Orders the unhandled queue by LiveRange::ShouldBeAllocatedBefore.
  struct LiveRangeOrdering {
    bool operator()(const LiveRange* a, const LiveRange* b) const {
      return a->ShouldBeAllocatedBefore(b);
    }
  };
  using LiveRangeQueue = ZoneMultiset<LiveRange*, LiveRangeOrdering>;
  LiveRangeQueue& unhandled_live_ranges() { return unhandled_live_ranges_; }
  ZoneVector<LiveRange*>& active_live_ranges() { return active_live_ranges_; }
  ZoneVector<LiveRange*>& inactive_live_ranges() {
    return inactive_live_ranges_;
  }

  void SetLiveRangeAssignedRegister(LiveRange* range, int reg);

  // Helper methods for updating the live range lists.
  void AddToActive(LiveRange* range);
  void AddToInactive(LiveRange* range);
  void AddToUnhandled(LiveRange* range);
  // The To* transitions return the iterator following the removed element,
  // so callers can keep iterating over the source list.
  ZoneVector<LiveRange*>::iterator ActiveToHandled(
      ZoneVector<LiveRange*>::iterator it);
  ZoneVector<LiveRange*>::iterator ActiveToInactive(
      ZoneVector<LiveRange*>::iterator it, LifetimePosition position);
  ZoneVector<LiveRange*>::iterator InactiveToHandled(
      ZoneVector<LiveRange*>::iterator it);
  ZoneVector<LiveRange*>::iterator InactiveToActive(
      ZoneVector<LiveRange*>::iterator it, LifetimePosition position);

  void ForwardStateTo(LifetimePosition position);

  // Helper methods for allocating registers.
  bool TryReuseSpillForPhi(TopLevelLiveRange* range);
  bool TryAllocateFreeReg(LiveRange* range,
                          const Vector<LifetimePosition>& free_until_pos);
  bool TryAllocatePreferredReg(LiveRange* range,
                               const Vector<LifetimePosition>& free_until_pos);
  void GetFPRegisterSet(MachineRepresentation rep, int* num_regs,
                        int* num_codes, const int** codes) const;
  void FindFreeRegistersForRange(LiveRange* range,
                                 Vector<LifetimePosition> free_until_pos);
  void ProcessCurrentRange(LiveRange* current);
  void AllocateBlockedReg(LiveRange* range);
  bool TrySplitAndSpillSplinter(LiveRange* range);

  // Spill the given live range after position pos.
  void SpillAfter(LiveRange* range, LifetimePosition pos);

  // Spill the given live range after position [start] and up to position
  // [end].
  void SpillBetween(LiveRange* range, LifetimePosition start,
                    LifetimePosition end);

  // Spill the given live range after position [start] and up to position
  // [end]. Range is guaranteed to be spilled at least until position [until].
  void SpillBetweenUntil(LiveRange* range, LifetimePosition start,
                         LifetimePosition until, LifetimePosition end);

  void SplitAndSpillIntersecting(LiveRange* range);

  // Tracing helpers for dumping the allocation state.
  void PrintRangeRow(std::ostream& os, const TopLevelLiveRange* toplevel);

  void PrintRangeOverview(std::ostream& os);

  LiveRangeQueue unhandled_live_ranges_;
  ZoneVector<LiveRange*> active_live_ranges_;
  ZoneVector<LiveRange*> inactive_live_ranges_;

  // Approximate at what position the set of ranges will change next.
  // Used to avoid scanning for updates even if none are present.
  LifetimePosition next_active_ranges_change_;
  LifetimePosition next_inactive_ranges_change_;

#ifdef DEBUG
  LifetimePosition allocation_finger_;
#endif

  DISALLOW_COPY_AND_ASSIGN(LinearScanAllocator);
};
1197 :
// Determines where spill slots are needed; see LocateSpillSlots().
class SpillSlotLocator final : public ZoneObject {
 public:
  explicit SpillSlotLocator(RegisterAllocationData* data);

  void LocateSpillSlots();

 private:
  RegisterAllocationData* data() const { return data_; }

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(SpillSlotLocator);
};
1211 :
// Assigns spill slots and commits the final operand assignment
// (phases 5 and 6).
class OperandAssigner final : public ZoneObject {
 public:
  explicit OperandAssigner(RegisterAllocationData* data);

  // Phase 5: assign spill slots.
  void AssignSpillSlots();

  // Phase 6: commit assignment.
  void CommitAssignment();

 private:
  RegisterAllocationData* data() const { return data_; }

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(OperandAssigner);
};
1229 :
// Fills in reference (pointer) maps once allocation is known (phase 7).
class ReferenceMapPopulator final : public ZoneObject {
 public:
  explicit ReferenceMapPopulator(RegisterAllocationData* data);

  // Phase 7: compute values for pointer maps.
  void PopulateReferenceMaps();

 private:
  RegisterAllocationData* data() const { return data_; }

  // Debug check that safe points appear in instruction order.
  bool SafePointsAreInOrder() const;

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(ReferenceMapPopulator);
};
1246 :
1247 : class LiveRangeBoundArray;
1248 : // Insert moves of the form
1249 : //
1250 : // Operand(child_(k+1)) = Operand(child_k)
1251 : //
1252 : // where child_k and child_(k+1) are consecutive children of a range (so
1253 : // child_k->next() == child_(k+1)), and Operand(...) refers to the
1254 : // assigned operand, be it a register or a slot.
class LiveRangeConnector final : public ZoneObject {
 public:
  explicit LiveRangeConnector(RegisterAllocationData* data);

  // Phase 8: reconnect split ranges with moves, when the control flow
  // between the ranges is trivial (no branches).
  void ConnectRanges(Zone* local_zone);

  // Phase 9: insert moves to connect ranges across basic blocks, when the
  // control flow between them cannot be trivially resolved, such as joining
  // branches.
  void ResolveControlFlow(Zone* local_zone);

 private:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* code_zone() const { return code()->zone(); }

  // True when a connecting move can be inserted directly (no control-flow
  // join requiring per-predecessor resolution).
  bool CanEagerlyResolveControlFlow(const InstructionBlock* block) const;

  // Inserts the move connecting pred_op (in pred) to cur_op (in block);
  // returns the gap index used for the move.
  int ResolveControlFlow(const InstructionBlock* block,
                         const InstructionOperand& cur_op,
                         const InstructionBlock* pred,
                         const InstructionOperand& pred_op);

  void CommitSpillsInDeferredBlocks(TopLevelLiveRange* range,
                                    LiveRangeBoundArray* array,
                                    Zone* temp_zone);

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(LiveRangeConnector);
};
1288 :
1289 : } // namespace compiler
1290 : } // namespace internal
1291 : } // namespace v8
1292 :
1293 : #endif // V8_COMPILER_BACKEND_REGISTER_ALLOCATOR_H_
|