// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_BACKEND_REGISTER_ALLOCATOR_H_
#define V8_COMPILER_BACKEND_REGISTER_ALLOCATOR_H_

#include "src/base/bits.h"
#include "src/base/compiler-specific.h"
#include "src/compiler/backend/instruction.h"
#include "src/globals.h"
#include "src/ostreams.h"
#include "src/register-configuration.h"
#include "src/zone/zone-containers.h"

namespace v8 {
namespace internal {
namespace compiler {

enum RegisterKind { GENERAL_REGISTERS, FP_REGISTERS };

// This class represents a single point of an InstructionOperand's lifetime.
// For each instruction there are four lifetime positions:
//
//   [[START, END], [START, END]]
//
// where the first half position corresponds to
//
//   [GapPosition::START, GapPosition::END]
//
// and the second half position corresponds to
//
//   [Lifetime::USED_AT_START, Lifetime::USED_AT_END]
//
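// For example, with the values of kHalfStep (2) and kStep (4) defined at the
// bottom of this class, the four lifetime positions of the instruction with
// index 1 are encoded as
//
//   4 (gap START), 5 (gap END), 6 (instruction START), 7 (instruction END)
//
// i.e. GapFromInstructionIndex(1).value() == 4 and
// InstructionFromInstructionIndex(1).value() == 6.
//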
class LifetimePosition final {
 public:
  // Return the lifetime position that corresponds to the beginning of
  // the gap with the given index.
  static LifetimePosition GapFromInstructionIndex(int index) {
    return LifetimePosition(index * kStep);
  }
  // Return the lifetime position that corresponds to the beginning of
  // the instruction with the given index.
  static LifetimePosition InstructionFromInstructionIndex(int index) {
    return LifetimePosition(index * kStep + kHalfStep);
  }

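  // Returns true if there is a gap position strictly between pos1 and pos2,
  // i.e. a position at which moves between live ranges could be inserted.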
  static bool ExistsGapPositionBetween(LifetimePosition pos1,
                                       LifetimePosition pos2) {
    if (pos1 > pos2) std::swap(pos1, pos2);
    LifetimePosition next(pos1.value_ + 1);
    if (next.IsGapPosition()) return next < pos2;
    return next.NextFullStart() < pos2;
  }

  // Returns a numeric representation of this lifetime position.
  int value() const { return value_; }

  // Returns the index of the instruction to which this lifetime position
  // corresponds.
  int ToInstructionIndex() const {
    DCHECK(IsValid());
    return value_ / kStep;
  }

  // Returns true if this lifetime position corresponds to a START value
  bool IsStart() const { return (value_ & (kHalfStep - 1)) == 0; }
  // Returns true if this lifetime position corresponds to an END value
  bool IsEnd() const { return (value_ & (kHalfStep - 1)) == 1; }
  // Returns true if this lifetime position corresponds to a gap START value
  bool IsFullStart() const { return (value_ & (kStep - 1)) == 0; }

  bool IsGapPosition() const { return (value_ & 0x2) == 0; }
  bool IsInstructionPosition() const { return !IsGapPosition(); }

  // Returns the lifetime position for the current START.
  LifetimePosition Start() const {
    DCHECK(IsValid());
    return LifetimePosition(value_ & ~(kHalfStep - 1));
  }

  // Returns the lifetime position for the current gap START.
  LifetimePosition FullStart() const {
    DCHECK(IsValid());
    return LifetimePosition(value_ & ~(kStep - 1));
  }

  // Returns the lifetime position for the current END.
  LifetimePosition End() const {
    DCHECK(IsValid());
    return LifetimePosition(Start().value_ + kHalfStep / 2);
  }

  // Returns the lifetime position for the beginning of the next START.
  LifetimePosition NextStart() const {
    DCHECK(IsValid());
    return LifetimePosition(Start().value_ + kHalfStep);
  }

  // Returns the lifetime position for the beginning of the next gap START.
  LifetimePosition NextFullStart() const {
    DCHECK(IsValid());
    return LifetimePosition(FullStart().value_ + kStep);
  }

  // Returns the lifetime position for the beginning of the previous START.
  LifetimePosition PrevStart() const {
    DCHECK(IsValid());
    DCHECK_LE(kHalfStep, value_);
    return LifetimePosition(Start().value_ - kHalfStep);
  }

  // Constructs the lifetime position which does not correspond to any
  // instruction.
  LifetimePosition() : value_(-1) {}

  // Returns true if this lifetime position corresponds to some
  // instruction.
  bool IsValid() const { return value_ != -1; }

  bool operator<(const LifetimePosition& that) const {
    return this->value_ < that.value_;
  }

  bool operator<=(const LifetimePosition& that) const {
    return this->value_ <= that.value_;
  }

  bool operator==(const LifetimePosition& that) const {
    return this->value_ == that.value_;
  }

  bool operator!=(const LifetimePosition& that) const {
    return this->value_ != that.value_;
  }

  bool operator>(const LifetimePosition& that) const {
    return this->value_ > that.value_;
  }

  bool operator>=(const LifetimePosition& that) const {
    return this->value_ >= that.value_;
  }

  void Print() const;

  static inline LifetimePosition Invalid() { return LifetimePosition(); }

  static inline LifetimePosition MaxPosition() {
    // We have to use this kind of getter instead of a static member due to
    // a crash bug in GDB.
    return LifetimePosition(kMaxInt);
  }

  static inline LifetimePosition FromInt(int value) {
    return LifetimePosition(value);
  }

 private:
  static const int kHalfStep = 2;
  static const int kStep = 2 * kHalfStep;

  static_assert(base::bits::IsPowerOfTwo(kHalfStep),
                "Code relies on kStep and kHalfStep being powers of two");

  explicit LifetimePosition(int value) : value_(value) {}

  int value_;
};

std::ostream& operator<<(std::ostream& os, const LifetimePosition pos);

// Representation of the non-empty interval [start,end[.
class UseInterval final : public ZoneObject {
 public:
  UseInterval(LifetimePosition start, LifetimePosition end)
      : start_(start), end_(end), next_(nullptr) {
    DCHECK(start < end);
  }

  LifetimePosition start() const { return start_; }
  void set_start(LifetimePosition start) { start_ = start; }
  LifetimePosition end() const { return end_; }
  void set_end(LifetimePosition end) { end_ = end; }
  UseInterval* next() const { return next_; }
  void set_next(UseInterval* next) { next_ = next; }

  // Split this interval at the given position without affecting the
  // live range that owns it. The interval must contain the position.
  UseInterval* SplitAt(LifetimePosition pos, Zone* zone);

  // If this interval intersects with the other, returns the smallest position
  // that belongs to both of them; otherwise returns an invalid position.
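  // For example, [4,16[ and [12,24[ intersect at position 12, while
  // [4,8[ and [12,24[ do not intersect and yield an invalid position.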
  LifetimePosition Intersect(const UseInterval* other) const {
    if (other->start() < start_) return other->Intersect(this);
    if (other->start() < end_) return other->start();
    return LifetimePosition::Invalid();
  }

  bool Contains(LifetimePosition point) const {
    return start_ <= point && point < end_;
  }

  // Returns the index of the first gap covered by this interval.
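  // For example, an interval starting at the instruction START of
  // instruction 3 (value 14) first covers the gap of instruction 4.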
  int FirstGapIndex() const {
    int ret = start_.ToInstructionIndex();
    if (start_.IsInstructionPosition()) {
      ++ret;
    }
    return ret;
  }

  // Returns the index of the last gap covered by this interval.
215 : int ret = end_.ToInstructionIndex();
216 65861470 : if (end_.IsGapPosition() && end_.IsStart()) {
217 9084924 : --ret;
218 : }
219 : return ret;
220 : }
221 :
222 : private:
223 : LifetimePosition start_;
224 : LifetimePosition end_;
225 : UseInterval* next_;
226 :
227 : DISALLOW_COPY_AND_ASSIGN(UseInterval);
228 : };
229 :
230 : enum class UsePositionType : uint8_t {
231 : kRegisterOrSlot,
232 : kRegisterOrSlotOrConstant,
233 : kRequiresRegister,
234 : kRequiresSlot
235 : };
236 :
237 : enum class UsePositionHintType : uint8_t {
238 : kNone,
239 : kOperand,
240 : kUsePos,
241 : kPhi,
242 : kUnresolved
243 : };
244 :
245 : static const int32_t kUnassignedRegister = RegisterConfiguration::kMaxRegisters;
246 :
247 : // Representation of a use position.
248 : class V8_EXPORT_PRIVATE UsePosition final
249 : : public NON_EXPORTED_BASE(ZoneObject) {
250 : public:
251 : UsePosition(LifetimePosition pos, InstructionOperand* operand, void* hint,
252 : UsePositionHintType hint_type);
253 :
254 : InstructionOperand* operand() const { return operand_; }
255 : bool HasOperand() const { return operand_ != nullptr; }
256 :
257 : bool RegisterIsBeneficial() const {
258 : return RegisterBeneficialField::decode(flags_);
259 : }
260 : UsePositionType type() const { return TypeField::decode(flags_); }
261 : void set_type(UsePositionType type, bool register_beneficial);
262 :
263 : LifetimePosition pos() const { return pos_; }
264 :
265 : UsePosition* next() const { return next_; }
266 165509847 : void set_next(UsePosition* next) { next_ = next; }
267 :
268 : // For hinting only.
269 : void set_assigned_register(int register_code) {
270 95235683 : flags_ = AssignedRegisterField::update(flags_, register_code);
271 : }
272 :
273 : UsePositionHintType hint_type() const {
274 : return HintTypeField::decode(flags_);
275 : }
276 : bool HasHint() const;
277 : bool HintRegister(int* register_code) const;
278 : void SetHint(UsePosition* use_pos);
279 : void ResolveHint(UsePosition* use_pos);
280 : bool IsResolved() const {
281 : return hint_type() != UsePositionHintType::kUnresolved;
282 : }
283 : static UsePositionHintType HintTypeForOperand(const InstructionOperand& op);
284 :
285 : private:
286 : typedef BitField<UsePositionType, 0, 2> TypeField;
287 : typedef BitField<UsePositionHintType, 2, 3> HintTypeField;
288 : typedef BitField<bool, 5, 1> RegisterBeneficialField;
289 : typedef BitField<int32_t, 6, 6> AssignedRegisterField;
290 :
291 : InstructionOperand* const operand_;
292 : void* hint_;
293 : UsePosition* next_;
294 : LifetimePosition const pos_;
295 : uint32_t flags_;
296 :
297 : DISALLOW_COPY_AND_ASSIGN(UsePosition);
298 : };
299 :
300 : class SpillRange;
301 : class RegisterAllocationData;
302 : class TopLevelLiveRange;
303 : class LiveRangeBundle;
304 :
305 : // Representation of SSA values' live ranges as a collection of (continuous)
306 : // intervals over the instruction ordering.
307 : class V8_EXPORT_PRIVATE LiveRange : public NON_EXPORTED_BASE(ZoneObject) {
308 : public:
309 : UseInterval* first_interval() const { return first_interval_; }
310 : UsePosition* first_pos() const { return first_pos_; }
311 : TopLevelLiveRange* TopLevel() { return top_level_; }
312 : const TopLevelLiveRange* TopLevel() const { return top_level_; }
313 :
314 : bool IsTopLevel() const;
315 :
316 : LiveRange* next() const { return next_; }
317 :
318 : int relative_id() const { return relative_id_; }
319 :
320 : bool IsEmpty() const { return first_interval() == nullptr; }
321 :
322 : InstructionOperand GetAssignedOperand() const;
323 :
324 : MachineRepresentation representation() const {
325 : return RepresentationField::decode(bits_);
326 : }
327 :
328 : int assigned_register() const { return AssignedRegisterField::decode(bits_); }
329 : bool HasRegisterAssigned() const {
330 : return assigned_register() != kUnassignedRegister;
331 : }
332 : void set_assigned_register(int reg);
333 : void UnsetAssignedRegister();
334 :
335 : bool ShouldRecombine() const { return RecombineField::decode(bits_); }
336 :
337 0 : void SetRecombine() { bits_ = RecombineField::update(bits_, true); }
338 : void set_controlflow_hint(int reg) {
339 0 : bits_ = ControlFlowRegisterHint::update(bits_, reg);
340 : }
341 : int controlflow_hint() const {
342 : return ControlFlowRegisterHint::decode(bits_);
343 : }
344 : bool RegisterFromControlFlow(int* reg) {
345 : int hint = controlflow_hint();
346 86140187 : if (hint != kUnassignedRegister) {
347 0 : *reg = hint;
348 : return true;
349 : }
350 : return false;
351 : }
352 : bool spilled() const { return SpilledField::decode(bits_); }
353 : void AttachToNext();
354 : void Unspill();
355 : void Spill();
356 :
357 : RegisterKind kind() const;
358 :
359 : // Returns use position in this live range that follows both start
360 : // and last processed use position.
361 : UsePosition* NextUsePosition(LifetimePosition start) const;
362 :
363 : // Returns use position for which register is required in this live
364 : // range and which follows both start and last processed use position
365 : UsePosition* NextRegisterPosition(LifetimePosition start) const;
366 :
367 : // Returns the first use position requiring stack slot, or nullptr.
368 : UsePosition* NextSlotPosition(LifetimePosition start) const;
369 :
370 : // Returns use position for which register is beneficial in this live
371 : // range and which follows both start and last processed use position
372 : UsePosition* NextUsePositionRegisterIsBeneficial(
373 : LifetimePosition start) const;
374 :
375 : // Returns lifetime position for which register is beneficial in this live
376 : // range and which follows both start and last processed use position.
377 : LifetimePosition NextLifetimePositionRegisterIsBeneficial(
378 : const LifetimePosition& start) const;
379 :
380 : // Returns use position for which register is beneficial in this live
381 : // range and which precedes start.
382 : UsePosition* PreviousUsePositionRegisterIsBeneficial(
383 : LifetimePosition start) const;
384 :
385 : // Can this live range be spilled at this position.
386 : bool CanBeSpilled(LifetimePosition pos) const;
387 :
388 : // Splitting primitive used by both splitting and splintering members.
389 : // Performs the split, but does not link the resulting ranges.
390 : // The given position must follow the start of the range.
391 : // All uses following the given position will be moved from this
392 : // live range to the result live range.
393 : // The current range will terminate at position, while result will start from
394 : // position.
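  // For example, detaching a range covering [8,24[ at position 16 leaves
  // this range covering [8,16[ and gives [16,24[, together with the uses
  // following position 16, to the result range.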
  enum HintConnectionOption : bool {
    DoNotConnectHints = false,
    ConnectHints = true
  };
  UsePosition* DetachAt(LifetimePosition position, LiveRange* result,
                        Zone* zone, HintConnectionOption connect_hints);

  // Detaches at position, and then links the resulting ranges. Returns the
  // child, which starts at position.
  LiveRange* SplitAt(LifetimePosition position, Zone* zone);

  // Returns nullptr when no register is hinted, otherwise sets register_index.
  UsePosition* FirstHintPosition(int* register_index) const;
  UsePosition* FirstHintPosition() const {
    int register_index;
    return FirstHintPosition(&register_index);
  }

  UsePosition* current_hint_position() const {
    DCHECK(current_hint_position_ == FirstHintPosition());
    return current_hint_position_;
  }

  LifetimePosition Start() const {
    DCHECK(!IsEmpty());
    return first_interval()->start();
  }

  LifetimePosition End() const {
    DCHECK(!IsEmpty());
    return last_interval_->end();
  }

  bool ShouldBeAllocatedBefore(const LiveRange* other) const;
  bool CanCover(LifetimePosition position) const;
  bool Covers(LifetimePosition position) const;
  LifetimePosition NextStartAfter(LifetimePosition position) const;
  LifetimePosition NextEndAfter(LifetimePosition position) const;
  LifetimePosition FirstIntersection(LiveRange* other) const;

  void VerifyChildStructure() const {
    VerifyIntervals();
    VerifyPositions();
  }

  void ConvertUsesToOperand(const InstructionOperand& op,
                            const InstructionOperand& spill_op);
  void SetUseHints(int register_index);
  void UnsetUseHints() { SetUseHints(kUnassignedRegister); }

  void Print(const RegisterConfiguration* config, bool with_children) const;
  void Print(bool with_children) const;

  void set_bundle(LiveRangeBundle* bundle) { bundle_ = bundle; }
  LiveRangeBundle* get_bundle() const { return bundle_; }
  bool RegisterFromBundle(int* hint) const;
  void UpdateBundleRegister(int reg) const;

 private:
  friend class TopLevelLiveRange;
  explicit LiveRange(int relative_id, MachineRepresentation rep,
                     TopLevelLiveRange* top_level);

  void UpdateParentForAllChildren(TopLevelLiveRange* new_top_level);

  void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); }

  UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
  void AdvanceLastProcessedMarker(UseInterval* to_start_of,
                                  LifetimePosition but_not_past) const;

  void VerifyPositions() const;
  void VerifyIntervals() const;

  typedef BitField<bool, 0, 1> SpilledField;
  // Bits (1,7] are used by TopLevelLiveRange.
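  // (HasSlotUseField: bits 1-2, IsPhiField: bit 3, IsNonLoopPhiField: bit 4,
  // SpillTypeField: bits 5-6; see TopLevelLiveRange below.)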
  typedef BitField<int32_t, 7, 6> AssignedRegisterField;
  typedef BitField<MachineRepresentation, 13, 8> RepresentationField;
  typedef BitField<bool, 21, 1> RecombineField;
  typedef BitField<uint8_t, 22, 6> ControlFlowRegisterHint;

  // Unique among children and splinters of the same virtual register.
  int relative_id_;
  uint32_t bits_;
  UseInterval* last_interval_;
  UseInterval* first_interval_;
  UsePosition* first_pos_;
  TopLevelLiveRange* top_level_;
  LiveRange* next_;
  // This is used as a cache, it doesn't affect correctness.
  mutable UseInterval* current_interval_;
  // This is used as a cache, it doesn't affect correctness.
  mutable UsePosition* last_processed_use_;
  // This is used as a cache, it's invalid outside of BuildLiveRanges.
  mutable UsePosition* current_hint_position_;
  // Cache the last position splintering stopped at.
  mutable UsePosition* splitting_pointer_;
  LiveRangeBundle* bundle_ = nullptr;

  DISALLOW_COPY_AND_ASSIGN(LiveRange);
};

struct LiveRangeOrdering {
  bool operator()(const LiveRange* left, const LiveRange* right) const {
    return left->Start() < right->Start();
  }
};

class LiveRangeBundle : public ZoneObject {
 public:
  void MergeSpillRanges();

  int id() { return id_; }

  int reg() { return reg_; }

  void set_reg(int reg) {
    DCHECK_EQ(reg_, kUnassignedRegister);
    reg_ = reg;
  }

 private:
  friend class BundleBuilder;

  class Range {
   public:
    Range(int s, int e) : start(s), end(e) {}
    Range(LifetimePosition s, LifetimePosition e)
        : start(s.value()), end(e.value()) {}
    int start;
    int end;
  };

  struct RangeOrdering {
    bool operator()(const Range left, const Range right) const {
      return left.start < right.start;
    }
  };
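  // Returns true if any interval in the given chain overlaps one of the
  // [start, end[ pairs recorded in uses_. Both sequences are sorted by start
  // position, so a single linear walk over the two of them suffices.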
  bool UsesOverlap(UseInterval* interval) {
    auto use = uses_.begin();
    while (interval != nullptr && use != uses_.end()) {
      if (use->end <= interval->start().value()) {
        ++use;
      } else if (interval->end().value() <= use->start) {
        interval = interval->next();
      } else {
        return true;
      }
    }
    return false;
  }
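  // Records all intervals of the given chain in uses_. The intervals are
  // expected not to overlap anything already present (see UsesOverlap).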
  void InsertUses(UseInterval* interval) {
    while (interval != nullptr) {
      auto done = uses_.insert({interval->start(), interval->end()});
      USE(done);
      DCHECK_EQ(done.second, 1);
      interval = interval->next();
    }
  }
  explicit LiveRangeBundle(Zone* zone, int id)
      : ranges_(zone), uses_(zone), id_(id) {}

  bool TryAddRange(LiveRange* range);
  bool TryMerge(LiveRangeBundle* other);

  ZoneSet<LiveRange*, LiveRangeOrdering> ranges_;
  ZoneSet<Range, RangeOrdering> uses_;
  int id_;
  int reg_ = kUnassignedRegister;
};

class V8_EXPORT_PRIVATE TopLevelLiveRange final : public LiveRange {
 public:
  explicit TopLevelLiveRange(int vreg, MachineRepresentation rep);
  int spill_start_index() const { return spill_start_index_; }

  bool IsFixed() const { return vreg_ < 0; }

  bool is_phi() const { return IsPhiField::decode(bits_); }
  void set_is_phi(bool value) { bits_ = IsPhiField::update(bits_, value); }

  bool is_non_loop_phi() const { return IsNonLoopPhiField::decode(bits_); }
  void set_is_non_loop_phi(bool value) {
    bits_ = IsNonLoopPhiField::update(bits_, value);
  }

  enum SlotUseKind { kNoSlotUse, kDeferredSlotUse, kGeneralSlotUse };

  bool has_slot_use() const {
    return slot_use_kind() > SlotUseKind::kNoSlotUse;
  }

  bool has_non_deferred_slot_use() const {
    return slot_use_kind() == SlotUseKind::kGeneralSlotUse;
  }

  void reset_slot_use() {
    bits_ = HasSlotUseField::update(bits_, SlotUseKind::kNoSlotUse);
  }
  void register_slot_use(SlotUseKind value) {
    bits_ = HasSlotUseField::update(bits_, Max(slot_use_kind(), value));
  }
  SlotUseKind slot_use_kind() const { return HasSlotUseField::decode(bits_); }

  // Add a new interval or a new use position to this live range.
  void EnsureInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
  void AddUseInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
  void AddUsePosition(UsePosition* pos);

  // Shorten the most recently added interval by setting a new start.
  void ShortenTo(LifetimePosition start);

  // Detaches between start and end, and attributes the resulting range to
  // result.
  // The current range is pointed to as "splintered_from". No parent/child
  // relationship is established between this and result.
  void Splinter(LifetimePosition start, LifetimePosition end, Zone* zone);

  // Assuming other was splintered from this range, embeds other and its
  // children as part of the children sequence of this range.
  void Merge(TopLevelLiveRange* other, Zone* zone);

  // Spill range management.
  void SetSpillRange(SpillRange* spill_range);

  // Encodes whether a range is also available from a memory location:
  // kNoSpillType: not available in a memory location.
  // kSpillOperand: computed in a memory location at range start.
  // kSpillRange: copied (spilled) to memory location at range start.
  // kDeferredSpillRange: copied (spilled) to memory location at entry
  // to deferred blocks that have a use from memory.
  //
  // Ranges either start out at kSpillOperand, which is also their final
  // state, or kNoSpillType. When spilled only in deferred code, a range
  // ends up with kDeferredSpillRange, while when spilled in regular code,
  // a range will be tagged as kSpillRange.
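  //
  // Summarized as transitions:
  //
  //   kSpillOperand (initial and final)
  //   kNoSpillType -> kSpillRange          (spilled in regular code)
  //   kNoSpillType -> kDeferredSpillRange  (spilled only in deferred code)
  //   kDeferredSpillRange -> kSpillRange   (TransitionRangeToSpillAtDefinition)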
  enum class SpillType {
    kNoSpillType,
    kSpillOperand,
    kSpillRange,
    kDeferredSpillRange
  };
  void set_spill_type(SpillType value) {
    bits_ = SpillTypeField::update(bits_, value);
  }
  SpillType spill_type() const { return SpillTypeField::decode(bits_); }
  InstructionOperand* GetSpillOperand() const {
    DCHECK_EQ(SpillType::kSpillOperand, spill_type());
    return spill_operand_;
  }

  SpillRange* GetAllocatedSpillRange() const {
    DCHECK_NE(SpillType::kSpillOperand, spill_type());
    return spill_range_;
  }

  SpillRange* GetSpillRange() const {
    DCHECK_GE(spill_type(), SpillType::kSpillRange);
    return spill_range_;
  }
  bool HasNoSpillType() const {
    return spill_type() == SpillType::kNoSpillType;
  }
  bool HasSpillOperand() const {
    return spill_type() == SpillType::kSpillOperand;
  }
  bool HasSpillRange() const { return spill_type() >= SpillType::kSpillRange; }
  bool HasGeneralSpillRange() const {
    return spill_type() == SpillType::kSpillRange;
  }
  AllocatedOperand GetSpillRangeOperand() const;

  void RecordSpillLocation(Zone* zone, int gap_index,
                           InstructionOperand* operand);
  void SetSpillOperand(InstructionOperand* operand);
  void SetSpillStartIndex(int start) {
    spill_start_index_ = Min(start, spill_start_index_);
  }

  void CommitSpillMoves(InstructionSequence* sequence,
                        const InstructionOperand& operand,
                        bool might_be_duplicated);

  // If all the children of this range are spilled in deferred blocks, and if
  // for any non-spilled child with a use position requiring a slot, that
  // range is contained in a deferred block, mark the range as
  // IsSpilledOnlyInDeferredBlocks, so that we avoid spilling at definition,
  // and instead let the LiveRangeConnector perform the spills within the
  // deferred blocks. If so, we insert here spills for non-spilled ranges
  // with slot use positions.
  void TreatAsSpilledInDeferredBlock(Zone* zone, int total_block_count) {
    DCHECK(!FLAG_turbo_control_flow_aware_allocation);
    spill_start_index_ = -1;
    spilled_in_deferred_blocks_ = true;
    spill_move_insertion_locations_ = nullptr;
    list_of_blocks_requiring_spill_operands_ =
        new (zone) BitVector(total_block_count, zone);
  }

  // Updates internal data structures to reflect that this range is not
  // spilled at definition but instead spilled in some blocks only.
  void TransitionRangeToDeferredSpill(Zone* zone, int total_block_count) {
    DCHECK(FLAG_turbo_control_flow_aware_allocation);
    spill_start_index_ = -1;
    spill_move_insertion_locations_ = nullptr;
    list_of_blocks_requiring_spill_operands_ =
        new (zone) BitVector(total_block_count, zone);
  }

  // Promotes this range to spill at definition if it was marked for spilling
  // in deferred blocks before.
  void TransitionRangeToSpillAtDefinition() {
    DCHECK_NOT_NULL(spill_move_insertion_locations_);
    if (spill_type() == SpillType::kDeferredSpillRange) {
      set_spill_type(SpillType::kSpillRange);
    }
  }

  TopLevelLiveRange* splintered_from() const { return splintered_from_; }
  bool IsSplinter() const { return splintered_from_ != nullptr; }
  bool MayRequireSpillRange() const {
    DCHECK(!IsSplinter());
    return !HasSpillOperand() && spill_range_ == nullptr;
  }
  void UpdateSpillRangePostMerge(TopLevelLiveRange* merged);
  int vreg() const { return vreg_; }

#if DEBUG
  int debug_virt_reg() const;
#endif

  void Verify() const;
  void VerifyChildrenInOrder() const;
  LiveRange* GetChildCovers(LifetimePosition pos);
  int GetNextChildId() {
    return IsSplinter() ? splintered_from()->GetNextChildId()
                        : ++last_child_id_;
  }

  int GetMaxChildCount() const { return last_child_id_ + 1; }

  bool IsSpilledOnlyInDeferredBlocks() const {
    if (FLAG_turbo_control_flow_aware_allocation) {
      return spill_type() == SpillType::kDeferredSpillRange;
    }
    return spilled_in_deferred_blocks_;
  }

  struct SpillMoveInsertionList;

  SpillMoveInsertionList* GetSpillMoveInsertionLocations() const {
    DCHECK(!IsSpilledOnlyInDeferredBlocks());
    return spill_move_insertion_locations_;
  }
  TopLevelLiveRange* splinter() const { return splinter_; }
  void SetSplinter(TopLevelLiveRange* splinter) {
    DCHECK_NULL(splinter_);
    DCHECK_NOT_NULL(splinter);

    splinter_ = splinter;
    splinter->relative_id_ = GetNextChildId();
    splinter->set_spill_type(spill_type());
    splinter->SetSplinteredFrom(this);
    if (bundle_ != nullptr) splinter->set_bundle(bundle_);
  }

  void MarkHasPreassignedSlot() { has_preassigned_slot_ = true; }
  bool has_preassigned_slot() const { return has_preassigned_slot_; }

  void AddBlockRequiringSpillOperand(RpoNumber block_id) {
    DCHECK(IsSpilledOnlyInDeferredBlocks());
    GetListOfBlocksRequiringSpillOperands()->Add(block_id.ToInt());
  }

  BitVector* GetListOfBlocksRequiringSpillOperands() const {
    DCHECK(IsSpilledOnlyInDeferredBlocks());
    return list_of_blocks_requiring_spill_operands_;
  }

 private:
  friend class LiveRange;
  void SetSplinteredFrom(TopLevelLiveRange* splinter_parent);

  typedef BitField<SlotUseKind, 1, 2> HasSlotUseField;
  typedef BitField<bool, 3, 1> IsPhiField;
  typedef BitField<bool, 4, 1> IsNonLoopPhiField;
  typedef BitField<SpillType, 5, 2> SpillTypeField;

  int vreg_;
  int last_child_id_;
  TopLevelLiveRange* splintered_from_;
  union {
    // Correct value determined by spill_type()
    InstructionOperand* spill_operand_;
    SpillRange* spill_range_;
  };

  union {
    SpillMoveInsertionList* spill_move_insertion_locations_;
    BitVector* list_of_blocks_requiring_spill_operands_;
  };

  // TODO(mtrofin): generalize spilling after definition, currently
  // specialized just for spill in a single deferred block.
  bool spilled_in_deferred_blocks_;
  int spill_start_index_;
  UsePosition* last_pos_;
  LiveRange* last_child_covers_;
  TopLevelLiveRange* splinter_;
  bool has_preassigned_slot_;

  DISALLOW_COPY_AND_ASSIGN(TopLevelLiveRange);
};

struct PrintableLiveRange {
  const RegisterConfiguration* register_configuration_;
  const LiveRange* range_;
};

std::ostream& operator<<(std::ostream& os,
                         const PrintableLiveRange& printable_range);

class SpillRange final : public ZoneObject {
 public:
  static const int kUnassignedSlot = -1;
  SpillRange(TopLevelLiveRange* range, Zone* zone);

  UseInterval* interval() const { return use_interval_; }

  bool IsEmpty() const { return live_ranges_.empty(); }
  bool TryMerge(SpillRange* other);
  bool HasSlot() const { return assigned_slot_ != kUnassignedSlot; }

  void set_assigned_slot(int index) {
    DCHECK_EQ(kUnassignedSlot, assigned_slot_);
    assigned_slot_ = index;
  }
  int assigned_slot() {
    DCHECK_NE(kUnassignedSlot, assigned_slot_);
    return assigned_slot_;
  }
  const ZoneVector<TopLevelLiveRange*>& live_ranges() const {
    return live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; }
  // Spill slots can be 4, 8, or 16 bytes wide.
  int byte_width() const { return byte_width_; }
  void Print() const;

 private:
  LifetimePosition End() const { return end_position_; }
  bool IsIntersectingWith(SpillRange* other) const;
  // Merge intervals, making sure the use intervals are sorted.
  void MergeDisjointIntervals(UseInterval* other);

  ZoneVector<TopLevelLiveRange*> live_ranges_;
  UseInterval* use_interval_;
  LifetimePosition end_position_;
  int assigned_slot_;
  int byte_width_;

  DISALLOW_COPY_AND_ASSIGN(SpillRange);
};

class RegisterAllocationData final : public ZoneObject {
 public:
  // Encodes whether a spill happens in deferred code (kSpillDeferred) or
  // regular code (kSpillAtDefinition).
  enum SpillMode { kSpillAtDefinition, kSpillDeferred };

  class PhiMapValue : public ZoneObject {
   public:
    PhiMapValue(PhiInstruction* phi, const InstructionBlock* block, Zone* zone);

    const PhiInstruction* phi() const { return phi_; }
    const InstructionBlock* block() const { return block_; }

    // For hinting.
    int assigned_register() const { return assigned_register_; }
    void set_assigned_register(int register_code) {
      DCHECK_EQ(assigned_register_, kUnassignedRegister);
      assigned_register_ = register_code;
    }
    void UnsetAssignedRegister() { assigned_register_ = kUnassignedRegister; }

    void AddOperand(InstructionOperand* operand);
    void CommitAssignment(const InstructionOperand& operand);

   private:
    PhiInstruction* const phi_;
    const InstructionBlock* const block_;
    ZoneVector<InstructionOperand*> incoming_operands_;
    int assigned_register_;
  };
  typedef ZoneMap<int, PhiMapValue*> PhiMap;

  struct DelayedReference {
    ReferenceMap* map;
    InstructionOperand* operand;
  };
  typedef ZoneVector<DelayedReference> DelayedReferences;
  typedef ZoneVector<std::pair<TopLevelLiveRange*, int>>
      RangesWithPreassignedSlots;

  RegisterAllocationData(const RegisterConfiguration* config,
                         Zone* allocation_zone, Frame* frame,
                         InstructionSequence* code,
                         const char* debug_name = nullptr);

  const ZoneVector<TopLevelLiveRange*>& live_ranges() const {
    return live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; }
  const ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() const {
    return fixed_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() {
    return fixed_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_float_live_ranges() {
    return fixed_float_live_ranges_;
  }
  const ZoneVector<TopLevelLiveRange*>& fixed_float_live_ranges() const {
    return fixed_float_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() {
    return fixed_double_live_ranges_;
  }
  const ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() const {
    return fixed_double_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_simd128_live_ranges() {
    return fixed_simd128_live_ranges_;
  }
  const ZoneVector<TopLevelLiveRange*>& fixed_simd128_live_ranges() const {
    return fixed_simd128_live_ranges_;
  }
  ZoneVector<BitVector*>& live_in_sets() { return live_in_sets_; }
  ZoneVector<BitVector*>& live_out_sets() { return live_out_sets_; }
  ZoneVector<SpillRange*>& spill_ranges() { return spill_ranges_; }
  DelayedReferences& delayed_references() { return delayed_references_; }
  InstructionSequence* code() const { return code_; }
  // This zone is for data structures only needed during register allocation
  // phases.
  Zone* allocation_zone() const { return allocation_zone_; }
  // This zone is for InstructionOperands and moves that live beyond register
  // allocation.
  Zone* code_zone() const { return code()->zone(); }
  Frame* frame() const { return frame_; }
  const char* debug_name() const { return debug_name_; }
  const RegisterConfiguration* config() const { return config_; }

  MachineRepresentation RepresentationFor(int virtual_register);

  TopLevelLiveRange* GetOrCreateLiveRangeFor(int index);
  // Creates a new live range.
  TopLevelLiveRange* NewLiveRange(int index, MachineRepresentation rep);
  TopLevelLiveRange* NextLiveRange(MachineRepresentation rep);

  SpillRange* AssignSpillRangeToLiveRange(TopLevelLiveRange* range,
                                          SpillMode spill_mode);
  SpillRange* CreateSpillRangeForLiveRange(TopLevelLiveRange* range);

  MoveOperands* AddGapMove(int index, Instruction::GapPosition position,
                           const InstructionOperand& from,
                           const InstructionOperand& to);

  bool IsReference(TopLevelLiveRange* top_range) const {
    return code()->IsReference(top_range->vreg());
  }

  bool ExistsUseWithoutDefinition();
  bool RangesDefinedInDeferredStayInDeferred();

  void MarkFixedUse(MachineRepresentation rep, int index);
  bool HasFixedUse(MachineRepresentation rep, int index);

  void MarkAllocated(MachineRepresentation rep, int index);

  PhiMapValue* InitializePhiMap(const InstructionBlock* block,
                                PhiInstruction* phi);
  PhiMapValue* GetPhiMapValueFor(TopLevelLiveRange* top_range);
  PhiMapValue* GetPhiMapValueFor(int virtual_register);
  bool IsBlockBoundary(LifetimePosition pos) const;

  RangesWithPreassignedSlots& preassigned_slot_ranges() {
    return preassigned_slot_ranges_;
  }

  void RememberSpillState(RpoNumber block,
                          const ZoneVector<LiveRange*>& state) {
    spill_state_[block.ToSize()] = state;
  }

  ZoneVector<LiveRange*>& GetSpillState(RpoNumber block) {
    auto& result = spill_state_[block.ToSize()];
    return result;
  }

  void ResetSpillState() { spill_state_.clear(); }

 private:
  int GetNextLiveRangeId();

  Zone* const allocation_zone_;
  Frame* const frame_;
  InstructionSequence* const code_;
  const char* const debug_name_;
  const RegisterConfiguration* const config_;
  PhiMap phi_map_;
  ZoneVector<BitVector*> live_in_sets_;
  ZoneVector<BitVector*> live_out_sets_;
  ZoneVector<TopLevelLiveRange*> live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_float_live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_double_live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_simd128_live_ranges_;
  ZoneVector<SpillRange*> spill_ranges_;
  DelayedReferences delayed_references_;
  BitVector* assigned_registers_;
  BitVector* assigned_double_registers_;
  BitVector* fixed_register_use_;
  BitVector* fixed_fp_register_use_;
  int virtual_register_count_;
  RangesWithPreassignedSlots preassigned_slot_ranges_;
  ZoneVector<ZoneVector<LiveRange*>> spill_state_;

  DISALLOW_COPY_AND_ASSIGN(RegisterAllocationData);
};

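// The register allocation pipeline consists of the phases numbered in the
// comments on the classes below, in this order:
//
//   1. MeetRegisterConstraints  (ConstraintBuilder)
//   2. ResolvePhis              (ConstraintBuilder)
//   3. BuildLiveRanges          (LiveRangeBuilder)
//   4. AllocateRegisters        (LinearScanAllocator)
//   5. DecideSpillingMode       (OperandAssigner)
//   6. AssignSpillSlots         (OperandAssigner)
//   7. CommitAssignment         (OperandAssigner)
//   8. PopulateReferenceMaps    (ReferenceMapPopulator)
//   9. ConnectRanges            (LiveRangeConnector)
//  10. ResolveControlFlow       (LiveRangeConnector)
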
class ConstraintBuilder final : public ZoneObject {
 public:
  explicit ConstraintBuilder(RegisterAllocationData* data);

  // Phase 1: insert moves to account for fixed register operands.
  void MeetRegisterConstraints();

  // Phase 2: deconstruct SSA by inserting moves in successors and the headers
  // of blocks containing phis.
  void ResolvePhis();

 private:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* allocation_zone() const { return data()->allocation_zone(); }

  InstructionOperand* AllocateFixed(UnallocatedOperand* operand, int pos,
                                    bool is_tagged, bool is_input);
  void MeetRegisterConstraints(const InstructionBlock* block);
  void MeetConstraintsBefore(int index);
  void MeetConstraintsAfter(int index);
  void MeetRegisterConstraintsForLastInstructionInBlock(
      const InstructionBlock* block);
  void ResolvePhis(const InstructionBlock* block);

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(ConstraintBuilder);
};

class LiveRangeBuilder final : public ZoneObject {
 public:
  explicit LiveRangeBuilder(RegisterAllocationData* data, Zone* local_zone);

  // Phase 3: compute liveness of all virtual registers.
  void BuildLiveRanges();
  static BitVector* ComputeLiveOut(const InstructionBlock* block,
                                   RegisterAllocationData* data);

 private:
  using SpillMode = RegisterAllocationData::SpillMode;

  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* allocation_zone() const { return data()->allocation_zone(); }
  Zone* code_zone() const { return code()->zone(); }
  const RegisterConfiguration* config() const { return data()->config(); }
  ZoneVector<BitVector*>& live_in_sets() const {
    return data()->live_in_sets();
  }

  // Verification.
  void Verify() const;
  bool IntervalStartsAtBlockBoundary(const UseInterval* interval) const;
  bool IntervalPredecessorsCoveredByRange(const UseInterval* interval,
                                          const TopLevelLiveRange* range) const;
  bool NextIntervalStartsInDifferentBlocks(const UseInterval* interval) const;

  // Liveness analysis support.
  void AddInitialIntervals(const InstructionBlock* block, BitVector* live_out);
  void ProcessInstructions(const InstructionBlock* block, BitVector* live);
  void ProcessPhis(const InstructionBlock* block, BitVector* live);
  void ProcessLoopHeader(const InstructionBlock* block, BitVector* live);

  static int FixedLiveRangeID(int index) { return -index - 1; }
  int FixedFPLiveRangeID(int index, MachineRepresentation rep);
  TopLevelLiveRange* FixedLiveRangeFor(int index);
  TopLevelLiveRange* FixedFPLiveRangeFor(int index, MachineRepresentation rep);

  void MapPhiHint(InstructionOperand* operand, UsePosition* use_pos);
  void ResolvePhiHint(InstructionOperand* operand, UsePosition* use_pos);

  UsePosition* NewUsePosition(LifetimePosition pos, InstructionOperand* operand,
                              void* hint, UsePositionHintType hint_type);
  UsePosition* NewUsePosition(LifetimePosition pos) {
    return NewUsePosition(pos, nullptr, nullptr, UsePositionHintType::kNone);
  }
  TopLevelLiveRange* LiveRangeFor(InstructionOperand* operand);
  // Helper methods for building intervals.
  UsePosition* Define(LifetimePosition position, InstructionOperand* operand,
                      void* hint, UsePositionHintType hint_type);
  void Define(LifetimePosition position, InstructionOperand* operand) {
    Define(position, operand, nullptr, UsePositionHintType::kNone);
  }
  UsePosition* Use(LifetimePosition block_start, LifetimePosition position,
                   InstructionOperand* operand, void* hint,
                   UsePositionHintType hint_type);
  void Use(LifetimePosition block_start, LifetimePosition position,
           InstructionOperand* operand) {
    Use(block_start, position, operand, nullptr, UsePositionHintType::kNone);
  }

  RegisterAllocationData* const data_;
  ZoneMap<InstructionOperand*, UsePosition*> phi_hints_;

  DISALLOW_COPY_AND_ASSIGN(LiveRangeBuilder);
};

class BundleBuilder final : public ZoneObject {
 public:
  explicit BundleBuilder(RegisterAllocationData* data) : data_(data) {}

  void BuildBundles();

 private:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data_->code(); }

  RegisterAllocationData* data_;
  int next_bundle_id_ = 0;
};

class RegisterAllocator : public ZoneObject {
 public:
  RegisterAllocator(RegisterAllocationData* data, RegisterKind kind);

 protected:
  using SpillMode = RegisterAllocationData::SpillMode;
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  RegisterKind mode() const { return mode_; }
  int num_registers() const { return num_registers_; }
  int num_allocatable_registers() const { return num_allocatable_registers_; }
  const int* allocatable_register_codes() const {
    return allocatable_register_codes_;
  }
  // Returns true iff we must check float register aliasing.
  bool check_fp_aliasing() const { return check_fp_aliasing_; }

  // TODO(mtrofin): explain why splitting in gap START is always OK.
  LifetimePosition GetSplitPositionForInstruction(const LiveRange* range,
                                                  int instruction_index);

  Zone* allocation_zone() const { return data()->allocation_zone(); }

  // Find the optimal split for ranges defined by a memory operand, e.g.
  // constants or function parameters passed on the stack.
  void SplitAndSpillRangesDefinedByMemoryOperand();

  // Split the given range at the given position.
  // If range starts at or after the given position then the
  // original range is returned.
  // Otherwise returns the live range that starts at pos and contains
  // all uses from the original range that follow pos. Uses at pos will
  // still be owned by the original range after splitting.
  LiveRange* SplitRangeAt(LiveRange* range, LifetimePosition pos);

  bool CanProcessRange(LiveRange* range) const {
    return range != nullptr && !range->IsEmpty() && range->kind() == mode();
  }

  // Split the given range in a position from the interval [start, end].
  LiveRange* SplitBetween(LiveRange* range, LifetimePosition start,
                          LifetimePosition end);

  // Find a lifetime position in the interval [start, end] which
  // is optimal for splitting: it is either the header of the outermost
  // loop covered by this interval or the latest possible position.
  LifetimePosition FindOptimalSplitPos(LifetimePosition start,
                                       LifetimePosition end);

  void Spill(LiveRange* range, SpillMode spill_mode);

  // If we are trying to spill a range inside the loop try to
  // hoist the spill position out to the point just before the loop.
  LifetimePosition FindOptimalSpillingPos(LiveRange* range,
                                          LifetimePosition pos);

  const ZoneVector<TopLevelLiveRange*>& GetFixedRegisters() const;
  const char* RegisterName(int allocation_index) const;

 private:
  RegisterAllocationData* const data_;
  const RegisterKind mode_;
  const int num_registers_;
  int num_allocatable_registers_;
  const int* allocatable_register_codes_;
  bool check_fp_aliasing_;

 private:
  bool no_combining_;

  DISALLOW_COPY_AND_ASSIGN(RegisterAllocator);
};

class LinearScanAllocator final : public RegisterAllocator {
 public:
  LinearScanAllocator(RegisterAllocationData* data, RegisterKind kind,
                      Zone* local_zone);

  // Phase 4: compute register assignments.
  void AllocateRegisters();

 private:
  struct RangeWithRegister {
    TopLevelLiveRange* range;
    int expected_register;
    struct Hash {
      size_t operator()(const RangeWithRegister item) const {
        return item.range->vreg();
      }
    };
    struct Equals {
      bool operator()(const RangeWithRegister one,
                      const RangeWithRegister two) const {
        return one.range == two.range;
      }
    };

    explicit RangeWithRegister(LiveRange* a_range)
        : range(a_range->TopLevel()),
          expected_register(a_range->assigned_register()) {}
    RangeWithRegister(TopLevelLiveRange* toplevel, int reg)
        : range(toplevel), expected_register(reg) {}
  };

  using RangeWithRegisterSet =
      ZoneUnorderedSet<RangeWithRegister, RangeWithRegister::Hash,
                       RangeWithRegister::Equals>;

  void MaybeUndoPreviousSplit(LiveRange* range);
  void SpillNotLiveRanges(RangeWithRegisterSet& to_be_live,
                          LifetimePosition position, SpillMode spill_mode);
  LiveRange* AssignRegisterOnReload(LiveRange* range, int reg);
  void ReloadLiveRanges(RangeWithRegisterSet& to_be_live,
                        LifetimePosition position);

  bool BlockIsDeferredOrImmediatePredecessorIsNotDeferred(
      const InstructionBlock* block);

  struct LiveRangeOrdering {
    bool operator()(const LiveRange* a, const LiveRange* b) const {
      return a->ShouldBeAllocatedBefore(b);
    }
  };
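
  // As in the classic linear-scan scheme, ranges are tracked in three sets:
  // unhandled ranges (not yet processed, ordered by ShouldBeAllocatedBefore),
  // active ranges (overlapping the current position, with a register
  // assigned), and inactive ranges (also holding a register, but in a
  // lifetime hole at the current position).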
  using LiveRangeQueue = ZoneMultiset<LiveRange*, LiveRangeOrdering>;
  LiveRangeQueue& unhandled_live_ranges() { return unhandled_live_ranges_; }
  ZoneVector<LiveRange*>& active_live_ranges() { return active_live_ranges_; }
  ZoneVector<LiveRange*>& inactive_live_ranges() {
    return inactive_live_ranges_;
  }

  void SetLiveRangeAssignedRegister(LiveRange* range, int reg);

  // Helper methods for updating the live range lists.
  void AddToActive(LiveRange* range);
  void AddToInactive(LiveRange* range);
  void AddToUnhandled(LiveRange* range);
  ZoneVector<LiveRange*>::iterator ActiveToHandled(
      ZoneVector<LiveRange*>::iterator it);
  ZoneVector<LiveRange*>::iterator ActiveToInactive(
      ZoneVector<LiveRange*>::iterator it, LifetimePosition position);
  ZoneVector<LiveRange*>::iterator InactiveToHandled(
      ZoneVector<LiveRange*>::iterator it);
  ZoneVector<LiveRange*>::iterator InactiveToActive(
      ZoneVector<LiveRange*>::iterator it, LifetimePosition position);

  void ForwardStateTo(LifetimePosition position);

  int LastDeferredInstructionIndex(InstructionBlock* start);

  // Helper methods for choosing state after control flow events.

  bool ConsiderBlockForControlFlow(InstructionBlock* current_block,
                                   RpoNumber predecessor);
  RpoNumber ChooseOneOfTwoPredecessorStates(InstructionBlock* current_block,
                                            LifetimePosition boundary);
  void ComputeStateFromManyPredecessors(InstructionBlock* current_block,
                                        RangeWithRegisterSet* to_be_live);

  // Helper methods for allocating registers.
  bool TryReuseSpillForPhi(TopLevelLiveRange* range);
  int PickRegisterThatIsAvailableLongest(
      LiveRange* current, int hint_reg,
      const Vector<LifetimePosition>& free_until_pos);
  bool TryAllocateFreeReg(LiveRange* range,
                          const Vector<LifetimePosition>& free_until_pos);
  bool TryAllocatePreferredReg(LiveRange* range,
                               const Vector<LifetimePosition>& free_until_pos);
  void GetFPRegisterSet(MachineRepresentation rep, int* num_regs,
                        int* num_codes, const int** codes) const;
  void FindFreeRegistersForRange(LiveRange* range,
                                 Vector<LifetimePosition> free_until_pos);
  void ProcessCurrentRange(LiveRange* current, SpillMode spill_mode);
  void AllocateBlockedReg(LiveRange* range, SpillMode spill_mode);
  bool TrySplitAndSpillSplinter(LiveRange* range);

  // Spill the given live range after position pos.
  void SpillAfter(LiveRange* range, LifetimePosition pos, SpillMode spill_mode);

  // Spill the given live range after position [start] and up to position
  // [end].
  void SpillBetween(LiveRange* range, LifetimePosition start,
                    LifetimePosition end, SpillMode spill_mode);

  // Spill the given live range after position [start] and up to position
  // [end]. The range is guaranteed to be spilled at least until position
  // [until].
  void SpillBetweenUntil(LiveRange* range, LifetimePosition start,
                         LifetimePosition until, LifetimePosition end,
                         SpillMode spill_mode);
  void SplitAndSpillIntersecting(LiveRange* range, SpillMode spill_mode);

  void PrintRangeRow(std::ostream& os, const TopLevelLiveRange* toplevel);

  void PrintRangeOverview(std::ostream& os);

  LiveRangeQueue unhandled_live_ranges_;
  ZoneVector<LiveRange*> active_live_ranges_;
  ZoneVector<LiveRange*> inactive_live_ranges_;

  // Approximation of the position at which the set of ranges will change
  // next. Used to avoid scanning for updates even if none are present.
  LifetimePosition next_active_ranges_change_;
  LifetimePosition next_inactive_ranges_change_;

#ifdef DEBUG
  LifetimePosition allocation_finger_;
#endif

  DISALLOW_COPY_AND_ASSIGN(LinearScanAllocator);
};

class SpillSlotLocator final : public ZoneObject {
 public:
  explicit SpillSlotLocator(RegisterAllocationData* data);

  void LocateSpillSlots();

 private:
  RegisterAllocationData* data() const { return data_; }

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(SpillSlotLocator);
};

class OperandAssigner final : public ZoneObject {
 public:
  explicit OperandAssigner(RegisterAllocationData* data);

  // Phase 5: final decision on spilling mode.
  void DecideSpillingMode();

  // Phase 6: assign spill slots.
  void AssignSpillSlots();

  // Phase 7: commit assignment.
  void CommitAssignment();

 private:
  RegisterAllocationData* data() const { return data_; }

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(OperandAssigner);
};

class ReferenceMapPopulator final : public ZoneObject {
 public:
  explicit ReferenceMapPopulator(RegisterAllocationData* data);

  // Phase 8: compute values for pointer maps.
  void PopulateReferenceMaps();

 private:
  RegisterAllocationData* data() const { return data_; }

  bool SafePointsAreInOrder() const;

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(ReferenceMapPopulator);
};

class LiveRangeBoundArray;

// Insert moves of the form
//
//   Operand(child_(k+1)) = Operand(child_k)
//
// where child_k and child_(k+1) are consecutive children of a range (so
// child_k->next() == child_(k+1)), and Operand(...) refers to the
// assigned operand, be it a register or a slot.
class LiveRangeConnector final : public ZoneObject {
 public:
  explicit LiveRangeConnector(RegisterAllocationData* data);

  // Phase 9: reconnect split ranges with moves, when the control flow
  // between the ranges is trivial (no branches).
  void ConnectRanges(Zone* local_zone);

  // Phase 10: insert moves to connect ranges across basic blocks, when the
  // control flow between them cannot be trivially resolved, such as joining
  // branches.
  void ResolveControlFlow(Zone* local_zone);

 private:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* code_zone() const { return code()->zone(); }

  bool CanEagerlyResolveControlFlow(const InstructionBlock* block) const;

  int ResolveControlFlow(const InstructionBlock* block,
                         const InstructionOperand& cur_op,
                         const InstructionBlock* pred,
                         const InstructionOperand& pred_op);

  void CommitSpillsInDeferredBlocks(TopLevelLiveRange* range,
                                    LiveRangeBoundArray* array,
                                    Zone* temp_zone);

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(LiveRangeConnector);
};

}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_BACKEND_REGISTER_ALLOCATOR_H_
|