Line data Source code
1 : // Copyright 2014 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #ifndef V8_COMPILER_BACKEND_REGISTER_ALLOCATOR_H_
6 : #define V8_COMPILER_BACKEND_REGISTER_ALLOCATOR_H_
7 :
8 : #include "src/base/bits.h"
9 : #include "src/base/compiler-specific.h"
10 : #include "src/compiler/backend/instruction.h"
11 : #include "src/globals.h"
12 : #include "src/ostreams.h"
13 : #include "src/register-configuration.h"
14 : #include "src/zone/zone-containers.h"
15 :
16 : namespace v8 {
17 : namespace internal {
18 : namespace compiler {
19 :
// The two register classes the allocator handles separately: general-purpose
// and floating-point registers.
enum RegisterKind { GENERAL_REGISTERS, FP_REGISTERS };
21 :
// This class represents a single point of an InstructionOperand's lifetime.
// For each instruction there are four lifetime positions:
//
//   [[START, END], [START, END]]
//
// where the first half position corresponds to
//
//   [GapPosition::START, GapPosition::END]
//
// and the second half position corresponds to
//
//   [Lifetime::USED_AT_START, Lifetime::USED_AT_END]
//
class LifetimePosition final {
 public:
  // Return the lifetime position that corresponds to the beginning of
  // the gap with the given index.
  static LifetimePosition GapFromInstructionIndex(int index) {
    return LifetimePosition(index * kStep);
  }
  // Return the lifetime position that corresponds to the beginning of
  // the instruction with the given index.
  static LifetimePosition InstructionFromInstructionIndex(int index) {
    return LifetimePosition(index * kStep + kHalfStep);
  }

  // Returns true if at least one gap position lies strictly between pos1 and
  // pos2 (order of the arguments does not matter).
  static bool ExistsGapPositionBetween(LifetimePosition pos1,
                                       LifetimePosition pos2) {
    if (pos1 > pos2) std::swap(pos1, pos2);
    LifetimePosition next(pos1.value_ + 1);
    if (next.IsGapPosition()) return next < pos2;
    return next.NextFullStart() < pos2;
  }

  // Returns a numeric representation of this lifetime position.
  int value() const { return value_; }

  // Returns the index of the instruction to which this lifetime position
  // corresponds.
  int ToInstructionIndex() const {
    DCHECK(IsValid());
    return value_ / kStep;
  }

  // Returns true if this lifetime position corresponds to a START value
  bool IsStart() const { return (value_ & (kHalfStep - 1)) == 0; }
  // Returns true if this lifetime position corresponds to an END value
  bool IsEnd() const { return (value_ & (kHalfStep - 1)) == 1; }
  // Returns true if this lifetime position corresponds to a gap START value
  bool IsFullStart() const { return (value_ & (kStep - 1)) == 0; }

  // The first half of each kStep-sized slot (values with bit 1 clear) holds
  // the gap positions; the second half holds the instruction positions.
  bool IsGapPosition() const { return (value_ & 0x2) == 0; }
  bool IsInstructionPosition() const { return !IsGapPosition(); }

  // Returns the lifetime position for the current START.
  LifetimePosition Start() const {
    DCHECK(IsValid());
    return LifetimePosition(value_ & ~(kHalfStep - 1));
  }

  // Returns the lifetime position for the current gap START.
  LifetimePosition FullStart() const {
    DCHECK(IsValid());
    return LifetimePosition(value_ & ~(kStep - 1));
  }

  // Returns the lifetime position for the current END.
  LifetimePosition End() const {
    DCHECK(IsValid());
    return LifetimePosition(Start().value_ + kHalfStep / 2);
  }

  // Returns the lifetime position for the beginning of the next START.
  LifetimePosition NextStart() const {
    DCHECK(IsValid());
    return LifetimePosition(Start().value_ + kHalfStep);
  }

  // Returns the lifetime position for the beginning of the next gap START.
  LifetimePosition NextFullStart() const {
    DCHECK(IsValid());
    return LifetimePosition(FullStart().value_ + kStep);
  }

  // Returns the lifetime position for the beginning of the previous START.
  LifetimePosition PrevStart() const {
    DCHECK(IsValid());
    DCHECK_LE(kHalfStep, value_);
    return LifetimePosition(Start().value_ - kHalfStep);
  }

  // Constructs the lifetime position which does not correspond to any
  // instruction.
  LifetimePosition() : value_(-1) {}

  // Returns true if this lifetime position corresponds to some
  // instruction.
  bool IsValid() const { return value_ != -1; }

  bool operator<(const LifetimePosition& that) const {
    return this->value_ < that.value_;
  }

  bool operator<=(const LifetimePosition& that) const {
    return this->value_ <= that.value_;
  }

  bool operator==(const LifetimePosition& that) const {
    return this->value_ == that.value_;
  }

  bool operator!=(const LifetimePosition& that) const {
    return this->value_ != that.value_;
  }

  bool operator>(const LifetimePosition& that) const {
    return this->value_ > that.value_;
  }

  bool operator>=(const LifetimePosition& that) const {
    return this->value_ >= that.value_;
  }

  void Print() const;

  static inline LifetimePosition Invalid() { return LifetimePosition(); }

  static inline LifetimePosition MaxPosition() {
    // We have to use this kind of getter instead of static member due to
    // crash bug in GDB.
    return LifetimePosition(kMaxInt);
  }

  static inline LifetimePosition FromInt(int value) {
    return LifetimePosition(value);
  }

 private:
  // Each instruction occupies kStep consecutive values: two gap
  // half-positions followed by two instruction half-positions.
  static const int kHalfStep = 2;
  static const int kStep = 2 * kHalfStep;

  static_assert(base::bits::IsPowerOfTwo(kHalfStep),
                "Code relies on kStep and kHalfStep being a power of two");

  explicit LifetimePosition(int value) : value_(value) {}

  int value_;
};
170 :
171 : std::ostream& operator<<(std::ostream& os, const LifetimePosition pos);
172 :
173 : // Representation of the non-empty interval [start,end[.
174 : class UseInterval final : public ZoneObject {
175 : public:
176 : UseInterval(LifetimePosition start, LifetimePosition end)
177 263501635 : : start_(start), end_(end), next_(nullptr) {
178 : DCHECK(start < end);
179 : }
180 :
181 : LifetimePosition start() const { return start_; }
182 245258824 : void set_start(LifetimePosition start) { start_ = start; }
183 : LifetimePosition end() const { return end_; }
184 62190728 : void set_end(LifetimePosition end) { end_ = end; }
185 : UseInterval* next() const { return next_; }
186 185057341 : void set_next(UseInterval* next) { next_ = next; }
187 :
188 : // Split this interval at the given position without effecting the
189 : // live range that owns it. The interval must contain the position.
190 : UseInterval* SplitAt(LifetimePosition pos, Zone* zone);
191 :
192 : // If this interval intersects with other return smallest position
193 : // that belongs to both of them.
194 811701798 : LifetimePosition Intersect(const UseInterval* other) const {
195 811701798 : if (other->start() < start_) return other->Intersect(this);
196 606084360 : if (other->start() < end_) return other->start();
197 : return LifetimePosition::Invalid();
198 : }
199 :
200 : bool Contains(LifetimePosition point) const {
201 804321646 : return start_ <= point && point < end_;
202 : }
203 :
204 : // Returns the index of the first gap covered by this interval.
205 : int FirstGapIndex() const {
206 : int ret = start_.ToInstructionIndex();
207 74263538 : if (start_.IsInstructionPosition()) {
208 43719630 : ++ret;
209 : }
210 : return ret;
211 : }
212 :
213 : // Returns the index of the last gap covered by this interval.
214 : int LastGapIndex() const {
215 : int ret = end_.ToInstructionIndex();
216 59276722 : if (end_.IsGapPosition() && end_.IsStart()) {
217 8497350 : --ret;
218 : }
219 : return ret;
220 : }
221 :
222 : private:
223 : LifetimePosition start_;
224 : LifetimePosition end_;
225 : UseInterval* next_;
226 :
227 : DISALLOW_COPY_AND_ASSIGN(UseInterval);
228 : };
229 :
// Constraint a use position places on the operand it refers to.
enum class UsePositionType : uint8_t {
  kRegisterOrSlot,
  kRegisterOrSlotOrConstant,
  kRequiresRegister,
  kRequiresSlot
};

// Where a use position's register hint originates from.
enum class UsePositionHintType : uint8_t {
  kNone,
  kOperand,
  kUsePos,
  kPhi,
  kUnresolved
};

// Sentinel register code meaning "no register assigned".
static const int32_t kUnassignedRegister = RegisterConfiguration::kMaxRegisters;
246 :
// Representation of a use position: one point in a live range where the
// value is read or written, linked into a list via next_.
class V8_EXPORT_PRIVATE UsePosition final
    : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  UsePosition(LifetimePosition pos, InstructionOperand* operand, void* hint,
              UsePositionHintType hint_type);

  InstructionOperand* operand() const { return operand_; }
  bool HasOperand() const { return operand_ != nullptr; }

  bool RegisterIsBeneficial() const {
    return RegisterBeneficialField::decode(flags_);
  }
  UsePositionType type() const { return TypeField::decode(flags_); }
  void set_type(UsePositionType type, bool register_beneficial);

  LifetimePosition pos() const { return pos_; }

  UsePosition* next() const { return next_; }
  void set_next(UsePosition* next) { next_ = next; }

  // For hinting only.
  void set_assigned_register(int register_code) {
    flags_ = AssignedRegisterField::update(flags_, register_code);
  }

  UsePositionHintType hint_type() const {
    return HintTypeField::decode(flags_);
  }
  bool HasHint() const;
  bool HintRegister(int* register_code) const;
  void SetHint(UsePosition* use_pos);
  void ResolveHint(UsePosition* use_pos);
  // A hint is resolved once it no longer points at an unresolved source.
  bool IsResolved() const {
    return hint_type() != UsePositionHintType::kUnresolved;
  }
  static UsePositionHintType HintTypeForOperand(const InstructionOperand& op);

 private:
  // Layout of flags_: type (bits 0-1), hint type (2-4),
  // register-beneficial flag (5), assigned register (6-11).
  typedef BitField<UsePositionType, 0, 2> TypeField;
  typedef BitField<UsePositionHintType, 2, 3> HintTypeField;
  typedef BitField<bool, 5, 1> RegisterBeneficialField;
  typedef BitField<int32_t, 6, 6> AssignedRegisterField;

  InstructionOperand* const operand_;
  void* hint_;
  UsePosition* next_;
  LifetimePosition const pos_;
  uint32_t flags_;

  DISALLOW_COPY_AND_ASSIGN(UsePosition);
};
299 :
300 : class SpillRange;
301 : class RegisterAllocationData;
302 : class TopLevelLiveRange;
303 : class LiveRangeBundle;
304 :
305 : // Representation of SSA values' live ranges as a collection of (continuous)
306 : // intervals over the instruction ordering.
307 : class V8_EXPORT_PRIVATE LiveRange : public NON_EXPORTED_BASE(ZoneObject) {
308 : public:
309 : UseInterval* first_interval() const { return first_interval_; }
310 : UsePosition* first_pos() const { return first_pos_; }
311 : TopLevelLiveRange* TopLevel() { return top_level_; }
312 : const TopLevelLiveRange* TopLevel() const { return top_level_; }
313 :
314 : bool IsTopLevel() const;
315 :
316 : LiveRange* next() const { return next_; }
317 :
318 : int relative_id() const { return relative_id_; }
319 :
320 765975704 : bool IsEmpty() const { return first_interval() == nullptr; }
321 :
322 : InstructionOperand GetAssignedOperand() const;
323 :
324 : MachineRepresentation representation() const {
325 : return RepresentationField::decode(bits_);
326 : }
327 :
328 : int assigned_register() const { return AssignedRegisterField::decode(bits_); }
329 180150395 : bool HasRegisterAssigned() const {
330 : return assigned_register() != kUnassignedRegister;
331 : }
332 : void set_assigned_register(int reg);
333 : void UnsetAssignedRegister();
334 :
335 : bool ShouldRecombine() const { return RecombineField::decode(bits_); }
336 :
337 0 : void SetRecombine() { bits_ = RecombineField::update(bits_, true); }
338 : bool spilled() const { return SpilledField::decode(bits_); }
339 : void AttachToNext();
340 : void Unspill();
341 : void Spill();
342 :
343 : RegisterKind kind() const;
344 :
345 : // Returns use position in this live range that follows both start
346 : // and last processed use position.
347 : UsePosition* NextUsePosition(LifetimePosition start) const;
348 :
349 : // Returns use position for which register is required in this live
350 : // range and which follows both start and last processed use position
351 : UsePosition* NextRegisterPosition(LifetimePosition start) const;
352 :
353 : // Returns the first use position requiring stack slot, or nullptr.
354 : UsePosition* NextSlotPosition(LifetimePosition start) const;
355 :
356 : // Returns use position for which register is beneficial in this live
357 : // range and which follows both start and last processed use position
358 : UsePosition* NextUsePositionRegisterIsBeneficial(
359 : LifetimePosition start) const;
360 :
361 : // Returns lifetime position for which register is beneficial in this live
362 : // range and which follows both start and last processed use position.
363 : LifetimePosition NextLifetimePositionRegisterIsBeneficial(
364 : const LifetimePosition& start) const;
365 :
366 : // Returns use position for which register is beneficial in this live
367 : // range and which precedes start.
368 : UsePosition* PreviousUsePositionRegisterIsBeneficial(
369 : LifetimePosition start) const;
370 :
371 : // Can this live range be spilled at this position.
372 : bool CanBeSpilled(LifetimePosition pos) const;
373 :
374 : // Splitting primitive used by both splitting and splintering members.
375 : // Performs the split, but does not link the resulting ranges.
376 : // The given position must follow the start of the range.
377 : // All uses following the given position will be moved from this
378 : // live range to the result live range.
379 : // The current range will terminate at position, while result will start from
380 : // position.
381 : enum HintConnectionOption : bool {
382 : DoNotConnectHints = false,
383 : ConnectHints = true
384 : };
385 : UsePosition* DetachAt(LifetimePosition position, LiveRange* result,
386 : Zone* zone, HintConnectionOption connect_hints);
387 :
388 : // Detaches at position, and then links the resulting ranges. Returns the
389 : // child, which starts at position.
390 : LiveRange* SplitAt(LifetimePosition position, Zone* zone);
391 :
392 : // Returns nullptr when no register is hinted, otherwise sets register_index.
393 : UsePosition* FirstHintPosition(int* register_index) const;
394 : UsePosition* FirstHintPosition() const {
395 : int register_index;
396 644614 : return FirstHintPosition(®ister_index);
397 : }
398 :
399 : UsePosition* current_hint_position() const {
400 : DCHECK(current_hint_position_ == FirstHintPosition());
401 : return current_hint_position_;
402 : }
403 :
404 1289077039 : LifetimePosition Start() const {
405 : DCHECK(!IsEmpty());
406 : return first_interval()->start();
407 : }
408 :
409 : LifetimePosition End() const {
410 : DCHECK(!IsEmpty());
411 : return last_interval_->end();
412 : }
413 :
414 : bool ShouldBeAllocatedBefore(const LiveRange* other) const;
415 : bool CanCover(LifetimePosition position) const;
416 : bool Covers(LifetimePosition position) const;
417 : LifetimePosition NextStartAfter(LifetimePosition position) const;
418 : LifetimePosition NextEndAfter(LifetimePosition position) const;
419 : LifetimePosition FirstIntersection(LiveRange* other) const;
420 :
421 : void VerifyChildStructure() const {
422 : VerifyIntervals();
423 0 : VerifyPositions();
424 : }
425 :
426 : void ConvertUsesToOperand(const InstructionOperand& op,
427 : const InstructionOperand& spill_op);
428 : void SetUseHints(int register_index);
429 : void UnsetUseHints() { SetUseHints(kUnassignedRegister); }
430 :
431 : void Print(const RegisterConfiguration* config, bool with_children) const;
432 : void Print(bool with_children) const;
433 :
434 46217229 : void set_bundle(LiveRangeBundle* bundle) { bundle_ = bundle; }
435 : LiveRangeBundle* get_bundle() const { return bundle_; }
436 : bool RegisterFromBundle(int* hint) const;
437 : void UpdateBundleRegister(int reg) const;
438 :
439 : private:
440 : friend class TopLevelLiveRange;
441 : explicit LiveRange(int relative_id, MachineRepresentation rep,
442 : TopLevelLiveRange* top_level);
443 :
444 : void UpdateParentForAllChildren(TopLevelLiveRange* new_top_level);
445 :
446 83913878 : void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); }
447 :
448 : UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
449 : void AdvanceLastProcessedMarker(UseInterval* to_start_of,
450 : LifetimePosition but_not_past) const;
451 :
452 : void VerifyPositions() const;
453 : void VerifyIntervals() const;
454 :
455 : typedef BitField<bool, 0, 1> SpilledField;
456 : typedef BitField<int32_t, 6, 6> AssignedRegisterField;
457 : typedef BitField<MachineRepresentation, 12, 8> RepresentationField;
458 : typedef BitField<bool, 20, 1> RecombineField;
459 :
460 : // Unique among children and splinters of the same virtual register.
461 : int relative_id_;
462 : uint32_t bits_;
463 : UseInterval* last_interval_;
464 : UseInterval* first_interval_;
465 : UsePosition* first_pos_;
466 : TopLevelLiveRange* top_level_;
467 : LiveRange* next_;
468 : // This is used as a cache, it doesn't affect correctness.
469 : mutable UseInterval* current_interval_;
470 : // This is used as a cache, it doesn't affect correctness.
471 : mutable UsePosition* last_processed_use_;
472 : // This is used as a cache, it's invalid outside of BuildLiveRanges.
473 : mutable UsePosition* current_hint_position_;
474 : // Cache the last position splintering stopped at.
475 : mutable UsePosition* splitting_pointer_;
476 : LiveRangeBundle* bundle_ = nullptr;
477 :
478 : DISALLOW_COPY_AND_ASSIGN(LiveRange);
479 : };
480 :
481 : struct LiveRangeOrdering {
482 : bool operator()(const LiveRange* left, const LiveRange* right) const {
483 : return left->Start() < right->Start();
484 : }
485 : };
// A bundle groups live ranges together (built by BundleBuilder) so that a
// single register choice and merged spill ranges can be shared among them.
class LiveRangeBundle : public ZoneObject {
 public:
  void MergeSpillRanges();

  int id() { return id_; }

  int reg() { return reg_; }

  // Assigns the bundle's register; may only be done once.
  void set_reg(int reg) {
    DCHECK_EQ(reg_, kUnassignedRegister);
    reg_ = reg;
  }

 private:
  friend class BundleBuilder;

  // Plain [start, end[ pair storing the raw integer values of two
  // LifetimePositions.
  class Range {
   public:
    Range(int s, int e) : start(s), end(e) {}
    Range(LifetimePosition s, LifetimePosition e)
        : start(s.value()), end(e.value()) {}
    int start;
    int end;
  };

  // Orders Ranges by ascending start value.
  struct RangeOrdering {
    bool operator()(const Range left, const Range right) const {
      return left.start < right.start;
    }
  };
  // Returns true if any interval in the given chain overlaps a recorded use.
  // Both the interval chain and uses_ are sorted by start, so this is a
  // linear two-pointer sweep: whichever side ends first is advanced.
  bool UsesOverlap(UseInterval* interval) {
    auto use = uses_.begin();
    while (interval != nullptr && use != uses_.end()) {
      if (use->end <= interval->start().value()) {
        ++use;
      } else if (interval->end().value() <= use->start) {
        interval = interval->next();
      } else {
        return true;
      }
    }
    return false;
  }
  // Records every interval of the chain in uses_; each insertion must be new
  // (callers are expected to have checked UsesOverlap first).
  void InsertUses(UseInterval* interval) {
    while (interval != nullptr) {
      auto done = uses_.insert({interval->start(), interval->end()});
      USE(done);
      DCHECK_EQ(done.second, 1);
      interval = interval->next();
    }
  }
  explicit LiveRangeBundle(Zone* zone, int id)
      : ranges_(zone), uses_(zone), id_(id) {}

  bool TryAddRange(LiveRange* range);
  bool TryMerge(LiveRangeBundle* other);

  ZoneSet<LiveRange*, LiveRangeOrdering> ranges_;
  ZoneSet<Range, RangeOrdering> uses_;
  int id_;
  int reg_ = kUnassignedRegister;
};
548 :
// The head of a chain of LiveRange children for one virtual register.
// Owns the spill state (operand or spill range) and splinter bookkeeping
// shared by all its children.
class V8_EXPORT_PRIVATE TopLevelLiveRange final : public LiveRange {
 public:
  explicit TopLevelLiveRange(int vreg, MachineRepresentation rep);
  int spill_start_index() const { return spill_start_index_; }

  // Fixed (pre-colored) ranges are encoded with negative virtual registers.
  bool IsFixed() const { return vreg_ < 0; }

  bool is_phi() const { return IsPhiField::decode(bits_); }
  void set_is_phi(bool value) { bits_ = IsPhiField::update(bits_, value); }

  bool is_non_loop_phi() const { return IsNonLoopPhiField::decode(bits_); }
  void set_is_non_loop_phi(bool value) {
    bits_ = IsNonLoopPhiField::update(bits_, value);
  }

  bool has_slot_use() const { return HasSlotUseField::decode(bits_); }
  void set_has_slot_use(bool value) {
    bits_ = HasSlotUseField::update(bits_, value);
  }

  // Add a new interval or a new use position to this live range.
  void EnsureInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
  void AddUseInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
  void AddUsePosition(UsePosition* pos);

  // Shorten the most recently added interval by setting a new start.
  void ShortenTo(LifetimePosition start);

  // Detaches between start and end, and attributes the resulting range to
  // result.
  // The current range is pointed to as "splintered_from". No parent/child
  // relationship is established between this and result.
  void Splinter(LifetimePosition start, LifetimePosition end, Zone* zone);

  // Assuming other was splintered from this range, embeds other and its
  // children as part of the children sequence of this range.
  void Merge(TopLevelLiveRange* other, Zone* zone);

  // Spill range management.
  void SetSpillRange(SpillRange* spill_range);
  // Discriminates the spill_operand_/spill_range_ union below.
  enum class SpillType { kNoSpillType, kSpillOperand, kSpillRange };
  void set_spill_type(SpillType value) {
    bits_ = SpillTypeField::update(bits_, value);
  }
  SpillType spill_type() const { return SpillTypeField::decode(bits_); }
  InstructionOperand* GetSpillOperand() const {
    DCHECK_EQ(SpillType::kSpillOperand, spill_type());
    return spill_operand_;
  }

  SpillRange* GetAllocatedSpillRange() const {
    DCHECK_NE(SpillType::kSpillOperand, spill_type());
    return spill_range_;
  }

  SpillRange* GetSpillRange() const {
    DCHECK_EQ(SpillType::kSpillRange, spill_type());
    return spill_range_;
  }
  bool HasNoSpillType() const {
    return spill_type() == SpillType::kNoSpillType;
  }
  bool HasSpillOperand() const {
    return spill_type() == SpillType::kSpillOperand;
  }
  bool HasSpillRange() const { return spill_type() == SpillType::kSpillRange; }

  AllocatedOperand GetSpillRangeOperand() const;

  void RecordSpillLocation(Zone* zone, int gap_index,
                           InstructionOperand* operand);
  void SetSpillOperand(InstructionOperand* operand);
  // Keeps the earliest spill start seen so far.
  void SetSpillStartIndex(int start) {
    spill_start_index_ = Min(start, spill_start_index_);
  }

  void CommitSpillMoves(InstructionSequence* sequence,
                        const InstructionOperand& operand,
                        bool might_be_duplicated);

  // If all the children of this range are spilled in deferred blocks, and if
  // for any non-spilled child with a use position requiring a slot, that range
  // is contained in a deferred block, mark the range as
  // IsSpilledOnlyInDeferredBlocks, so that we avoid spilling at definition,
  // and instead let the LiveRangeConnector perform the spills within the
  // deferred blocks. If so, we insert here spills for non-spilled ranges
  // with slot use positions.
  void TreatAsSpilledInDeferredBlock(Zone* zone, int total_block_count) {
    spill_start_index_ = -1;
    spilled_in_deferred_blocks_ = true;
    // Switches the second union below from the insertion list to the
    // per-block BitVector.
    spill_move_insertion_locations_ = nullptr;
    list_of_blocks_requiring_spill_operands_ =
        new (zone) BitVector(total_block_count, zone);
  }

  void CommitSpillInDeferredBlocks(RegisterAllocationData* data,
                                   const InstructionOperand& spill_operand,
                                   BitVector* necessary_spill_points);

  TopLevelLiveRange* splintered_from() const { return splintered_from_; }
  bool IsSplinter() const { return splintered_from_ != nullptr; }
  bool MayRequireSpillRange() const {
    DCHECK(!IsSplinter());
    return !HasSpillOperand() && spill_range_ == nullptr;
  }
  void UpdateSpillRangePostMerge(TopLevelLiveRange* merged);
  int vreg() const { return vreg_; }

// NOTE(review): "#if DEBUG" evaluates to 0 when DEBUG is merely undefined;
// confirm whether "#ifdef DEBUG" was intended.
#if DEBUG
  int debug_virt_reg() const;
#endif

  void Verify() const;
  void VerifyChildrenInOrder() const;
  LiveRange* GetChildCovers(LifetimePosition pos);
  // Child ids are allocated by the splinter root so they stay unique across
  // the whole family of ranges.
  int GetNextChildId() {
    return IsSplinter() ? splintered_from()->GetNextChildId()
                        : ++last_child_id_;
  }

  int GetMaxChildCount() const { return last_child_id_ + 1; }

  bool IsSpilledOnlyInDeferredBlocks() const {
    return spilled_in_deferred_blocks_;
  }

  struct SpillMoveInsertionList;

  SpillMoveInsertionList* GetSpillMoveInsertionLocations() const {
    DCHECK(!IsSpilledOnlyInDeferredBlocks());
    return spill_move_insertion_locations_;
  }
  TopLevelLiveRange* splinter() const { return splinter_; }
  // Registers the (single) splinter of this range and propagates spill type
  // and bundle to it.
  void SetSplinter(TopLevelLiveRange* splinter) {
    DCHECK_NULL(splinter_);
    DCHECK_NOT_NULL(splinter);

    splinter_ = splinter;
    splinter->relative_id_ = GetNextChildId();
    splinter->set_spill_type(spill_type());
    splinter->SetSplinteredFrom(this);
    if (bundle_ != nullptr) splinter->set_bundle(bundle_);
  }

  void MarkHasPreassignedSlot() { has_preassigned_slot_ = true; }
  bool has_preassigned_slot() const { return has_preassigned_slot_; }

  void AddBlockRequiringSpillOperand(RpoNumber block_id) {
    DCHECK(IsSpilledOnlyInDeferredBlocks());
    GetListOfBlocksRequiringSpillOperands()->Add(block_id.ToInt());
  }

  BitVector* GetListOfBlocksRequiringSpillOperands() const {
    DCHECK(IsSpilledOnlyInDeferredBlocks());
    return list_of_blocks_requiring_spill_operands_;
  }

 private:
  friend class LiveRange;
  void SetSplinteredFrom(TopLevelLiveRange* splinter_parent);

  // These occupy bits 1-5 of the bits_ word inherited from LiveRange
  // (bit 0 is LiveRange::SpilledField).
  typedef BitField<bool, 1, 1> HasSlotUseField;
  typedef BitField<bool, 2, 1> IsPhiField;
  typedef BitField<bool, 3, 1> IsNonLoopPhiField;
  typedef BitField<SpillType, 4, 2> SpillTypeField;

  int vreg_;
  int last_child_id_;
  TopLevelLiveRange* splintered_from_;
  union {
    // Correct value determined by spill_type()
    InstructionOperand* spill_operand_;
    SpillRange* spill_range_;
  };

  // Correct member determined by IsSpilledOnlyInDeferredBlocks().
  union {
    SpillMoveInsertionList* spill_move_insertion_locations_;
    BitVector* list_of_blocks_requiring_spill_operands_;
  };

  // TODO(mtrofin): generalize spilling after definition, currently specialized
  // just for spill in a single deferred block.
  bool spilled_in_deferred_blocks_;
  int spill_start_index_;
  UsePosition* last_pos_;
  LiveRange* last_child_covers_;
  TopLevelLiveRange* splinter_;
  bool has_preassigned_slot_;

  DISALLOW_COPY_AND_ASSIGN(TopLevelLiveRange);
};
740 :
// Bundles a live range with the register configuration required to name its
// registers; consumed by the operator<< overload declared below.
struct PrintableLiveRange {
  const RegisterConfiguration* register_configuration_;
  const LiveRange* range_;
};
745 :
746 : std::ostream& operator<<(std::ostream& os,
747 : const PrintableLiveRange& printable_range);
748 :
// A set of top-level live ranges sharing one spill slot, together with the
// merged use intervals over which the slot is live.
class SpillRange final : public ZoneObject {
 public:
  static const int kUnassignedSlot = -1;
  SpillRange(TopLevelLiveRange* range, Zone* zone);

  UseInterval* interval() const { return use_interval_; }

  bool IsEmpty() const { return live_ranges_.empty(); }
  bool TryMerge(SpillRange* other);
  bool HasSlot() const { return assigned_slot_ != kUnassignedSlot; }

  // Assigns the stack slot; may only be done once.
  void set_assigned_slot(int index) {
    DCHECK_EQ(kUnassignedSlot, assigned_slot_);
    assigned_slot_ = index;
  }
  int assigned_slot() {
    DCHECK_NE(kUnassignedSlot, assigned_slot_);
    return assigned_slot_;
  }
  const ZoneVector<TopLevelLiveRange*>& live_ranges() const {
    return live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; }
  // Spill slots can be 4, 8, or 16 bytes wide.
  int byte_width() const { return byte_width_; }
  void Print() const;

 private:
  LifetimePosition End() const { return end_position_; }
  bool IsIntersectingWith(SpillRange* other) const;
  // Merge intervals, making sure the use intervals are sorted
  void MergeDisjointIntervals(UseInterval* other);

  ZoneVector<TopLevelLiveRange*> live_ranges_;
  UseInterval* use_interval_;
  LifetimePosition end_position_;
  int assigned_slot_;
  int byte_width_;

  DISALLOW_COPY_AND_ASSIGN(SpillRange);
};
790 :
791 : class RegisterAllocationData final : public ZoneObject {
792 : public:
793 : class PhiMapValue : public ZoneObject {
794 : public:
795 : PhiMapValue(PhiInstruction* phi, const InstructionBlock* block, Zone* zone);
796 :
797 : const PhiInstruction* phi() const { return phi_; }
798 : const InstructionBlock* block() const { return block_; }
799 :
800 : // For hinting.
801 : int assigned_register() const { return assigned_register_; }
802 : void set_assigned_register(int register_code) {
803 : DCHECK_EQ(assigned_register_, kUnassignedRegister);
804 1990674 : assigned_register_ = register_code;
805 : }
806 : void UnsetAssignedRegister() { assigned_register_ = kUnassignedRegister; }
807 :
808 : void AddOperand(InstructionOperand* operand);
809 : void CommitAssignment(const InstructionOperand& operand);
810 :
811 : private:
812 : PhiInstruction* const phi_;
813 : const InstructionBlock* const block_;
814 : ZoneVector<InstructionOperand*> incoming_operands_;
815 : int assigned_register_;
816 : };
817 : typedef ZoneMap<int, PhiMapValue*> PhiMap;
818 :
819 : struct DelayedReference {
820 : ReferenceMap* map;
821 : InstructionOperand* operand;
822 : };
823 : typedef ZoneVector<DelayedReference> DelayedReferences;
824 : typedef ZoneVector<std::pair<TopLevelLiveRange*, int>>
825 : RangesWithPreassignedSlots;
826 :
827 : RegisterAllocationData(const RegisterConfiguration* config,
828 : Zone* allocation_zone, Frame* frame,
829 : InstructionSequence* code,
830 : const char* debug_name = nullptr);
831 :
832 : const ZoneVector<TopLevelLiveRange*>& live_ranges() const {
833 : return live_ranges_;
834 : }
835 : ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; }
836 : const ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() const {
837 : return fixed_live_ranges_;
838 : }
839 : ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() {
840 : return fixed_live_ranges_;
841 : }
842 : ZoneVector<TopLevelLiveRange*>& fixed_float_live_ranges() {
843 : return fixed_float_live_ranges_;
844 : }
845 : const ZoneVector<TopLevelLiveRange*>& fixed_float_live_ranges() const {
846 : return fixed_float_live_ranges_;
847 : }
848 : ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() {
849 : return fixed_double_live_ranges_;
850 : }
851 : const ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() const {
852 : return fixed_double_live_ranges_;
853 : }
854 : ZoneVector<TopLevelLiveRange*>& fixed_simd128_live_ranges() {
855 : return fixed_simd128_live_ranges_;
856 : }
857 : const ZoneVector<TopLevelLiveRange*>& fixed_simd128_live_ranges() const {
858 : return fixed_simd128_live_ranges_;
859 : }
860 : ZoneVector<BitVector*>& live_in_sets() { return live_in_sets_; }
861 : ZoneVector<BitVector*>& live_out_sets() { return live_out_sets_; }
862 : ZoneVector<SpillRange*>& spill_ranges() { return spill_ranges_; }
863 : DelayedReferences& delayed_references() { return delayed_references_; }
864 : InstructionSequence* code() const { return code_; }
865 : // This zone is for data structures only needed during register allocation
866 : // phases.
867 : Zone* allocation_zone() const { return allocation_zone_; }
868 : // This zone is for InstructionOperands and moves that live beyond register
869 : // allocation.
870 48174639 : Zone* code_zone() const { return code()->zone(); }
871 : Frame* frame() const { return frame_; }
872 : const char* debug_name() const { return debug_name_; }
873 : const RegisterConfiguration* config() const { return config_; }
874 :
875 : MachineRepresentation RepresentationFor(int virtual_register);
876 :
877 : TopLevelLiveRange* GetOrCreateLiveRangeFor(int index);
878 : // Creates a new live range.
879 : TopLevelLiveRange* NewLiveRange(int index, MachineRepresentation rep);
880 : TopLevelLiveRange* NextLiveRange(MachineRepresentation rep);
881 :
882 : SpillRange* AssignSpillRangeToLiveRange(TopLevelLiveRange* range);
883 : SpillRange* CreateSpillRangeForLiveRange(TopLevelLiveRange* range);
884 :
885 : MoveOperands* AddGapMove(int index, Instruction::GapPosition position,
886 : const InstructionOperand& from,
887 : const InstructionOperand& to);
888 :
889 37741539 : bool IsReference(TopLevelLiveRange* top_range) const {
890 : return code()->IsReference(top_range->vreg());
891 : }
892 :
893 : bool ExistsUseWithoutDefinition();
894 : bool RangesDefinedInDeferredStayInDeferred();
895 :
896 : void MarkFixedUse(MachineRepresentation rep, int index);
897 : bool HasFixedUse(MachineRepresentation rep, int index);
898 :
899 : void MarkAllocated(MachineRepresentation rep, int index);
900 :
901 : PhiMapValue* InitializePhiMap(const InstructionBlock* block,
902 : PhiInstruction* phi);
903 : PhiMapValue* GetPhiMapValueFor(TopLevelLiveRange* top_range);
904 : PhiMapValue* GetPhiMapValueFor(int virtual_register);
905 : bool IsBlockBoundary(LifetimePosition pos) const;
906 :
907 : RangesWithPreassignedSlots& preassigned_slot_ranges() {
908 : return preassigned_slot_ranges_;
909 : }
910 :
  // Stores a copy of |state| keyed by |block|'s RPO number, so it can later be
  // retrieved via GetSpillState(). NOTE(review): what |state| contains (e.g.
  // the ranges live at block end) is defined by the callers in the .cc file.
  void RememberSpillState(RpoNumber block,
                          const ZoneVector<LiveRange*>& state) {
    spill_state_[block.ToSize()] = state;
  }
915 :
916 : ZoneVector<LiveRange*>& GetSpillState(RpoNumber block) {
917 0 : auto& result = spill_state_[block.ToSize()];
918 : return result;
919 : }
920 :
921 : void ResetSpillState() { spill_state_.clear(); }
922 :
923 : private:
924 : int GetNextLiveRangeId();
925 :
926 : Zone* const allocation_zone_;
927 : Frame* const frame_;
928 : InstructionSequence* const code_;
929 : const char* const debug_name_;
930 : const RegisterConfiguration* const config_;
931 : PhiMap phi_map_;
932 : ZoneVector<BitVector*> live_in_sets_;
933 : ZoneVector<BitVector*> live_out_sets_;
934 : ZoneVector<TopLevelLiveRange*> live_ranges_;
935 : ZoneVector<TopLevelLiveRange*> fixed_live_ranges_;
936 : ZoneVector<TopLevelLiveRange*> fixed_float_live_ranges_;
937 : ZoneVector<TopLevelLiveRange*> fixed_double_live_ranges_;
938 : ZoneVector<TopLevelLiveRange*> fixed_simd128_live_ranges_;
939 : ZoneVector<SpillRange*> spill_ranges_;
940 : DelayedReferences delayed_references_;
941 : BitVector* assigned_registers_;
942 : BitVector* assigned_double_registers_;
943 : BitVector* fixed_register_use_;
944 : BitVector* fixed_fp_register_use_;
945 : int virtual_register_count_;
946 : RangesWithPreassignedSlots preassigned_slot_ranges_;
947 : ZoneVector<ZoneVector<LiveRange*>> spill_state_;
948 :
949 : DISALLOW_COPY_AND_ASSIGN(RegisterAllocationData);
950 : };
951 :
// Phases 1 and 2 of register allocation: rewrites the instruction stream so
// that fixed-register operand constraints are expressed as explicit gap
// moves, then deconstructs SSA form by resolving phis into moves.
class ConstraintBuilder final : public ZoneObject {
 public:
  explicit ConstraintBuilder(RegisterAllocationData* data);

  // Phase 1 : insert moves to account for fixed register operands.
  void MeetRegisterConstraints();

  // Phase 2: deconstruct SSA by inserting moves in successors and the headers
  // of blocks containing phis.
  void ResolvePhis();

 private:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* allocation_zone() const { return data()->allocation_zone(); }

  // NOTE(review): presumably rewrites |operand| in place into a fixed
  // (register/slot) operand at instruction |pos| — confirm in the .cc file.
  InstructionOperand* AllocateFixed(UnallocatedOperand* operand, int pos,
                                    bool is_tagged, bool is_input);
  // Per-block / per-instruction workers for MeetRegisterConstraints().
  void MeetRegisterConstraints(const InstructionBlock* block);
  void MeetConstraintsBefore(int index);
  void MeetConstraintsAfter(int index);
  void MeetRegisterConstraintsForLastInstructionInBlock(
      const InstructionBlock* block);
  // Per-block worker for ResolvePhis().
  void ResolvePhis(const InstructionBlock* block);

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(ConstraintBuilder);
};
981 :
// Phase 3 of register allocation: runs liveness analysis over the instruction
// sequence and builds live ranges (intervals plus use positions) for virtual
// and fixed registers.
class LiveRangeBuilder final : public ZoneObject {
 public:
  explicit LiveRangeBuilder(RegisterAllocationData* data, Zone* local_zone);

  // Phase 3: compute liveness of all virtual registers.
  void BuildLiveRanges();
  // Computes the live-out set for |block|.
  static BitVector* ComputeLiveOut(const InstructionBlock* block,
                                   RegisterAllocationData* data);

 private:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* allocation_zone() const { return data()->allocation_zone(); }
  Zone* code_zone() const { return code()->zone(); }
  const RegisterConfiguration* config() const { return data()->config(); }
  ZoneVector<BitVector*>& live_in_sets() const {
    return data()->live_in_sets();
  }

  // Verification (consistency checks over the built ranges).
  void Verify() const;
  bool IntervalStartsAtBlockBoundary(const UseInterval* interval) const;
  bool IntervalPredecessorsCoveredByRange(const UseInterval* interval,
                                          const TopLevelLiveRange* range) const;
  bool NextIntervalStartsInDifferentBlocks(const UseInterval* interval) const;

  // Liveness analysis support.
  void AddInitialIntervals(const InstructionBlock* block, BitVector* live_out);
  void ProcessInstructions(const InstructionBlock* block, BitVector* live);
  void ProcessPhis(const InstructionBlock* block, BitVector* live);
  void ProcessLoopHeader(const InstructionBlock* block, BitVector* live);

  // Fixed (pre-assigned) ranges get negative ids, disjoint from vreg ids.
  static int FixedLiveRangeID(int index) { return -index - 1; }
  int FixedFPLiveRangeID(int index, MachineRepresentation rep);
  TopLevelLiveRange* FixedLiveRangeFor(int index);
  TopLevelLiveRange* FixedFPLiveRangeFor(int index, MachineRepresentation rep);

  // Phi-hint bookkeeping: remembers/forwards register hints across phis.
  void MapPhiHint(InstructionOperand* operand, UsePosition* use_pos);
  void ResolvePhiHint(InstructionOperand* operand, UsePosition* use_pos);

  UsePosition* NewUsePosition(LifetimePosition pos, InstructionOperand* operand,
                              void* hint, UsePositionHintType hint_type);
  UsePosition* NewUsePosition(LifetimePosition pos) {
    return NewUsePosition(pos, nullptr, nullptr, UsePositionHintType::kNone);
  }
  TopLevelLiveRange* LiveRangeFor(InstructionOperand* operand);
  // Helper methods for building intervals.
  UsePosition* Define(LifetimePosition position, InstructionOperand* operand,
                      void* hint, UsePositionHintType hint_type);
  void Define(LifetimePosition position, InstructionOperand* operand) {
    Define(position, operand, nullptr, UsePositionHintType::kNone);
  }
  UsePosition* Use(LifetimePosition block_start, LifetimePosition position,
                   InstructionOperand* operand, void* hint,
                   UsePositionHintType hint_type);
  void Use(LifetimePosition block_start, LifetimePosition position,
           InstructionOperand* operand) {
    Use(block_start, position, operand, nullptr, UsePositionHintType::kNone);
  }

  RegisterAllocationData* const data_;
  ZoneMap<InstructionOperand*, UsePosition*> phi_hints_;

  DISALLOW_COPY_AND_ASSIGN(LiveRangeBuilder);
};
1047 :
1048 : class BundleBuilder final : public ZoneObject {
1049 : public:
1050 2141574 : explicit BundleBuilder(RegisterAllocationData* data) : data_(data) {}
1051 :
1052 : void BuildBundles();
1053 :
1054 : private:
1055 : RegisterAllocationData* data() const { return data_; }
1056 21582548 : InstructionSequence* code() const { return data_->code(); }
1057 : RegisterAllocationData* data_;
1058 : int next_bundle_id_ = 0;
1059 : };
1060 :
1061 : class RegisterAllocator : public ZoneObject {
1062 : public:
1063 : RegisterAllocator(RegisterAllocationData* data, RegisterKind kind);
1064 :
1065 : protected:
1066 : RegisterAllocationData* data() const { return data_; }
1067 27351916 : InstructionSequence* code() const { return data()->code(); }
1068 : RegisterKind mode() const { return mode_; }
1069 : int num_registers() const { return num_registers_; }
1070 : int num_allocatable_registers() const { return num_allocatable_registers_; }
1071 : const int* allocatable_register_codes() const {
1072 : return allocatable_register_codes_;
1073 : }
1074 : // Returns true iff. we must check float register aliasing.
1075 : bool check_fp_aliasing() const { return check_fp_aliasing_; }
1076 :
1077 : // TODO(mtrofin): explain why splitting in gap START is always OK.
1078 : LifetimePosition GetSplitPositionForInstruction(const LiveRange* range,
1079 : int instruction_index);
1080 :
1081 22643379 : Zone* allocation_zone() const { return data()->allocation_zone(); }
1082 :
1083 : // Find the optimal split for ranges defined by a memory operand, e.g.
1084 : // constants or function parameters passed on the stack.
1085 : void SplitAndSpillRangesDefinedByMemoryOperand();
1086 :
1087 : // Split the given range at the given position.
1088 : // If range starts at or after the given position then the
1089 : // original range is returned.
1090 : // Otherwise returns the live range that starts at pos and contains
1091 : // all uses from the original range that follow pos. Uses at pos will
1092 : // still be owned by the original range after splitting.
1093 : LiveRange* SplitRangeAt(LiveRange* range, LifetimePosition pos);
1094 :
1095 120460812 : bool CanProcessRange(LiveRange* range) const {
1096 481953689 : return range != nullptr && !range->IsEmpty() && range->kind() == mode();
1097 : }
1098 :
1099 : // Split the given range in a position from the interval [start, end].
1100 : LiveRange* SplitBetween(LiveRange* range, LifetimePosition start,
1101 : LifetimePosition end);
1102 :
1103 : // Find a lifetime position in the interval [start, end] which
1104 : // is optimal for splitting: it is either header of the outermost
1105 : // loop covered by this interval or the latest possible position.
1106 : LifetimePosition FindOptimalSplitPos(LifetimePosition start,
1107 : LifetimePosition end);
1108 :
1109 : void Spill(LiveRange* range);
1110 :
1111 : // If we are trying to spill a range inside the loop try to
1112 : // hoist spill position out to the point just before the loop.
1113 : LifetimePosition FindOptimalSpillingPos(LiveRange* range,
1114 : LifetimePosition pos);
1115 :
1116 : const ZoneVector<TopLevelLiveRange*>& GetFixedRegisters() const;
1117 : const char* RegisterName(int allocation_index) const;
1118 :
1119 : private:
1120 : RegisterAllocationData* const data_;
1121 : const RegisterKind mode_;
1122 : const int num_registers_;
1123 : int num_allocatable_registers_;
1124 : const int* allocatable_register_codes_;
1125 : bool check_fp_aliasing_;
1126 :
1127 : private:
1128 : bool no_combining_;
1129 :
1130 : DISALLOW_COPY_AND_ASSIGN(RegisterAllocator);
1131 : };
1132 :
// Phase 4 of register allocation: a linear-scan allocator that processes live
// ranges ordered by start position, assigning registers and splitting/
// spilling ranges when conflicts arise.
class LinearScanAllocator final : public RegisterAllocator {
 public:
  LinearScanAllocator(RegisterAllocationData* data, RegisterKind kind,
                      Zone* local_zone);

  // Phase 4: compute register assignments.
  void AllocateRegisters();

 private:
  // A top-level live range paired with the register it is expected to occupy.
  // Hashing and equality consider only the range identity, not the register.
  struct RangeWithRegister {
    TopLevelLiveRange* range;
    int expected_register;
    struct Hash {
      size_t operator()(const RangeWithRegister item) const {
        return item.range->vreg();
      }
    };
    struct Equals {
      bool operator()(const RangeWithRegister one,
                      const RangeWithRegister two) const {
        return one.range == two.range;
      }
    };

    explicit RangeWithRegister(LiveRange* a_range)
        : range(a_range->TopLevel()),
          expected_register(a_range->assigned_register()) {}
    RangeWithRegister(TopLevelLiveRange* toplevel, int reg)
        : range(toplevel), expected_register(reg) {}
  };

  using RangeWithRegisterSet =
      ZoneUnorderedSet<RangeWithRegister, RangeWithRegister::Hash,
                       RangeWithRegister::Equals>;

  // Helpers for restoring/adjusting allocation state at control-flow joins.
  void MaybeUndoPreviousSplit(LiveRange* range);
  void SpillNotLiveRanges(RangeWithRegisterSet& to_be_live,
                          LifetimePosition position);
  LiveRange* AssignRegisterOnReload(LiveRange* range, int reg);
  void ReloadLiveRanges(RangeWithRegisterSet& to_be_live,
                        LifetimePosition position);

  bool BlockOrImmediatePredecessorIsDeferred(const InstructionBlock* block);

  // Ordering for the unhandled queue; delegates to
  // LiveRange::ShouldBeAllocatedBefore.
  struct LiveRangeOrdering {
    bool operator()(const LiveRange* a, const LiveRange* b) const {
      return a->ShouldBeAllocatedBefore(b);
    }
  };
  using LiveRangeQueue = ZoneMultiset<LiveRange*, LiveRangeOrdering>;
  LiveRangeQueue& unhandled_live_ranges() { return unhandled_live_ranges_; }
  ZoneVector<LiveRange*>& active_live_ranges() { return active_live_ranges_; }
  ZoneVector<LiveRange*>& inactive_live_ranges() {
    return inactive_live_ranges_;
  }

  void SetLiveRangeAssignedRegister(LiveRange* range, int reg);

  // Helper methods for updating the live range lists.
  void AddToActive(LiveRange* range);
  void AddToInactive(LiveRange* range);
  void AddToUnhandled(LiveRange* range);
  ZoneVector<LiveRange*>::iterator ActiveToHandled(
      ZoneVector<LiveRange*>::iterator it);
  ZoneVector<LiveRange*>::iterator ActiveToInactive(
      ZoneVector<LiveRange*>::iterator it, LifetimePosition position);
  ZoneVector<LiveRange*>::iterator InactiveToHandled(
      ZoneVector<LiveRange*>::iterator it);
  ZoneVector<LiveRange*>::iterator InactiveToActive(
      ZoneVector<LiveRange*>::iterator it, LifetimePosition position);

  // Advances the active/inactive lists so they reflect the state at
  // |position|.
  void ForwardStateTo(LifetimePosition position);

  // Helper methods for choosing state after control flow events.

  bool ConsiderBlockForControlFlow(InstructionBlock* current_block,
                                   RpoNumber predecessor);
  RpoNumber ChooseOneOfTwoPredecessorStates(InstructionBlock* current_block,
                                            LifetimePosition boundary);
  void ComputeStateFromManyPredecessors(InstructionBlock* current_block,
                                        RangeWithRegisterSet* to_be_live);

  // Helper methods for allocating registers.
  bool TryReuseSpillForPhi(TopLevelLiveRange* range);
  int PickRegisterThatIsAvailableLongest(
      LiveRange* current, int hint_reg,
      const Vector<LifetimePosition>& free_until_pos);
  bool TryAllocateFreeReg(LiveRange* range,
                          const Vector<LifetimePosition>& free_until_pos);
  bool TryAllocatePreferredReg(LiveRange* range,
                               const Vector<LifetimePosition>& free_until_pos);
  void GetFPRegisterSet(MachineRepresentation rep, int* num_regs,
                        int* num_codes, const int** codes) const;
  void FindFreeRegistersForRange(LiveRange* range,
                                 Vector<LifetimePosition> free_until_pos);
  void ProcessCurrentRange(LiveRange* current);
  void AllocateBlockedReg(LiveRange* range);
  bool TrySplitAndSpillSplinter(LiveRange* range);

  // Spill the given live range after position pos.
  void SpillAfter(LiveRange* range, LifetimePosition pos);

  // Spill the given live range after position [start] and up to position
  // [end].
  void SpillBetween(LiveRange* range, LifetimePosition start,
                    LifetimePosition end);

  // Spill the given live range after position [start] and up to position
  // [end]. Range is guaranteed to be spilled at least until position [until].
  void SpillBetweenUntil(LiveRange* range, LifetimePosition start,
                         LifetimePosition until, LifetimePosition end);

  void SplitAndSpillIntersecting(LiveRange* range);

  // Debug/tracing output of the allocation state.
  void PrintRangeRow(std::ostream& os, const TopLevelLiveRange* toplevel);

  void PrintRangeOverview(std::ostream& os);

  LiveRangeQueue unhandled_live_ranges_;
  ZoneVector<LiveRange*> active_live_ranges_;
  ZoneVector<LiveRange*> inactive_live_ranges_;

  // Approximate at what position the set of ranges will change next.
  // Used to avoid scanning for updates even if none are present.
  LifetimePosition next_active_ranges_change_;
  LifetimePosition next_inactive_ranges_change_;

#ifdef DEBUG
  // Debug-only bookkeeping of the current allocation position.
  LifetimePosition allocation_finger_;
#endif

  DISALLOW_COPY_AND_ASSIGN(LinearScanAllocator);
};
1265 :
// Determines where spilled live ranges need their spill slots; the actual
// policy lives in LocateSpillSlots() in the implementation file.
class SpillSlotLocator final : public ZoneObject {
 public:
  explicit SpillSlotLocator(RegisterAllocationData* data);

  void LocateSpillSlots();

 private:
  RegisterAllocationData* data() const { return data_; }

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(SpillSlotLocator);
};
1279 :
// Phases 5 and 6 of register allocation: merges spill ranges into actual
// frame slots and writes the final register/slot assignments back into the
// instruction operands.
class OperandAssigner final : public ZoneObject {
 public:
  explicit OperandAssigner(RegisterAllocationData* data);

  // Phase 5: assign spill slots.
  void AssignSpillSlots();

  // Phase 6: commit assignment.
  void CommitAssignment();

 private:
  RegisterAllocationData* data() const { return data_; }

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(OperandAssigner);
};
1297 :
// Phase 7 of register allocation: fills in the reference (pointer) maps used
// by the GC with the final locations of tagged values.
class ReferenceMapPopulator final : public ZoneObject {
 public:
  explicit ReferenceMapPopulator(RegisterAllocationData* data);

  // Phase 7: compute values for pointer maps.
  void PopulateReferenceMaps();

 private:
  RegisterAllocationData* data() const { return data_; }

  // Verification helper: checks the safe points appear in increasing order.
  bool SafePointsAreInOrder() const;

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(ReferenceMapPopulator);
};
1314 :
1315 : class LiveRangeBoundArray;
1316 : // Insert moves of the form
1317 : //
1318 : // Operand(child_(k+1)) = Operand(child_k)
1319 : //
1320 : // where child_k and child_(k+1) are consecutive children of a range (so
1321 : // child_k->next() == child_(k+1)), and Operand(...) refers to the
1322 : // assigned operand, be it a register or a slot.
// Phases 8 and 9 of register allocation: inserts the moves that connect the
// split children of a live range, within blocks and across block boundaries.
class LiveRangeConnector final : public ZoneObject {
 public:
  explicit LiveRangeConnector(RegisterAllocationData* data);

  // Phase 8: reconnect split ranges with moves, when the control flow
  // between the ranges is trivial (no branches).
  void ConnectRanges(Zone* local_zone);

  // Phase 9: insert moves to connect ranges across basic blocks, when the
  // control flow between them cannot be trivially resolved, such as joining
  // branches.
  void ResolveControlFlow(Zone* local_zone);

 private:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* code_zone() const { return code()->zone(); }

  bool CanEagerlyResolveControlFlow(const InstructionBlock* block) const;

  // Connects the value arriving as |pred_op| from predecessor |pred| to its
  // expected operand |cur_op| in |block|. NOTE(review): the meaning of the
  // int return value is not visible here — confirm in the .cc file.
  int ResolveControlFlow(const InstructionBlock* block,
                         const InstructionOperand& cur_op,
                         const InstructionBlock* pred,
                         const InstructionOperand& pred_op);

  void CommitSpillsInDeferredBlocks(TopLevelLiveRange* range,
                                    LiveRangeBoundArray* array,
                                    Zone* temp_zone);

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(LiveRangeConnector);
};
1356 :
1357 : } // namespace compiler
1358 : } // namespace internal
1359 : } // namespace v8
1360 :
1361 : #endif // V8_COMPILER_BACKEND_REGISTER_ALLOCATOR_H_
|