Line data Source code
1 : // Copyright 2014 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #ifndef V8_REGISTER_ALLOCATOR_H_
6 : #define V8_REGISTER_ALLOCATOR_H_
7 :
8 : #include "src/base/bits.h"
9 : #include "src/base/compiler-specific.h"
10 : #include "src/compiler/instruction.h"
11 : #include "src/globals.h"
12 : #include "src/ostreams.h"
13 : #include "src/register-configuration.h"
14 : #include "src/zone/zone-containers.h"
15 :
16 : namespace v8 {
17 : namespace internal {
18 : namespace compiler {
19 :
// The two register files the allocator distinguishes: general-purpose
// registers and floating-point/SIMD registers.
enum RegisterKind { GENERAL_REGISTERS, FP_REGISTERS };
21 :
22 : // This class represents a single point of a InstructionOperand's lifetime. For
23 : // each instruction there are four lifetime positions:
24 : //
25 : // [[START, END], [START, END]]
26 : //
27 : // Where the first half position corresponds to
28 : //
29 : // [GapPosition::START, GapPosition::END]
30 : //
31 : // and the second half position corresponds to
32 : //
33 : // [Lifetime::USED_AT_START, Lifetime::USED_AT_END]
34 : //
35 : class LifetimePosition final {
36 : public:
37 : // Return the lifetime position that corresponds to the beginning of
38 : // the gap with the given index.
39 : static LifetimePosition GapFromInstructionIndex(int index) {
40 156037515 : return LifetimePosition(index * kStep);
41 : }
42 : // Return the lifetime position that corresponds to the beginning of
43 : // the instruction with the given index.
44 : static LifetimePosition InstructionFromInstructionIndex(int index) {
45 173326124 : return LifetimePosition(index * kStep + kHalfStep);
46 : }
47 :
48 : static bool ExistsGapPositionBetween(LifetimePosition pos1,
49 : LifetimePosition pos2) {
50 2009115 : if (pos1 > pos2) std::swap(pos1, pos2);
51 2009115 : LifetimePosition next(pos1.value_ + 1);
52 2009115 : if (next.IsGapPosition()) return next < pos2;
53 : return next.NextFullStart() < pos2;
54 : }
55 :
56 : // Returns a numeric representation of this lifetime position.
57 0 : int value() const { return value_; }
58 :
59 : // Returns the index of the instruction to which this lifetime position
60 : // corresponds.
61 : int ToInstructionIndex() const {
62 : DCHECK(IsValid());
63 166875058 : return value_ / kStep;
64 : }
65 :
66 : // Returns true if this lifetime position corresponds to a START value
67 29336894 : bool IsStart() const { return (value_ & (kHalfStep - 1)) == 0; }
68 : // Returns true if this lifetime position corresponds to an END value
69 : bool IsEnd() const { return (value_ & (kHalfStep - 1)) == 1; }
70 : // Returns true if this lifetime position corresponds to a gap START value
71 15833859 : bool IsFullStart() const { return (value_ & (kStep - 1)) == 0; }
72 :
73 123937373 : bool IsGapPosition() const { return (value_ & 0x2) == 0; }
74 : bool IsInstructionPosition() const { return !IsGapPosition(); }
75 :
76 : // Returns the lifetime position for the current START.
77 : LifetimePosition Start() const {
78 : DCHECK(IsValid());
79 257035429 : return LifetimePosition(value_ & ~(kHalfStep - 1));
80 : }
81 :
82 : // Returns the lifetime position for the current gap START.
83 : LifetimePosition FullStart() const {
84 : DCHECK(IsValid());
85 23876371 : return LifetimePosition(value_ & ~(kStep - 1));
86 : }
87 :
88 : // Returns the lifetime position for the current END.
89 : LifetimePosition End() const {
90 : DCHECK(IsValid());
91 172158948 : return LifetimePosition(Start().value_ + kHalfStep / 2);
92 : }
93 :
94 : // Returns the lifetime position for the beginning of the next START.
95 : LifetimePosition NextStart() const {
96 : DCHECK(IsValid());
97 30658343 : return LifetimePosition(Start().value_ + kHalfStep);
98 : }
99 :
100 : // Returns the lifetime position for the beginning of the next gap START.
101 : LifetimePosition NextFullStart() const {
102 : DCHECK(IsValid());
103 24015021 : return LifetimePosition(FullStart().value_ + kStep);
104 : }
105 :
106 : // Returns the lifetime position for the beginning of the previous START.
107 : LifetimePosition PrevStart() const {
108 : DCHECK(IsValid());
109 : DCHECK_LE(kHalfStep, value_);
110 46947314 : return LifetimePosition(Start().value_ - kHalfStep);
111 : }
112 :
113 : // Constructs the lifetime position which does not correspond to any
114 : // instruction.
115 1128892760 : LifetimePosition() : value_(-1) {}
116 :
117 : // Returns true if this lifetime positions corrensponds to some
118 : // instruction.
119 658971377 : bool IsValid() const { return value_ != -1; }
120 :
121 : bool operator<(const LifetimePosition& that) const {
122 2247003099 : return this->value_ < that.value_;
123 : }
124 :
125 : bool operator<=(const LifetimePosition& that) const {
126 1335898693 : return this->value_ <= that.value_;
127 : }
128 :
129 : bool operator==(const LifetimePosition& that) const {
130 16528434 : return this->value_ == that.value_;
131 : }
132 :
133 : bool operator!=(const LifetimePosition& that) const {
134 : return this->value_ != that.value_;
135 : }
136 :
137 : bool operator>(const LifetimePosition& that) const {
138 220744311 : return this->value_ > that.value_;
139 : }
140 :
141 : bool operator>=(const LifetimePosition& that) const {
142 30614954 : return this->value_ >= that.value_;
143 : }
144 :
145 : void Print() const;
146 :
147 : static inline LifetimePosition Invalid() { return LifetimePosition(); }
148 :
149 : static inline LifetimePosition MaxPosition() {
150 : // We have to use this kind of getter instead of static member due to
151 : // crash bug in GDB.
152 : return LifetimePosition(kMaxInt);
153 : }
154 :
155 : static inline LifetimePosition FromInt(int value) {
156 : return LifetimePosition(value);
157 : }
158 :
159 : private:
160 : static const int kHalfStep = 2;
161 : static const int kStep = 2 * kHalfStep;
162 :
163 : static_assert(base::bits::IsPowerOfTwo(kHalfStep),
164 : "Code relies on kStep and kHalfStep being a power of two");
165 :
166 : explicit LifetimePosition(int value) : value_(value) {}
167 :
168 : int value_;
169 : };
170 :
171 :
172 : std::ostream& operator<<(std::ostream& os, const LifetimePosition pos);
173 :
174 :
175 : // Representation of the non-empty interval [start,end[.
176 : class UseInterval final : public ZoneObject {
177 : public:
178 : UseInterval(LifetimePosition start, LifetimePosition end)
179 199987447 : : start_(start), end_(end), next_(nullptr) {
180 : DCHECK(start < end);
181 : }
182 :
183 : LifetimePosition start() const { return start_; }
184 173101336 : void set_start(LifetimePosition start) { start_ = start; }
185 : LifetimePosition end() const { return end_; }
186 51683170 : void set_end(LifetimePosition end) { end_ = end; }
187 : UseInterval* next() const { return next_; }
188 148545860 : void set_next(UseInterval* next) { next_ = next; }
189 :
190 : // Split this interval at the given position without effecting the
191 : // live range that owns it. The interval must contain the position.
192 : UseInterval* SplitAt(LifetimePosition pos, Zone* zone);
193 :
194 : // If this interval intersects with other return smallest position
195 : // that belongs to both of them.
196 517716875 : LifetimePosition Intersect(const UseInterval* other) const {
197 517716875 : if (other->start() < start_) return other->Intersect(this);
198 397006474 : if (other->start() < end_) return other->start();
199 : return LifetimePosition::Invalid();
200 : }
201 :
202 : bool Contains(LifetimePosition point) const {
203 1406697145 : return start_ <= point && point < end_;
204 : }
205 :
206 : // Returns the index of the first gap covered by this interval.
207 : int FirstGapIndex() const {
208 : int ret = start_.ToInstructionIndex();
209 52369229 : if (start_.IsInstructionPosition()) {
210 31216590 : ++ret;
211 : }
212 : return ret;
213 : }
214 :
215 : // Returns the index of the last gap covered by this interval.
216 : int LastGapIndex() const {
217 : int ret = end_.ToInstructionIndex();
218 43037663 : if (end_.IsGapPosition() && end_.IsStart()) {
219 5789798 : --ret;
220 : }
221 : return ret;
222 : }
223 :
224 : private:
225 : LifetimePosition start_;
226 : LifetimePosition end_;
227 : UseInterval* next_;
228 :
229 : DISALLOW_COPY_AND_ASSIGN(UseInterval);
230 : };
231 :
232 :
// Constraint a use places on its value: no constraint, must be in a
// register, or must be in a stack slot.
enum class UsePositionType : uint8_t { kAny, kRequiresRegister, kRequiresSlot };


// Where a use position's register hint comes from; kUnresolved marks hints
// that have not been resolved to a concrete source yet.
enum class UsePositionHintType : uint8_t {
  kNone,
  kOperand,
  kUsePos,
  kPhi,
  kUnresolved
};


// Sentinel register code meaning "no register assigned". One past the
// largest valid general register code.
static const int32_t kUnassignedRegister =
    RegisterConfiguration::kMaxGeneralRegisters;

// The sentinel must also be out of range for FP register codes.
static_assert(kUnassignedRegister <= RegisterConfiguration::kMaxFPRegisters,
              "kUnassignedRegister too small");
250 :
// Representation of a use position: a single point in the instruction
// stream where a value is used, together with the constraint the use
// imposes (UsePositionType) and an optional register hint.
class V8_EXPORT_PRIVATE UsePosition final
    : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  UsePosition(LifetimePosition pos, InstructionOperand* operand, void* hint,
              UsePositionHintType hint_type);

  InstructionOperand* operand() const { return operand_; }
  bool HasOperand() const { return operand_ != nullptr; }

  // True if the allocator would benefit from placing this use in a register
  // (even when a register is not strictly required).
  bool RegisterIsBeneficial() const {
    return RegisterBeneficialField::decode(flags_);
  }
  UsePositionType type() const { return TypeField::decode(flags_); }
  void set_type(UsePositionType type, bool register_beneficial);

  LifetimePosition pos() const { return pos_; }

  // Use positions are chained into a singly-linked list.
  UsePosition* next() const { return next_; }
  void set_next(UsePosition* next) { next_ = next; }

  // For hinting only.
  void set_assigned_register(int register_code) {
    flags_ = AssignedRegisterField::update(flags_, register_code);
  }

  UsePositionHintType hint_type() const {
    return HintTypeField::decode(flags_);
  }
  bool HasHint() const;
  bool HintRegister(int* register_code) const;
  void SetHint(UsePosition* use_pos);
  void ResolveHint(UsePosition* use_pos);
  // A hint is resolved once its source is known (anything but kUnresolved).
  bool IsResolved() const {
    return hint_type() != UsePositionHintType::kUnresolved;
  }
  static UsePositionHintType HintTypeForOperand(const InstructionOperand& op);

 private:
  // flags_ layout: use type (2 bits), hint type (3 bits),
  // register-beneficial flag (1 bit), assigned register code (6 bits).
  typedef BitField<UsePositionType, 0, 2> TypeField;
  typedef BitField<UsePositionHintType, 2, 3> HintTypeField;
  typedef BitField<bool, 5, 1> RegisterBeneficialField;
  typedef BitField<int32_t, 6, 6> AssignedRegisterField;

  InstructionOperand* const operand_;
  // Untyped hint source; presumably interpreted according to hint_type()
  // (operand, use position, or phi) — verify in the .cc file.
  void* hint_;
  UsePosition* next_;
  LifetimePosition const pos_;
  uint32_t flags_;

  DISALLOW_COPY_AND_ASSIGN(UsePosition);
};
303 :
304 :
305 : class SpillRange;
306 : class RegisterAllocationData;
307 : class TopLevelLiveRange;
308 : class LiveRangeGroup;
309 :
// Representation of SSA values' live ranges as a collection of (continuous)
// intervals over the instruction ordering. A range may be split into child
// ranges; all children share one TopLevelLiveRange and are linked via
// next().
class V8_EXPORT_PRIVATE LiveRange : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  UseInterval* first_interval() const { return first_interval_; }
  UsePosition* first_pos() const { return first_pos_; }
  TopLevelLiveRange* TopLevel() { return top_level_; }
  const TopLevelLiveRange* TopLevel() const { return top_level_; }

  bool IsTopLevel() const;

  // Next child of the same top-level range, if any.
  LiveRange* next() const { return next_; }

  // Id unique among children and splinters of the same virtual register.
  int relative_id() const { return relative_id_; }

  bool IsEmpty() const { return first_interval() == nullptr; }

  InstructionOperand GetAssignedOperand() const;

  MachineRepresentation representation() const {
    return RepresentationField::decode(bits_);
  }

  int assigned_register() const { return AssignedRegisterField::decode(bits_); }
  bool HasRegisterAssigned() const {
    return assigned_register() != kUnassignedRegister;
  }
  void set_assigned_register(int reg);
  void UnsetAssignedRegister();

  bool spilled() const { return SpilledField::decode(bits_); }
  void Spill();

  RegisterKind kind() const;

  // Returns use position in this live range that follows both start
  // and last processed use position.
  UsePosition* NextUsePosition(LifetimePosition start) const;

  // Returns use position for which register is required in this live
  // range and which follows both start and last processed use position
  UsePosition* NextRegisterPosition(LifetimePosition start) const;

  // Returns the first use position requiring stack slot, or nullptr.
  UsePosition* NextSlotPosition(LifetimePosition start) const;

  // Returns use position for which register is beneficial in this live
  // range and which follows both start and last processed use position
  UsePosition* NextUsePositionRegisterIsBeneficial(
      LifetimePosition start) const;

  // Returns lifetime position for which register is beneficial in this live
  // range and which follows both start and last processed use position.
  LifetimePosition NextLifetimePositionRegisterIsBeneficial(
      const LifetimePosition& start) const;

  // Returns use position for which register is beneficial in this live
  // range and which precedes start.
  UsePosition* PreviousUsePositionRegisterIsBeneficial(
      LifetimePosition start) const;

  // Can this live range be spilled at this position.
  bool CanBeSpilled(LifetimePosition pos) const;

  // Splitting primitive used by both splitting and splintering members.
  // Performs the split, but does not link the resulting ranges.
  // The given position must follow the start of the range.
  // All uses following the given position will be moved from this
  // live range to the result live range.
  // The current range will terminate at position, while result will start from
  // position.
  enum HintConnectionOption : bool {
    DoNotConnectHints = false,
    ConnectHints = true
  };
  UsePosition* DetachAt(LifetimePosition position, LiveRange* result,
                        Zone* zone, HintConnectionOption connect_hints);

  // Detaches at position, and then links the resulting ranges. Returns the
  // child, which starts at position.
  LiveRange* SplitAt(LifetimePosition position, Zone* zone);

  // Returns nullptr when no register is hinted, otherwise sets register_index.
  UsePosition* FirstHintPosition(int* register_index) const;
  UsePosition* FirstHintPosition() const {
    int register_index;
    return FirstHintPosition(&register_index);
  }

  UsePosition* current_hint_position() const {
    DCHECK(current_hint_position_ == FirstHintPosition());
    return current_hint_position_;
  }

  // Start of the first interval; the range must be non-empty.
  LifetimePosition Start() const {
    DCHECK(!IsEmpty());
    return first_interval()->start();
  }

  // End of the last interval; the range must be non-empty.
  LifetimePosition End() const {
    DCHECK(!IsEmpty());
    return last_interval_->end();
  }

  bool ShouldBeAllocatedBefore(const LiveRange* other) const;
  bool CanCover(LifetimePosition position) const;
  bool Covers(LifetimePosition position) const;
  LifetimePosition FirstIntersection(LiveRange* other) const;

  // Debug-style consistency check over intervals and use positions.
  void VerifyChildStructure() const {
    VerifyIntervals();
    VerifyPositions();
  }

  void ConvertUsesToOperand(const InstructionOperand& op,
                            const InstructionOperand& spill_op);
  void SetUseHints(int register_index);
  void UnsetUseHints() { SetUseHints(kUnassignedRegister); }

  void Print(const RegisterConfiguration* config, bool with_children) const;
  void Print(bool with_children) const;

 private:
  friend class TopLevelLiveRange;
  explicit LiveRange(int relative_id, MachineRepresentation rep,
                     TopLevelLiveRange* top_level);

  void UpdateParentForAllChildren(TopLevelLiveRange* new_top_level);

  void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); }

  UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
  void AdvanceLastProcessedMarker(UseInterval* to_start_of,
                                  LifetimePosition but_not_past) const;

  void VerifyPositions() const;
  void VerifyIntervals() const;

  // bits_ layout; bits 1-5 are used by TopLevelLiveRange's fields.
  typedef BitField<bool, 0, 1> SpilledField;
  typedef BitField<int32_t, 6, 6> AssignedRegisterField;
  typedef BitField<MachineRepresentation, 12, 8> RepresentationField;

  // Unique among children and splinters of the same virtual register.
  int relative_id_;
  uint32_t bits_;
  UseInterval* last_interval_;
  UseInterval* first_interval_;
  UsePosition* first_pos_;
  TopLevelLiveRange* top_level_;
  LiveRange* next_;
  // This is used as a cache, it doesn't affect correctness.
  mutable UseInterval* current_interval_;
  // This is used as a cache, it doesn't affect correctness.
  mutable UsePosition* last_processed_use_;
  // This is used as a cache, it's invalid outside of BuildLiveRanges.
  mutable UsePosition* current_hint_position_;
  // Cache the last position splintering stopped at.
  mutable UsePosition* splitting_pointer_;

  DISALLOW_COPY_AND_ASSIGN(LiveRange);
};
471 :
472 :
473 : class LiveRangeGroup final : public ZoneObject {
474 : public:
475 : explicit LiveRangeGroup(Zone* zone) : ranges_(zone) {}
476 : ZoneVector<LiveRange*>& ranges() { return ranges_; }
477 : const ZoneVector<LiveRange*>& ranges() const { return ranges_; }
478 :
479 : int assigned_register() const { return assigned_register_; }
480 : void set_assigned_register(int reg) { assigned_register_ = reg; }
481 :
482 : private:
483 : ZoneVector<LiveRange*> ranges_;
484 : int assigned_register_;
485 : DISALLOW_COPY_AND_ASSIGN(LiveRangeGroup);
486 : };
487 :
// The head of a live-range family: owns the chain of child LiveRanges for
// one virtual register and carries per-vreg state (spill information,
// splinters, phi flags).
class V8_EXPORT_PRIVATE TopLevelLiveRange final : public LiveRange {
 public:
  explicit TopLevelLiveRange(int vreg, MachineRepresentation rep);
  int spill_start_index() const { return spill_start_index_; }

  // Fixed (pre-colored) ranges are encoded with negative vregs.
  bool IsFixed() const { return vreg_ < 0; }

  bool is_phi() const { return IsPhiField::decode(bits_); }
  void set_is_phi(bool value) { bits_ = IsPhiField::update(bits_, value); }

  bool is_non_loop_phi() const { return IsNonLoopPhiField::decode(bits_); }
  void set_is_non_loop_phi(bool value) {
    bits_ = IsNonLoopPhiField::update(bits_, value);
  }

  bool has_slot_use() const { return HasSlotUseField::decode(bits_); }
  void set_has_slot_use(bool value) {
    bits_ = HasSlotUseField::update(bits_, value);
  }

  // Add a new interval or a new use position to this live range.
  void EnsureInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
  void AddUseInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
  void AddUsePosition(UsePosition* pos);

  // Shorten the most recently added interval by setting a new start.
  void ShortenTo(LifetimePosition start);

  // Detaches between start and end, and attributes the resulting range to
  // result.
  // The current range is pointed to as "splintered_from". No parent/child
  // relationship is established between this and result.
  void Splinter(LifetimePosition start, LifetimePosition end, Zone* zone);

  // Assuming other was splintered from this range, embeds other and its
  // children as part of the children sequence of this range.
  void Merge(TopLevelLiveRange* other, Zone* zone);

  // Spill range management.
  void SetSpillRange(SpillRange* spill_range);
  enum class SpillType { kNoSpillType, kSpillOperand, kSpillRange };
  void set_spill_type(SpillType value) {
    bits_ = SpillTypeField::update(bits_, value);
  }
  SpillType spill_type() const { return SpillTypeField::decode(bits_); }
  // Valid only while spill_type() selects the matching union member.
  InstructionOperand* GetSpillOperand() const {
    DCHECK_EQ(SpillType::kSpillOperand, spill_type());
    return spill_operand_;
  }

  SpillRange* GetAllocatedSpillRange() const {
    DCHECK_NE(SpillType::kSpillOperand, spill_type());
    return spill_range_;
  }

  SpillRange* GetSpillRange() const {
    DCHECK_EQ(SpillType::kSpillRange, spill_type());
    return spill_range_;
  }
  bool HasNoSpillType() const {
    return spill_type() == SpillType::kNoSpillType;
  }
  bool HasSpillOperand() const {
    return spill_type() == SpillType::kSpillOperand;
  }
  bool HasSpillRange() const { return spill_type() == SpillType::kSpillRange; }

  AllocatedOperand GetSpillRangeOperand() const;

  void RecordSpillLocation(Zone* zone, int gap_index,
                           InstructionOperand* operand);
  void SetSpillOperand(InstructionOperand* operand);
  // Keeps the earliest spill start seen so far.
  void SetSpillStartIndex(int start) {
    spill_start_index_ = Min(start, spill_start_index_);
  }

  void CommitSpillMoves(InstructionSequence* sequence,
                        const InstructionOperand& operand,
                        bool might_be_duplicated);

  // If all the children of this range are spilled in deferred blocks, and if
  // for any non-spilled child with a use position requiring a slot, that range
  // is contained in a deferred block, mark the range as
  // IsSpilledOnlyInDeferredBlocks, so that we avoid spilling at definition,
  // and instead let the LiveRangeConnector perform the spills within the
  // deferred blocks. If so, we insert here spills for non-spilled ranges
  // with slot use positions.
  void TreatAsSpilledInDeferredBlock(Zone* zone, int total_block_count) {
    spill_start_index_ = -1;
    spilled_in_deferred_blocks_ = true;
    // Switch the union from the insertion list to the block bit vector.
    spill_move_insertion_locations_ = nullptr;
    list_of_blocks_requiring_spill_operands_ =
        new (zone) BitVector(total_block_count, zone);
  }

  void CommitSpillInDeferredBlocks(RegisterAllocationData* data,
                                   const InstructionOperand& spill_operand,
                                   BitVector* necessary_spill_points);

  TopLevelLiveRange* splintered_from() const { return splintered_from_; }
  bool IsSplinter() const { return splintered_from_ != nullptr; }
  bool MayRequireSpillRange() const {
    DCHECK(!IsSplinter());
    return !HasSpillOperand() && spill_range_ == nullptr;
  }
  void UpdateSpillRangePostMerge(TopLevelLiveRange* merged);
  int vreg() const { return vreg_; }

#if DEBUG
  int debug_virt_reg() const;
#endif

  void Verify() const;
  void VerifyChildrenInOrder() const;

  // Splinters delegate child-id allocation to their splinter parent so ids
  // stay unique across the whole family.
  int GetNextChildId() {
    return IsSplinter() ? splintered_from()->GetNextChildId()
                        : ++last_child_id_;
  }

  int GetChildCount() const { return last_child_id_ + 1; }

  bool IsSpilledOnlyInDeferredBlocks() const {
    return spilled_in_deferred_blocks_;
  }

  struct SpillMoveInsertionList;

  SpillMoveInsertionList* GetSpillMoveInsertionLocations() const {
    DCHECK(!IsSpilledOnlyInDeferredBlocks());
    return spill_move_insertion_locations_;
  }
  TopLevelLiveRange* splinter() const { return splinter_; }
  // Attach a splinter (at most one) and wire up its id/spill state.
  void SetSplinter(TopLevelLiveRange* splinter) {
    DCHECK_NULL(splinter_);
    DCHECK_NOT_NULL(splinter);

    splinter_ = splinter;
    splinter->relative_id_ = GetNextChildId();
    splinter->set_spill_type(spill_type());
    splinter->SetSplinteredFrom(this);
  }

  void MarkHasPreassignedSlot() { has_preassigned_slot_ = true; }
  bool has_preassigned_slot() const { return has_preassigned_slot_; }

  void AddBlockRequiringSpillOperand(RpoNumber block_id) {
    DCHECK(IsSpilledOnlyInDeferredBlocks());
    GetListOfBlocksRequiringSpillOperands()->Add(block_id.ToInt());
  }

  BitVector* GetListOfBlocksRequiringSpillOperands() const {
    DCHECK(IsSpilledOnlyInDeferredBlocks());
    return list_of_blocks_requiring_spill_operands_;
  }

 private:
  void SetSplinteredFrom(TopLevelLiveRange* splinter_parent);

  // Continues the bits_ layout started in LiveRange (bit 0 is SpilledField).
  typedef BitField<bool, 1, 1> HasSlotUseField;
  typedef BitField<bool, 2, 1> IsPhiField;
  typedef BitField<bool, 3, 1> IsNonLoopPhiField;
  typedef BitField<SpillType, 4, 2> SpillTypeField;

  int vreg_;
  int last_child_id_;
  TopLevelLiveRange* splintered_from_;
  union {
    // Correct value determined by spill_type()
    InstructionOperand* spill_operand_;
    SpillRange* spill_range_;
  };

  // Active member selected by IsSpilledOnlyInDeferredBlocks().
  union {
    SpillMoveInsertionList* spill_move_insertion_locations_;
    BitVector* list_of_blocks_requiring_spill_operands_;
  };

  // TODO(mtrofin): generalize spilling after definition, currently specialized
  // just for spill in a single deferred block.
  bool spilled_in_deferred_blocks_;
  int spill_start_index_;
  UsePosition* last_pos_;
  TopLevelLiveRange* splinter_;
  bool has_preassigned_slot_;

  DISALLOW_COPY_AND_ASSIGN(TopLevelLiveRange);
};
676 :
677 :
// Pairs a live range with the register configuration needed to print
// readable register names; used with the stream operator below.
struct PrintableLiveRange {
  const RegisterConfiguration* register_configuration_;
  const LiveRange* range_;
};


std::ostream& operator<<(std::ostream& os,
                         const PrintableLiveRange& printable_range);
686 :
687 :
// A stack-slot lifetime: the union of the use intervals of the live ranges
// spilled into one slot. Disjoint spill ranges can be merged to share a slot.
class SpillRange final : public ZoneObject {
 public:
  static const int kUnassignedSlot = -1;
  SpillRange(TopLevelLiveRange* range, Zone* zone);

  UseInterval* interval() const { return use_interval_; }

  bool IsEmpty() const { return live_ranges_.empty(); }
  // Merges other into this range if they do not overlap.
  bool TryMerge(SpillRange* other);
  bool HasSlot() const { return assigned_slot_ != kUnassignedSlot; }

  // A slot may be assigned exactly once.
  void set_assigned_slot(int index) {
    DCHECK_EQ(kUnassignedSlot, assigned_slot_);
    assigned_slot_ = index;
  }
  int assigned_slot() {
    DCHECK_NE(kUnassignedSlot, assigned_slot_);
    return assigned_slot_;
  }
  const ZoneVector<TopLevelLiveRange*>& live_ranges() const {
    return live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; }
  // Spill slots can be 4, 8, or 16 bytes wide.
  int byte_width() const { return byte_width_; }
  void Print() const;

 private:
  LifetimePosition End() const { return end_position_; }
  bool IsIntersectingWith(SpillRange* other) const;
  // Merge intervals, making sure the use intervals are sorted
  void MergeDisjointIntervals(UseInterval* other);

  ZoneVector<TopLevelLiveRange*> live_ranges_;
  UseInterval* use_interval_;
  LifetimePosition end_position_;
  int assigned_slot_;
  int byte_width_;

  DISALLOW_COPY_AND_ASSIGN(SpillRange);
};
729 :
730 :
731 : class RegisterAllocationData final : public ZoneObject {
732 : public:
733 : class PhiMapValue : public ZoneObject {
734 : public:
735 : PhiMapValue(PhiInstruction* phi, const InstructionBlock* block, Zone* zone);
736 :
737 : const PhiInstruction* phi() const { return phi_; }
738 : const InstructionBlock* block() const { return block_; }
739 :
740 : // For hinting.
741 : int assigned_register() const { return assigned_register_; }
742 : void set_assigned_register(int register_code) {
743 : DCHECK_EQ(assigned_register_, kUnassignedRegister);
744 1292845 : assigned_register_ = register_code;
745 : }
746 : void UnsetAssignedRegister() { assigned_register_ = kUnassignedRegister; }
747 :
748 : void AddOperand(InstructionOperand* operand);
749 : void CommitAssignment(const InstructionOperand& operand);
750 :
751 : private:
752 : PhiInstruction* const phi_;
753 : const InstructionBlock* const block_;
754 : ZoneVector<InstructionOperand*> incoming_operands_;
755 : int assigned_register_;
756 : };
757 : typedef ZoneMap<int, PhiMapValue*> PhiMap;
758 :
759 : struct DelayedReference {
760 : ReferenceMap* map;
761 : InstructionOperand* operand;
762 : };
763 : typedef ZoneVector<DelayedReference> DelayedReferences;
764 : typedef ZoneVector<std::pair<TopLevelLiveRange*, int>>
765 : RangesWithPreassignedSlots;
766 :
767 : RegisterAllocationData(const RegisterConfiguration* config,
768 : Zone* allocation_zone, Frame* frame,
769 : InstructionSequence* code,
770 : const char* debug_name = nullptr);
771 :
772 : const ZoneVector<TopLevelLiveRange*>& live_ranges() const {
773 : return live_ranges_;
774 : }
775 : ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; }
776 : const ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() const {
777 : return fixed_live_ranges_;
778 : }
779 : ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() {
780 : return fixed_live_ranges_;
781 : }
782 : ZoneVector<TopLevelLiveRange*>& fixed_float_live_ranges() {
783 : return fixed_float_live_ranges_;
784 : }
785 : const ZoneVector<TopLevelLiveRange*>& fixed_float_live_ranges() const {
786 : return fixed_float_live_ranges_;
787 : }
788 : ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() {
789 : return fixed_double_live_ranges_;
790 : }
791 : const ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() const {
792 : return fixed_double_live_ranges_;
793 : }
794 : ZoneVector<TopLevelLiveRange*>& fixed_simd128_live_ranges() {
795 : return fixed_simd128_live_ranges_;
796 : }
797 : const ZoneVector<TopLevelLiveRange*>& fixed_simd128_live_ranges() const {
798 : return fixed_simd128_live_ranges_;
799 : }
800 : ZoneVector<BitVector*>& live_in_sets() { return live_in_sets_; }
801 : ZoneVector<BitVector*>& live_out_sets() { return live_out_sets_; }
802 : ZoneVector<SpillRange*>& spill_ranges() { return spill_ranges_; }
803 : DelayedReferences& delayed_references() { return delayed_references_; }
804 : InstructionSequence* code() const { return code_; }
805 : // This zone is for data structures only needed during register allocation
806 : // phases.
807 : Zone* allocation_zone() const { return allocation_zone_; }
808 : // This zone is for InstructionOperands and moves that live beyond register
809 : // allocation.
810 34297710 : Zone* code_zone() const { return code()->zone(); }
811 : Frame* frame() const { return frame_; }
812 : const char* debug_name() const { return debug_name_; }
813 : const RegisterConfiguration* config() const { return config_; }
814 :
815 : MachineRepresentation RepresentationFor(int virtual_register);
816 :
817 : TopLevelLiveRange* GetOrCreateLiveRangeFor(int index);
818 : // Creates a new live range.
819 : TopLevelLiveRange* NewLiveRange(int index, MachineRepresentation rep);
820 : TopLevelLiveRange* NextLiveRange(MachineRepresentation rep);
821 :
822 : SpillRange* AssignSpillRangeToLiveRange(TopLevelLiveRange* range);
823 : SpillRange* CreateSpillRangeForLiveRange(TopLevelLiveRange* range);
824 :
825 : MoveOperands* AddGapMove(int index, Instruction::GapPosition position,
826 : const InstructionOperand& from,
827 : const InstructionOperand& to);
828 :
829 27039904 : bool IsReference(TopLevelLiveRange* top_range) const {
830 : return code()->IsReference(top_range->vreg());
831 : }
832 :
833 : bool ExistsUseWithoutDefinition();
834 : bool RangesDefinedInDeferredStayInDeferred();
835 :
836 : void MarkAllocated(MachineRepresentation rep, int index);
837 :
838 : PhiMapValue* InitializePhiMap(const InstructionBlock* block,
839 : PhiInstruction* phi);
840 : PhiMapValue* GetPhiMapValueFor(TopLevelLiveRange* top_range);
841 : PhiMapValue* GetPhiMapValueFor(int virtual_register);
842 : bool IsBlockBoundary(LifetimePosition pos) const;
843 :
  // Mutable access to ranges that come with a pre-assigned spill slot.
  RangesWithPreassignedSlots& preassigned_slot_ranges() {
    return preassigned_slot_ranges_;
  }
847 :
848 : private:
849 : int GetNextLiveRangeId();
850 :
851 : Zone* const allocation_zone_;
852 : Frame* const frame_;
853 : InstructionSequence* const code_;
854 : const char* const debug_name_;
855 : const RegisterConfiguration* const config_;
856 : PhiMap phi_map_;
857 : ZoneVector<BitVector*> live_in_sets_;
858 : ZoneVector<BitVector*> live_out_sets_;
859 : ZoneVector<TopLevelLiveRange*> live_ranges_;
860 : ZoneVector<TopLevelLiveRange*> fixed_live_ranges_;
861 : ZoneVector<TopLevelLiveRange*> fixed_float_live_ranges_;
862 : ZoneVector<TopLevelLiveRange*> fixed_double_live_ranges_;
863 : ZoneVector<TopLevelLiveRange*> fixed_simd128_live_ranges_;
864 : ZoneVector<SpillRange*> spill_ranges_;
865 : DelayedReferences delayed_references_;
866 : BitVector* assigned_registers_;
867 : BitVector* assigned_double_registers_;
868 : int virtual_register_count_;
869 : RangesWithPreassignedSlots preassigned_slot_ranges_;
870 :
871 : DISALLOW_COPY_AND_ASSIGN(RegisterAllocationData);
872 : };
873 :
874 :
// Runs the first two phases of register allocation: materializing
// fixed-register operand constraints as explicit gap moves, then
// deconstructing SSA form by lowering phis to moves.
class ConstraintBuilder final : public ZoneObject {
 public:
  explicit ConstraintBuilder(RegisterAllocationData* data);

  // Phase 1: insert moves to account for fixed register operands.
  void MeetRegisterConstraints();

  // Phase 2: deconstruct SSA by inserting moves in successors and the headers
  // of blocks containing phis.
  void ResolvePhis();

 private:
  // Shorthand accessors into the shared allocation data.
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* allocation_zone() const { return data()->allocation_zone(); }

  // NOTE(review): presumably rewrites |operand| to its fixed
  // register/slot equivalent, with |is_tagged| requesting reference
  // tracking — confirm against the implementation.
  InstructionOperand* AllocateFixed(UnallocatedOperand* operand, int pos,
                                    bool is_tagged);
  void MeetRegisterConstraints(const InstructionBlock* block);
  void MeetConstraintsBefore(int index);
  void MeetConstraintsAfter(int index);
  void MeetRegisterConstraintsForLastInstructionInBlock(
      const InstructionBlock* block);
  void ResolvePhis(const InstructionBlock* block);

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(ConstraintBuilder);
};
904 :
905 :
// Phase 3 of register allocation: performs liveness analysis and builds
// the live ranges and use positions consumed by the allocator proper.
class LiveRangeBuilder final : public ZoneObject {
 public:
  explicit LiveRangeBuilder(RegisterAllocationData* data, Zone* local_zone);

  // Phase 3: compute liveness of all virtual registers.
  void BuildLiveRanges();
  static BitVector* ComputeLiveOut(const InstructionBlock* block,
                                   RegisterAllocationData* data);

 private:
  // Shorthand accessors into the shared allocation data.
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* allocation_zone() const { return data()->allocation_zone(); }
  Zone* code_zone() const { return code()->zone(); }
  const RegisterConfiguration* config() const { return data()->config(); }
  ZoneVector<BitVector*>& live_in_sets() const {
    return data()->live_in_sets();
  }

  // Verification.
  void Verify() const;
  bool IntervalStartsAtBlockBoundary(const UseInterval* interval) const;
  bool IntervalPredecessorsCoveredByRange(const UseInterval* interval,
                                          const TopLevelLiveRange* range) const;
  bool NextIntervalStartsInDifferentBlocks(const UseInterval* interval) const;

  // Liveness analysis support.
  void AddInitialIntervals(const InstructionBlock* block, BitVector* live_out);
  void ProcessInstructions(const InstructionBlock* block, BitVector* live);
  void ProcessPhis(const InstructionBlock* block, BitVector* live);
  void ProcessLoopHeader(const InstructionBlock* block, BitVector* live);

  // Fixed (pre-colored) ranges get negative ids, keeping them disjoint from
  // virtual register numbers.
  static int FixedLiveRangeID(int index) { return -index - 1; }
  int FixedFPLiveRangeID(int index, MachineRepresentation rep);
  TopLevelLiveRange* FixedLiveRangeFor(int index);
  TopLevelLiveRange* FixedFPLiveRangeFor(int index, MachineRepresentation rep);

  void MapPhiHint(InstructionOperand* operand, UsePosition* use_pos);
  void ResolvePhiHint(InstructionOperand* operand, UsePosition* use_pos);

  UsePosition* NewUsePosition(LifetimePosition pos, InstructionOperand* operand,
                              void* hint, UsePositionHintType hint_type);
  UsePosition* NewUsePosition(LifetimePosition pos) {
    return NewUsePosition(pos, nullptr, nullptr, UsePositionHintType::kNone);
  }
  TopLevelLiveRange* LiveRangeFor(InstructionOperand* operand);
  // Helper methods for building intervals.
  UsePosition* Define(LifetimePosition position, InstructionOperand* operand,
                      void* hint, UsePositionHintType hint_type);
  void Define(LifetimePosition position, InstructionOperand* operand) {
    Define(position, operand, nullptr, UsePositionHintType::kNone);
  }
  UsePosition* Use(LifetimePosition block_start, LifetimePosition position,
                   InstructionOperand* operand, void* hint,
                   UsePositionHintType hint_type);
  void Use(LifetimePosition block_start, LifetimePosition position,
           InstructionOperand* operand) {
    Use(block_start, position, operand, nullptr, UsePositionHintType::kNone);
  }

  RegisterAllocationData* const data_;
  // NOTE(review): presumably maps phi input operands to use positions
  // awaiting a register hint (see MapPhiHint/ResolvePhiHint) — confirm.
  ZoneMap<InstructionOperand*, UsePosition*> phi_hints_;

  DISALLOW_COPY_AND_ASSIGN(LiveRangeBuilder);
};
971 :
972 :
973 : class RegisterAllocator : public ZoneObject {
974 : public:
975 : RegisterAllocator(RegisterAllocationData* data, RegisterKind kind);
976 :
977 : protected:
978 : RegisterAllocationData* data() const { return data_; }
979 20200166 : InstructionSequence* code() const { return data()->code(); }
980 : RegisterKind mode() const { return mode_; }
981 : int num_registers() const { return num_registers_; }
982 : int num_allocatable_registers() const { return num_allocatable_registers_; }
983 : const int* allocatable_register_codes() const {
984 : return allocatable_register_codes_;
985 : }
986 : // Returns true iff. we must check float register aliasing.
987 : bool check_fp_aliasing() const { return check_fp_aliasing_; }
988 :
989 : // TODO(mtrofin): explain why splitting in gap START is always OK.
990 : LifetimePosition GetSplitPositionForInstruction(const LiveRange* range,
991 : int instruction_index);
992 :
993 16018937 : Zone* allocation_zone() const { return data()->allocation_zone(); }
994 :
995 : // Find the optimal split for ranges defined by a memory operand, e.g.
996 : // constants or function parameters passed on the stack.
997 : void SplitAndSpillRangesDefinedByMemoryOperand();
998 :
999 : // Split the given range at the given position.
1000 : // If range starts at or after the given position then the
1001 : // original range is returned.
1002 : // Otherwise returns the live range that starts at pos and contains
1003 : // all uses from the original range that follow pos. Uses at pos will
1004 : // still be owned by the original range after splitting.
1005 : LiveRange* SplitRangeAt(LiveRange* range, LifetimePosition pos);
1006 :
1007 116691697 : bool CanProcessRange(LiveRange* range) const {
1008 459600658 : return range != nullptr && !range->IsEmpty() && range->kind() == mode();
1009 : }
1010 :
1011 :
1012 : // Split the given range in a position from the interval [start, end].
1013 : LiveRange* SplitBetween(LiveRange* range, LifetimePosition start,
1014 : LifetimePosition end);
1015 :
1016 : // Find a lifetime position in the interval [start, end] which
1017 : // is optimal for splitting: it is either header of the outermost
1018 : // loop covered by this interval or the latest possible position.
1019 : LifetimePosition FindOptimalSplitPos(LifetimePosition start,
1020 : LifetimePosition end);
1021 :
1022 : void Spill(LiveRange* range);
1023 :
1024 : // If we are trying to spill a range inside the loop try to
1025 : // hoist spill position out to the point just before the loop.
1026 : LifetimePosition FindOptimalSpillingPos(LiveRange* range,
1027 : LifetimePosition pos);
1028 :
1029 : const ZoneVector<TopLevelLiveRange*>& GetFixedRegisters() const;
1030 : const char* RegisterName(int allocation_index) const;
1031 :
1032 : private:
1033 : RegisterAllocationData* const data_;
1034 : const RegisterKind mode_;
1035 : const int num_registers_;
1036 : int num_allocatable_registers_;
1037 : const int* allocatable_register_codes_;
1038 : bool check_fp_aliasing_;
1039 :
1040 : private:
1041 : bool no_combining_;
1042 :
1043 : DISALLOW_COPY_AND_ASSIGN(RegisterAllocator);
1044 : };
1045 :
1046 :
// Phase 4: linear-scan register assignment over the sorted unhandled live
// ranges, maintaining the classic active/inactive working sets and
// splitting or spilling ranges on conflicts.
class LinearScanAllocator final : public RegisterAllocator {
 public:
  LinearScanAllocator(RegisterAllocationData* data, RegisterKind kind,
                      Zone* local_zone);

  // Phase 4: compute register assignments.
  void AllocateRegisters();

 private:
  // Working sets of the linear-scan algorithm.
  ZoneVector<LiveRange*>& unhandled_live_ranges() {
    return unhandled_live_ranges_;
  }
  ZoneVector<LiveRange*>& active_live_ranges() { return active_live_ranges_; }
  ZoneVector<LiveRange*>& inactive_live_ranges() {
    return inactive_live_ranges_;
  }

  void SetLiveRangeAssignedRegister(LiveRange* range, int reg);

  // Helper methods for updating the live range lists.
  void AddToActive(LiveRange* range);
  void AddToInactive(LiveRange* range);
  void AddToUnhandledSorted(LiveRange* range);
  void AddToUnhandledUnsorted(LiveRange* range);
  void SortUnhandled();
  bool UnhandledIsSorted();
  void ActiveToHandled(LiveRange* range);
  void ActiveToInactive(LiveRange* range);
  void InactiveToHandled(LiveRange* range);
  void InactiveToActive(LiveRange* range);

  // Helper methods for allocating registers.
  bool TryReuseSpillForPhi(TopLevelLiveRange* range);
  bool TryAllocateFreeReg(LiveRange* range,
                          const Vector<LifetimePosition>& free_until_pos);
  bool TryAllocatePreferredReg(LiveRange* range,
                               const Vector<LifetimePosition>& free_until_pos);
  void GetFPRegisterSet(MachineRepresentation rep, int* num_regs,
                        int* num_codes, const int** codes) const;
  void FindFreeRegistersForRange(LiveRange* range,
                                 Vector<LifetimePosition> free_until_pos);
  void ProcessCurrentRange(LiveRange* current);
  void AllocateBlockedReg(LiveRange* range);
  bool TrySplitAndSpillSplinter(LiveRange* range);

  // Spill the given live range after position pos.
  void SpillAfter(LiveRange* range, LifetimePosition pos);

  // Spill the given live range after position [start] and up to position
  // [end].
  void SpillBetween(LiveRange* range, LifetimePosition start,
                    LifetimePosition end);

  // Spill the given live range after position [start] and up to position
  // [end]. Range is guaranteed to be spilled at least until position [until].
  void SpillBetweenUntil(LiveRange* range, LifetimePosition start,
                         LifetimePosition until, LifetimePosition end);

  void SplitAndSpillIntersecting(LiveRange* range);

  ZoneVector<LiveRange*> unhandled_live_ranges_;
  ZoneVector<LiveRange*> active_live_ranges_;
  ZoneVector<LiveRange*> inactive_live_ranges_;

#ifdef DEBUG
  // NOTE(review): presumably records the last position processed so the
  // allocator can assert it only moves forward — confirm against the
  // implementation.
  LifetimePosition allocation_finger_;
#endif

  DISALLOW_COPY_AND_ASSIGN(LinearScanAllocator);
};
1116 :
1117 :
// Pass that locates spill slots for spilled ranges. NOTE(review): the
// precise behavior is inferred from the name LocateSpillSlots — confirm
// against the implementation.
class SpillSlotLocator final : public ZoneObject {
 public:
  explicit SpillSlotLocator(RegisterAllocationData* data);

  void LocateSpillSlots();

 private:
  RegisterAllocationData* data() const { return data_; }

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(SpillSlotLocator);
};
1131 :
1132 :
// Phases 5 and 6: assigns spill slots to spill ranges, then commits the
// computed assignments back to the instruction operands.
class OperandAssigner final : public ZoneObject {
 public:
  explicit OperandAssigner(RegisterAllocationData* data);

  // Phase 5: assign spill slots.
  void AssignSpillSlots();

  // Phase 6: commit assignment.
  void CommitAssignment();

 private:
  RegisterAllocationData* data() const { return data_; }

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(OperandAssigner);
};
1150 :
1151 :
// Phase 7: populates the pointer maps with the reference values that are
// live at each safe point.
class ReferenceMapPopulator final : public ZoneObject {
 public:
  explicit ReferenceMapPopulator(RegisterAllocationData* data);

  // Phase 7: compute values for pointer maps.
  void PopulateReferenceMaps();

 private:
  RegisterAllocationData* data() const { return data_; }

  // Consistency check that safe points appear in instruction order.
  bool SafePointsAreInOrder() const;

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(ReferenceMapPopulator);
};
1168 :
1169 :
1170 : class LiveRangeBoundArray;
1171 : // Insert moves of the form
1172 : //
1173 : // Operand(child_(k+1)) = Operand(child_k)
1174 : //
1175 : // where child_k and child_(k+1) are consecutive children of a range (so
1176 : // child_k->next() == child_(k+1)), and Operand(...) refers to the
1177 : // assigned operand, be it a register or a slot.
class LiveRangeConnector final : public ZoneObject {
 public:
  explicit LiveRangeConnector(RegisterAllocationData* data);

  // Phase 8: reconnect split ranges with moves, when the control flow
  // between the ranges is trivial (no branches).
  void ConnectRanges(Zone* local_zone);

  // Phase 9: insert moves to connect ranges across basic blocks, when the
  // control flow between them cannot be trivially resolved, such as joining
  // branches.
  void ResolveControlFlow(Zone* local_zone);

 private:
  // Shorthand accessors into the shared allocation data.
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* code_zone() const { return code()->zone(); }

  bool CanEagerlyResolveControlFlow(const InstructionBlock* block) const;

  // Connects |pred_op| (in predecessor |pred|) to |cur_op| (in |block|)
  // across the control-flow edge. NOTE(review): the meaning of the int
  // return value is not visible here — confirm against the implementation.
  int ResolveControlFlow(const InstructionBlock* block,
                         const InstructionOperand& cur_op,
                         const InstructionBlock* pred,
                         const InstructionOperand& pred_op);

  void CommitSpillsInDeferredBlocks(TopLevelLiveRange* range,
                                    LiveRangeBoundArray* array,
                                    Zone* temp_zone);

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(LiveRangeConnector);
};
1211 :
1212 : } // namespace compiler
1213 : } // namespace internal
1214 : } // namespace v8
1215 :
1216 : #endif // V8_REGISTER_ALLOCATOR_H_
|