// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/register-allocator.h"

#include "src/assembler-inl.h"
#include "src/base/adapters.h"
#include "src/compiler/linkage.h"
#include "src/string-stream.h"

namespace v8 {
namespace internal {
namespace compiler {

#define TRACE(...)                             \
  do {                                         \
    if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \
  } while (false)


namespace {

static const int kFloatRepBit =
    1 << static_cast<int>(MachineRepresentation::kFloat32);
static const int kSimd128RepBit =
    1 << static_cast<int>(MachineRepresentation::kSimd128);

void RemoveElement(ZoneVector<LiveRange*>* v, LiveRange* range) {
  auto it = std::find(v->begin(), v->end(), range);
  DCHECK(it != v->end());
  v->erase(it);
}

int GetRegisterCount(const RegisterConfiguration* cfg, RegisterKind kind) {
  return kind == FP_REGISTERS ? cfg->num_double_registers()
                              : cfg->num_general_registers();
}


int GetAllocatableRegisterCount(const RegisterConfiguration* cfg,
                                RegisterKind kind) {
  return kind == FP_REGISTERS ? cfg->num_allocatable_double_registers()
                              : cfg->num_allocatable_general_registers();
}


const int* GetAllocatableRegisterCodes(const RegisterConfiguration* cfg,
                                       RegisterKind kind) {
  return kind == FP_REGISTERS ? cfg->allocatable_double_codes()
                              : cfg->allocatable_general_codes();
}


const InstructionBlock* GetContainingLoop(const InstructionSequence* sequence,
                                          const InstructionBlock* block) {
  RpoNumber index = block->loop_header();
  if (!index.IsValid()) return nullptr;
  return sequence->InstructionBlockAt(index);
}


const InstructionBlock* GetInstructionBlock(const InstructionSequence* code,
                                            LifetimePosition pos) {
  return code->GetInstructionBlock(pos.ToInstructionIndex());
}


Instruction* GetLastInstruction(InstructionSequence* code,
                                const InstructionBlock* block) {
  return code->InstructionAt(block->last_instruction_index());
}

// TODO(dcarney): fix frame to allow frame accesses to half size location.
int GetByteWidth(MachineRepresentation rep) {
  switch (rep) {
    case MachineRepresentation::kBit:
    case MachineRepresentation::kWord8:
    case MachineRepresentation::kWord16:
    case MachineRepresentation::kWord32:
    case MachineRepresentation::kTaggedSigned:
    case MachineRepresentation::kTaggedPointer:
    case MachineRepresentation::kTagged:
    case MachineRepresentation::kFloat32:
      return kPointerSize;
    case MachineRepresentation::kWord64:
    case MachineRepresentation::kFloat64:
      return kDoubleSize;
    case MachineRepresentation::kSimd128:
      return kSimd128Size;
    case MachineRepresentation::kSimd1x4:
    case MachineRepresentation::kSimd1x8:
    case MachineRepresentation::kSimd1x16:
      return kSimdMaskRegisters ? kPointerSize : kSimd128Size;
    case MachineRepresentation::kNone:
      break;
  }
  UNREACHABLE();
  return 0;
}

}  // namespace

class LiveRangeBound {
 public:
  explicit LiveRangeBound(LiveRange* range, bool skip)
      : range_(range), start_(range->Start()), end_(range->End()), skip_(skip) {
    DCHECK(!range->IsEmpty());
  }

  bool CanCover(LifetimePosition position) {
    return start_ <= position && position < end_;
  }

  LiveRange* const range_;
  const LifetimePosition start_;
  const LifetimePosition end_;
  const bool skip_;

 private:
  DISALLOW_COPY_AND_ASSIGN(LiveRangeBound);
};


struct FindResult {
  LiveRange* cur_cover_;
  LiveRange* pred_cover_;
};


class LiveRangeBoundArray {
 public:
  LiveRangeBoundArray() : length_(0), start_(nullptr) {}

  bool ShouldInitialize() { return start_ == nullptr; }

  void Initialize(Zone* zone, TopLevelLiveRange* range) {
    length_ = range->GetChildCount();

    start_ = zone->NewArray<LiveRangeBound>(length_);
    LiveRangeBound* curr = start_;
    // Normally, spilled ranges do not need connecting moves, because the spill
    // location has been assigned at definition. For ranges spilled in deferred
    // blocks, that is not the case, so we need to connect the spilled children.
    for (LiveRange *i = range; i != nullptr; i = i->next(), ++curr) {
      new (curr) LiveRangeBound(i, i->spilled());
    }
  }
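
  // Binary search for the LiveRangeBound whose [start_, end_) covers
  // |position|. The bounds are built from the children of a live range,
  // which are kept sorted by start position, so halving the search window
  // is safe. The caller must guarantee that some bound covers |position|;
  // otherwise this loop would not terminate.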
  LiveRangeBound* Find(const LifetimePosition position) const {
    size_t left_index = 0;
    size_t right_index = length_;
    while (true) {
      size_t current_index = left_index + (right_index - left_index) / 2;
      DCHECK(right_index > current_index);
      LiveRangeBound* bound = &start_[current_index];
      if (bound->start_ <= position) {
        if (position < bound->end_) return bound;
        DCHECK(left_index < current_index);
        left_index = current_index;
      } else {
        right_index = current_index;
      }
    }
  }

  LiveRangeBound* FindPred(const InstructionBlock* pred) {
    LifetimePosition pred_end =
        LifetimePosition::InstructionFromInstructionIndex(
            pred->last_instruction_index());
    return Find(pred_end);
  }

  LiveRangeBound* FindSucc(const InstructionBlock* succ) {
    LifetimePosition succ_start = LifetimePosition::GapFromInstructionIndex(
        succ->first_instruction_index());
    return Find(succ_start);
  }
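
  // Locates the children covering the end of |pred| and the start of |block|.
  // Returns true exactly when those are two different children (and the one
  // covering |block| is not a skipped, i.e. spilled, bound), which is when a
  // connecting move is needed at the block boundary.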
  bool FindConnectableSubranges(const InstructionBlock* block,
                                const InstructionBlock* pred,
                                FindResult* result) const {
    LifetimePosition pred_end =
        LifetimePosition::InstructionFromInstructionIndex(
            pred->last_instruction_index());
    LiveRangeBound* bound = Find(pred_end);
    result->pred_cover_ = bound->range_;
    LifetimePosition cur_start = LifetimePosition::GapFromInstructionIndex(
        block->first_instruction_index());

    if (bound->CanCover(cur_start)) {
      // Both blocks are covered by the same range, so there is nothing to
      // connect.
      return false;
    }
    bound = Find(cur_start);
    if (bound->skip_) {
      return false;
    }
    result->cur_cover_ = bound->range_;
    DCHECK(result->pred_cover_ != nullptr && result->cur_cover_ != nullptr);
    return (result->cur_cover_ != result->pred_cover_);
  }

 private:
  size_t length_;
  LiveRangeBound* start_;

  DISALLOW_COPY_AND_ASSIGN(LiveRangeBoundArray);
};


class LiveRangeFinder {
 public:
  explicit LiveRangeFinder(const RegisterAllocationData* data, Zone* zone)
      : data_(data),
        bounds_length_(static_cast<int>(data_->live_ranges().size())),
        bounds_(zone->NewArray<LiveRangeBoundArray>(bounds_length_)),
        zone_(zone) {
    for (int i = 0; i < bounds_length_; ++i) {
      new (&bounds_[i]) LiveRangeBoundArray();
    }
  }
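
  // Returns the bound array for the live range at |operand_index|, building
  // it lazily on first access so that ranges never queried during resolution
  // do not pay the initialization cost.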
  LiveRangeBoundArray* ArrayFor(int operand_index) {
    DCHECK(operand_index < bounds_length_);
    TopLevelLiveRange* range = data_->live_ranges()[operand_index];
    DCHECK(range != nullptr && !range->IsEmpty());
    LiveRangeBoundArray* array = &bounds_[operand_index];
    if (array->ShouldInitialize()) {
      array->Initialize(zone_, range);
    }
    return array;
  }

 private:
  const RegisterAllocationData* const data_;
  const int bounds_length_;
  LiveRangeBoundArray* const bounds_;
  Zone* const zone_;

  DISALLOW_COPY_AND_ASSIGN(LiveRangeFinder);
};


typedef std::pair<ParallelMove*, InstructionOperand> DelayedInsertionMapKey;


struct DelayedInsertionMapCompare {
  bool operator()(const DelayedInsertionMapKey& a,
                  const DelayedInsertionMapKey& b) const {
    if (a.first == b.first) {
      return a.second.Compare(b.second);
    }
    return a.first < b.first;
  }
};


typedef ZoneMap<DelayedInsertionMapKey, InstructionOperand,
                DelayedInsertionMapCompare> DelayedInsertionMap;


UsePosition::UsePosition(LifetimePosition pos, InstructionOperand* operand,
                         void* hint, UsePositionHintType hint_type)
    : operand_(operand), hint_(hint), next_(nullptr), pos_(pos), flags_(0) {
  DCHECK_IMPLIES(hint == nullptr, hint_type == UsePositionHintType::kNone);
  bool register_beneficial = true;
  UsePositionType type = UsePositionType::kAny;
  if (operand_ != nullptr && operand_->IsUnallocated()) {
    const UnallocatedOperand* unalloc = UnallocatedOperand::cast(operand_);
    if (unalloc->HasRegisterPolicy()) {
      type = UsePositionType::kRequiresRegister;
    } else if (unalloc->HasSlotPolicy()) {
      type = UsePositionType::kRequiresSlot;
      register_beneficial = false;
    } else {
      register_beneficial = !unalloc->HasAnyPolicy();
    }
  }
  flags_ = TypeField::encode(type) | HintTypeField::encode(hint_type) |
           RegisterBeneficialField::encode(register_beneficial) |
           AssignedRegisterField::encode(kUnassignedRegister);
  DCHECK(pos_.IsValid());
}


bool UsePosition::HasHint() const {
  int hint_register;
  return HintRegister(&hint_register);
}
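
// Extracts a register hint, if one is available, from whatever hint_ points
// at: another use position (its assigned register), an allocated operand
// (its register code), or a phi (its assigned register). Returns false when
// the hint is absent, unresolved, or not yet backed by an assigned register.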
bool UsePosition::HintRegister(int* register_code) const {
  if (hint_ == nullptr) return false;
  switch (HintTypeField::decode(flags_)) {
    case UsePositionHintType::kNone:
    case UsePositionHintType::kUnresolved:
      return false;
    case UsePositionHintType::kUsePos: {
      UsePosition* use_pos = reinterpret_cast<UsePosition*>(hint_);
      int assigned_register = AssignedRegisterField::decode(use_pos->flags_);
      if (assigned_register == kUnassignedRegister) return false;
      *register_code = assigned_register;
      return true;
    }
    case UsePositionHintType::kOperand: {
      InstructionOperand* operand =
          reinterpret_cast<InstructionOperand*>(hint_);
      *register_code = LocationOperand::cast(operand)->register_code();
      return true;
    }
    case UsePositionHintType::kPhi: {
      RegisterAllocationData::PhiMapValue* phi =
          reinterpret_cast<RegisterAllocationData::PhiMapValue*>(hint_);
      int assigned_register = phi->assigned_register();
      if (assigned_register == kUnassignedRegister) return false;
      *register_code = assigned_register;
      return true;
    }
  }
  UNREACHABLE();
  return false;
}


UsePositionHintType UsePosition::HintTypeForOperand(
    const InstructionOperand& op) {
  switch (op.kind()) {
    case InstructionOperand::CONSTANT:
    case InstructionOperand::IMMEDIATE:
    case InstructionOperand::EXPLICIT:
      return UsePositionHintType::kNone;
    case InstructionOperand::UNALLOCATED:
      return UsePositionHintType::kUnresolved;
    case InstructionOperand::ALLOCATED:
      if (op.IsRegister() || op.IsFPRegister()) {
        return UsePositionHintType::kOperand;
      } else {
        DCHECK(op.IsStackSlot() || op.IsFPStackSlot());
        return UsePositionHintType::kNone;
      }
    case InstructionOperand::INVALID:
      break;
  }
  UNREACHABLE();
  return UsePositionHintType::kNone;
}

void UsePosition::SetHint(UsePosition* use_pos) {
  DCHECK_NOT_NULL(use_pos);
  hint_ = use_pos;
  flags_ = HintTypeField::update(flags_, UsePositionHintType::kUsePos);
}

void UsePosition::ResolveHint(UsePosition* use_pos) {
  DCHECK_NOT_NULL(use_pos);
  if (HintTypeField::decode(flags_) != UsePositionHintType::kUnresolved) return;
  hint_ = use_pos;
  flags_ = HintTypeField::update(flags_, UsePositionHintType::kUsePos);
}


void UsePosition::set_type(UsePositionType type, bool register_beneficial) {
  DCHECK_IMPLIES(type == UsePositionType::kRequiresSlot, !register_beneficial);
  DCHECK_EQ(kUnassignedRegister, AssignedRegisterField::decode(flags_));
  flags_ = TypeField::encode(type) |
           RegisterBeneficialField::encode(register_beneficial) |
           HintTypeField::encode(HintTypeField::decode(flags_)) |
           AssignedRegisterField::encode(kUnassignedRegister);
}
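
// Splits the interval [start_, end_) at |pos| into [start_, pos) and
// [pos, end_). |this| keeps the first half; the zone-allocated second half,
// which also takes over the rest of the interval chain, is returned.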
UseInterval* UseInterval::SplitAt(LifetimePosition pos, Zone* zone) {
  DCHECK(Contains(pos) && pos != start());
  UseInterval* after = new (zone) UseInterval(pos, end_);
  after->next_ = next_;
  next_ = nullptr;
  end_ = pos;
  return after;
}


void LifetimePosition::Print() const {
  OFStream os(stdout);
  os << *this << std::endl;
}


std::ostream& operator<<(std::ostream& os, const LifetimePosition pos) {
  os << '@' << pos.ToInstructionIndex();
  if (pos.IsGapPosition()) {
    os << 'g';
  } else {
    os << 'i';
  }
  if (pos.IsStart()) {
    os << 's';
  } else {
    os << 'e';
  }
  return os;
}

LiveRange::LiveRange(int relative_id, MachineRepresentation rep,
                     TopLevelLiveRange* top_level)
    : relative_id_(relative_id),
      bits_(0),
      last_interval_(nullptr),
      first_interval_(nullptr),
      first_pos_(nullptr),
      top_level_(top_level),
      next_(nullptr),
      current_interval_(nullptr),
      last_processed_use_(nullptr),
      current_hint_position_(nullptr),
      splitting_pointer_(nullptr) {
  DCHECK(AllocatedOperand::IsSupportedRepresentation(rep));
  bits_ = AssignedRegisterField::encode(kUnassignedRegister) |
          RepresentationField::encode(rep);
}


void LiveRange::VerifyPositions() const {
  // Walk the positions, verifying that each is in an interval.
  UseInterval* interval = first_interval_;
  for (UsePosition* pos = first_pos_; pos != nullptr; pos = pos->next()) {
    CHECK(Start() <= pos->pos());
    CHECK(pos->pos() <= End());
    CHECK_NOT_NULL(interval);
    while (!interval->Contains(pos->pos()) && interval->end() != pos->pos()) {
      interval = interval->next();
      CHECK_NOT_NULL(interval);
    }
  }
}


void LiveRange::VerifyIntervals() const {
  DCHECK(first_interval()->start() == Start());
  LifetimePosition last_end = first_interval()->end();
  for (UseInterval* interval = first_interval()->next(); interval != nullptr;
       interval = interval->next()) {
    DCHECK(last_end <= interval->start());
    last_end = interval->end();
  }
  DCHECK(last_end == End());
}


void LiveRange::set_assigned_register(int reg) {
  DCHECK(!HasRegisterAssigned() && !spilled());
  bits_ = AssignedRegisterField::update(bits_, reg);
}


void LiveRange::UnsetAssignedRegister() {
  DCHECK(HasRegisterAssigned() && !spilled());
  bits_ = AssignedRegisterField::update(bits_, kUnassignedRegister);
}


void LiveRange::Spill() {
  DCHECK(!spilled());
  DCHECK(!TopLevel()->HasNoSpillType());
  set_spilled(true);
  bits_ = AssignedRegisterField::update(bits_, kUnassignedRegister);
}


RegisterKind LiveRange::kind() const {
  return IsFloatingPoint(representation()) ? FP_REGISTERS : GENERAL_REGISTERS;
}


UsePosition* LiveRange::FirstHintPosition(int* register_index) const {
  for (UsePosition* pos = first_pos_; pos != nullptr; pos = pos->next()) {
    if (pos->HintRegister(register_index)) return pos;
  }
  return nullptr;
}


UsePosition* LiveRange::NextUsePosition(LifetimePosition start) const {
  UsePosition* use_pos = last_processed_use_;
  if (use_pos == nullptr || use_pos->pos() > start) {
    use_pos = first_pos();
  }
  while (use_pos != nullptr && use_pos->pos() < start) {
    use_pos = use_pos->next();
  }
  last_processed_use_ = use_pos;
  return use_pos;
}


UsePosition* LiveRange::NextUsePositionRegisterIsBeneficial(
    LifetimePosition start) const {
  UsePosition* pos = NextUsePosition(start);
  while (pos != nullptr && !pos->RegisterIsBeneficial()) {
    pos = pos->next();
  }
  return pos;
}

LifetimePosition LiveRange::NextLifetimePositionRegisterIsBeneficial(
    const LifetimePosition& start) const {
  UsePosition* next_use = NextUsePositionRegisterIsBeneficial(start);
  if (next_use == nullptr) return End();
  return next_use->pos();
}

UsePosition* LiveRange::PreviousUsePositionRegisterIsBeneficial(
    LifetimePosition start) const {
  UsePosition* pos = first_pos();
  UsePosition* prev = nullptr;
  while (pos != nullptr && pos->pos() < start) {
    if (pos->RegisterIsBeneficial()) prev = pos;
    pos = pos->next();
  }
  return prev;
}


UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) const {
  UsePosition* pos = NextUsePosition(start);
  while (pos != nullptr && pos->type() != UsePositionType::kRequiresRegister) {
    pos = pos->next();
  }
  return pos;
}


UsePosition* LiveRange::NextSlotPosition(LifetimePosition start) const {
  for (UsePosition* pos = NextUsePosition(start); pos != nullptr;
       pos = pos->next()) {
    if (pos->type() != UsePositionType::kRequiresSlot) continue;
    return pos;
  }
  return nullptr;
}


bool LiveRange::CanBeSpilled(LifetimePosition pos) const {
  // We cannot spill a live range that has a use requiring a register
  // at the current or the immediate next position.
  UsePosition* use_pos = NextRegisterPosition(pos);
  if (use_pos == nullptr) return true;
  return use_pos->pos() > pos.NextStart().End();
}


bool LiveRange::IsTopLevel() const { return top_level_ == this; }


InstructionOperand LiveRange::GetAssignedOperand() const {
  if (HasRegisterAssigned()) {
    DCHECK(!spilled());
    return AllocatedOperand(LocationOperand::REGISTER, representation(),
                            assigned_register());
  }
  DCHECK(spilled());
  DCHECK(!HasRegisterAssigned());
  if (TopLevel()->HasSpillOperand()) {
    InstructionOperand* op = TopLevel()->GetSpillOperand();
    DCHECK(!op->IsUnallocated());
    return *op;
  }
  return TopLevel()->GetSpillRangeOperand();
}


UseInterval* LiveRange::FirstSearchIntervalForPosition(
    LifetimePosition position) const {
  if (current_interval_ == nullptr) return first_interval_;
  if (current_interval_->start() > position) {
    current_interval_ = nullptr;
    return first_interval_;
  }
  return current_interval_;
}


void LiveRange::AdvanceLastProcessedMarker(
    UseInterval* to_start_of, LifetimePosition but_not_past) const {
  if (to_start_of == nullptr) return;
  if (to_start_of->start() > but_not_past) return;
  LifetimePosition start = current_interval_ == nullptr
                               ? LifetimePosition::Invalid()
                               : current_interval_->start();
  if (to_start_of->start() > start) {
    current_interval_ = to_start_of;
  }
}
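
// Splits this live range at |position|, links the new child range in after
// |this|, and returns it. DetachAt does the actual partitioning of the use
// intervals and use positions between the two halves.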
596 :
597 117419838 : LiveRange* LiveRange::SplitAt(LifetimePosition position, Zone* zone) {
598 : int new_id = TopLevel()->GetNextChildId();
599 : LiveRange* child = new (zone) LiveRange(new_id, representation(), TopLevel());
600 : // If we split, we do so because we're about to switch registers or move
601 : // to/from a slot, so there's no value in connecting hints.
602 29354923 : DetachAt(position, child, zone, DoNotConnectHints);
603 :
604 29355033 : child->top_level_ = TopLevel();
605 29355033 : child->next_ = next_;
606 29355033 : next_ = child;
607 29355033 : return child;
608 : }
609 :
610 48224178 : UsePosition* LiveRange::DetachAt(LifetimePosition position, LiveRange* result,
611 : Zone* zone,
612 40611694 : HintConnectionOption connect_hints) {
613 : DCHECK(Start() < position);
614 : DCHECK(End() > position);
615 : DCHECK(result->IsEmpty());
616 : // Find the last interval that ends before the position. If the
617 : // position is contained in one of the intervals in the chain, we
618 : // split that interval and use the first part.
619 30731431 : UseInterval* current = FirstSearchIntervalForPosition(position);
620 :

  // If the split position coincides with the beginning of a use interval
  // we need to split use positions in a special way.
  bool split_at_start = false;

  if (current->start() == position) {
    // When splitting at start we need to locate the previous use interval.
    current = first_interval_;
  }

  UseInterval* after = nullptr;
  while (current != nullptr) {
    if (current->Contains(position)) {
      after = current->SplitAt(position, zone);
      break;
    }
    UseInterval* next = current->next();
    if (next->start() >= position) {
      split_at_start = (next->start() == position);
      after = next;
      current->set_next(nullptr);
      break;
    }
    current = next;
  }
  DCHECK(nullptr != after);

  // Partition original use intervals to the two live ranges.
  UseInterval* before = current;
  result->last_interval_ =
      (last_interval_ == before)
          ? after            // Only interval in the range after split.
          : last_interval_;  // Last interval of the original range.
  result->first_interval_ = after;
  last_interval_ = before;

  // Find the last use position before the split and the first use
  // position after it.
  UsePosition* use_after =
      splitting_pointer_ == nullptr || splitting_pointer_->pos() > position
          ? first_pos()
          : splitting_pointer_;
  UsePosition* use_before = nullptr;
  if (split_at_start) {
    // The split position coincides with the beginning of a use interval (the
    // end of a lifetime hole). Use at this position should be attributed to
    // the split child because split child owns use interval covering it.
    while (use_after != nullptr && use_after->pos() < position) {
      use_before = use_after;
      use_after = use_after->next();
    }
  } else {
    while (use_after != nullptr && use_after->pos() <= position) {
      use_before = use_after;
      use_after = use_after->next();
    }
  }

  // Partition original use positions to the two live ranges.
  if (use_before != nullptr) {
    use_before->set_next(nullptr);
  } else {
    first_pos_ = nullptr;
  }
  result->first_pos_ = use_after;

  // Discard cached iteration state. It might be pointing
  // to the use that no longer belongs to this live range.
  last_processed_use_ = nullptr;
  current_interval_ = nullptr;

  if (connect_hints == ConnectHints && use_before != nullptr &&
      use_after != nullptr) {
    use_after->SetHint(use_before);
  }
#ifdef DEBUG
  VerifyChildStructure();
  result->VerifyChildStructure();
#endif
  return use_before;
}


void LiveRange::UpdateParentForAllChildren(TopLevelLiveRange* new_top_level) {
  LiveRange* child = this;
  for (; child != nullptr; child = child->next()) {
    child->top_level_ = new_top_level;
  }
}


void LiveRange::ConvertUsesToOperand(const InstructionOperand& op,
                                     const InstructionOperand& spill_op) {
  for (UsePosition* pos = first_pos(); pos != nullptr; pos = pos->next()) {
    DCHECK(Start() <= pos->pos() && pos->pos() <= End());
    if (!pos->HasOperand()) continue;
    switch (pos->type()) {
      case UsePositionType::kRequiresSlot:
        DCHECK(spill_op.IsStackSlot() || spill_op.IsFPStackSlot());
        InstructionOperand::ReplaceWith(pos->operand(), &spill_op);
        break;
      case UsePositionType::kRequiresRegister:
        DCHECK(op.IsRegister() || op.IsFPRegister());
      // Fall through.
      case UsePositionType::kAny:
        InstructionOperand::ReplaceWith(pos->operand(), &op);
        break;
    }
  }
}


// This implements an ordering on live ranges so that they are ordered by their
// start positions. This is needed for the correctness of the register
// allocation algorithm. If two live ranges start at the same offset then there
// is a tie breaker based on where the value is first used. This part of the
// ordering is merely a heuristic.
bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
  LifetimePosition start = Start();
  LifetimePosition other_start = other->Start();
  if (start == other_start) {
    UsePosition* pos = first_pos();
    if (pos == nullptr) return false;
    UsePosition* other_pos = other->first_pos();
    if (other_pos == nullptr) return true;
    return pos->pos() < other_pos->pos();
  }
  return start < other_start;
}


void LiveRange::SetUseHints(int register_index) {
  for (UsePosition* pos = first_pos(); pos != nullptr; pos = pos->next()) {
    if (!pos->HasOperand()) continue;
    switch (pos->type()) {
      case UsePositionType::kRequiresSlot:
        break;
      case UsePositionType::kRequiresRegister:
      case UsePositionType::kAny:
        pos->set_assigned_register(register_index);
        break;
    }
  }
}


bool LiveRange::CanCover(LifetimePosition position) const {
  if (IsEmpty()) return false;
  return Start() <= position && position < End();
}


bool LiveRange::Covers(LifetimePosition position) const {
  if (!CanCover(position)) return false;
  UseInterval* start_search = FirstSearchIntervalForPosition(position);
  for (UseInterval* interval = start_search; interval != nullptr;
       interval = interval->next()) {
    DCHECK(interval->next() == nullptr ||
           interval->next()->start() >= interval->start());
    AdvanceLastProcessedMarker(interval, position);
    if (interval->Contains(position)) return true;
    if (interval->start() > position) return false;
  }
  return false;
}
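
// Walks the two sorted interval chains in lockstep, always advancing the
// chain that currently starts earlier, and returns the position at which the
// two ranges first intersect, or an invalid position if they never do.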
LifetimePosition LiveRange::FirstIntersection(LiveRange* other) const {
  UseInterval* b = other->first_interval();
  if (b == nullptr) return LifetimePosition::Invalid();
  LifetimePosition advance_last_processed_up_to = b->start();
  UseInterval* a = FirstSearchIntervalForPosition(b->start());
  while (a != nullptr && b != nullptr) {
    if (a->start() > other->End()) break;
    if (b->start() > End()) break;
    LifetimePosition cur_intersection = a->Intersect(b);
    if (cur_intersection.IsValid()) {
      return cur_intersection;
    }
    if (a->start() < b->start()) {
      a = a->next();
      if (a == nullptr || a->start() > other->End()) break;
      AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
    } else {
      b = b->next();
    }
  }
  return LifetimePosition::Invalid();
}

void LiveRange::Print(const RegisterConfiguration* config,
                      bool with_children) const {
  OFStream os(stdout);
  PrintableLiveRange wrapper;
  wrapper.register_configuration_ = config;
  for (const LiveRange* i = this; i != nullptr; i = i->next()) {
    wrapper.range_ = i;
    os << wrapper << std::endl;
    if (!with_children) break;
  }
}


void LiveRange::Print(bool with_children) const {
  Print(RegisterConfiguration::Turbofan(), with_children);
}


struct TopLevelLiveRange::SpillMoveInsertionList : ZoneObject {
  SpillMoveInsertionList(int gap_index, InstructionOperand* operand,
                         SpillMoveInsertionList* next)
      : gap_index(gap_index), operand(operand), next(next) {}
  const int gap_index;
  InstructionOperand* const operand;
  SpillMoveInsertionList* const next;
};


TopLevelLiveRange::TopLevelLiveRange(int vreg, MachineRepresentation rep)
    : LiveRange(0, rep, this),
      vreg_(vreg),
      last_child_id_(0),
      splintered_from_(nullptr),
      spill_operand_(nullptr),
      spill_move_insertion_locations_(nullptr),
      spilled_in_deferred_blocks_(false),
      spill_start_index_(kMaxInt),
      last_pos_(nullptr),
      splinter_(nullptr),
      has_preassigned_slot_(false) {
  bits_ |= SpillTypeField::encode(SpillType::kNoSpillType);
}


#if DEBUG
int TopLevelLiveRange::debug_virt_reg() const {
  return IsSplinter() ? splintered_from()->vreg() : vreg();
}
#endif


void TopLevelLiveRange::RecordSpillLocation(Zone* zone, int gap_index,
                                            InstructionOperand* operand) {
  DCHECK(HasNoSpillType());
  spill_move_insertion_locations_ = new (zone) SpillMoveInsertionList(
      gap_index, operand, spill_move_insertion_locations_);
}

void TopLevelLiveRange::CommitSpillMoves(InstructionSequence* sequence,
                                         const InstructionOperand& op,
                                         bool might_be_duplicated) {
  DCHECK_IMPLIES(op.IsConstant(), GetSpillMoveInsertionLocations() == nullptr);
  Zone* zone = sequence->zone();

  for (SpillMoveInsertionList* to_spill = GetSpillMoveInsertionLocations();
       to_spill != nullptr; to_spill = to_spill->next) {
    Instruction* instr = sequence->InstructionAt(to_spill->gap_index);
    ParallelMove* move =
        instr->GetOrCreateParallelMove(Instruction::START, zone);
    // Skip insertion if it's possible that the move exists already as a
    // constraint move from a fixed output register to a slot.
    if (might_be_duplicated || has_preassigned_slot()) {
      bool found = false;
      for (MoveOperands* move_op : *move) {
        if (move_op->IsEliminated()) continue;
        if (move_op->source().Equals(*to_spill->operand) &&
            move_op->destination().Equals(op)) {
          found = true;
          if (has_preassigned_slot()) move_op->Eliminate();
          break;
        }
      }
      if (found) continue;
    }
    if (!has_preassigned_slot()) {
      move->AddMove(*to_spill->operand, op);
    }
  }
}


void TopLevelLiveRange::SetSpillOperand(InstructionOperand* operand) {
  DCHECK(HasNoSpillType());
  DCHECK(!operand->IsUnallocated() && !operand->IsImmediate());
  set_spill_type(SpillType::kSpillOperand);
  spill_operand_ = operand;
}


void TopLevelLiveRange::SetSpillRange(SpillRange* spill_range) {
  DCHECK(!HasSpillOperand());
  DCHECK(spill_range);
  spill_range_ = spill_range;
}


AllocatedOperand TopLevelLiveRange::GetSpillRangeOperand() const {
  SpillRange* spill_range = GetSpillRange();
  int index = spill_range->assigned_slot();
  return AllocatedOperand(LocationOperand::STACK_SLOT, representation(), index);
}
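
// Carves the interval [start, end) out of this range into splinter(). The
// register allocator uses this to pull the deferred-block portions of a
// range out, so they can be allocated separately from the hot path.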
void TopLevelLiveRange::Splinter(LifetimePosition start, LifetimePosition end,
                                 Zone* zone) {
  DCHECK(start != Start() || end != End());
  DCHECK(start < end);

  TopLevelLiveRange splinter_temp(-1, representation());
  UsePosition* last_in_splinter = nullptr;
  // Live ranges defined in deferred blocks stay in deferred blocks, so we
  // don't need to splinter them. That means that start should always be
  // after the beginning of the range.
  DCHECK(start > Start());

  if (end >= End()) {
    DCHECK(start > Start());
    DetachAt(start, &splinter_temp, zone, ConnectHints);
    next_ = nullptr;
  } else {
    DCHECK(start < End() && Start() < end);

    const int kInvalidId = std::numeric_limits<int>::max();

    UsePosition* last = DetachAt(start, &splinter_temp, zone, ConnectHints);

    LiveRange end_part(kInvalidId, this->representation(), nullptr);
    // The last chunk exits the deferred region, and we don't want to connect
    // hints here, because the non-deferred region shouldn't be affected
    // by allocation decisions on the deferred path.
    last_in_splinter =
        splinter_temp.DetachAt(end, &end_part, zone, DoNotConnectHints);

    next_ = end_part.next_;
    last_interval_->set_next(end_part.first_interval_);
    // The next splinter will happen either at or after the current interval.
    // We can optimize DetachAt by setting current_interval_ accordingly,
    // which will then be picked up by FirstSearchIntervalForPosition.
    current_interval_ = last_interval_;
    last_interval_ = end_part.last_interval_;

    if (first_pos_ == nullptr) {
      first_pos_ = end_part.first_pos_;
    } else {
      splitting_pointer_ = last;
      if (last != nullptr) last->set_next(end_part.first_pos_);
    }
  }

  if (splinter()->IsEmpty()) {
    splinter()->first_interval_ = splinter_temp.first_interval_;
    splinter()->last_interval_ = splinter_temp.last_interval_;
  } else {
    splinter()->last_interval_->set_next(splinter_temp.first_interval_);
    splinter()->last_interval_ = splinter_temp.last_interval_;
  }
  if (splinter()->first_pos() == nullptr) {
    splinter()->first_pos_ = splinter_temp.first_pos_;
  } else {
    splinter()->last_pos_->set_next(splinter_temp.first_pos_);
  }
  if (last_in_splinter != nullptr) {
    splinter()->last_pos_ = last_in_splinter;
  } else {
    if (splinter()->first_pos() != nullptr &&
        splinter()->last_pos_ == nullptr) {
      splinter()->last_pos_ = splinter()->first_pos();
      for (UsePosition* pos = splinter()->first_pos(); pos != nullptr;
           pos = pos->next()) {
        splinter()->last_pos_ = pos;
      }
    }
  }
#if DEBUG
  Verify();
  splinter()->Verify();
#endif
}


void TopLevelLiveRange::SetSplinteredFrom(TopLevelLiveRange* splinter_parent) {
  splintered_from_ = splinter_parent;
  if (!HasSpillOperand() && splinter_parent->spill_range_ != nullptr) {
    SetSpillRange(splinter_parent->spill_range_);
  }
}


void TopLevelLiveRange::UpdateSpillRangePostMerge(TopLevelLiveRange* merged) {
  DCHECK(merged->TopLevel() == this);

  if (HasNoSpillType() && merged->HasSpillRange()) {
    set_spill_type(merged->spill_type());
    DCHECK(GetSpillRange()->live_ranges().size() > 0);
    merged->spill_range_ = nullptr;
    merged->bits_ =
        SpillTypeField::update(merged->bits_, SpillType::kNoSpillType);
  }
}
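
// Merges a splinter (|other|) back into this range once both have been
// allocated, interleaving the children of the two chains by start position
// and splitting a child of |this| wherever it overlaps a splinter child.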
void TopLevelLiveRange::Merge(TopLevelLiveRange* other, Zone* zone) {
  DCHECK(Start() < other->Start());
  DCHECK(other->splintered_from() == this);

  LiveRange* first = this;
  LiveRange* second = other;
  DCHECK(first->Start() < second->Start());
  while (first != nullptr && second != nullptr) {
    DCHECK(first != second);
    // Make sure the ranges are in order each time we iterate.
    if (second->Start() < first->Start()) {
      LiveRange* tmp = second;
      second = first;
      first = tmp;
      continue;
    }

    if (first->End() <= second->Start()) {
      if (first->next() == nullptr ||
          first->next()->Start() > second->Start()) {
        // First is in order before second.
        LiveRange* temp = first->next();
        first->next_ = second;
        first = temp;
      } else {
        // First is in order before its successor (or second), so advance first.
        first = first->next();
      }
      continue;
    }

    DCHECK(first->Start() < second->Start());
    // If first and second intersect, split first.
    if (first->Start() < second->End() && second->Start() < first->End()) {
      LiveRange* temp = first->SplitAt(second->Start(), zone);
      CHECK(temp != first);
      temp->set_spilled(first->spilled());
      if (!temp->spilled())
        temp->set_assigned_register(first->assigned_register());

      first->next_ = second;
      first = temp;
      continue;
    }
    DCHECK(first->End() <= second->Start());
  }

  TopLevel()->UpdateParentForAllChildren(TopLevel());
  TopLevel()->UpdateSpillRangePostMerge(other);
  TopLevel()->set_has_slot_use(TopLevel()->has_slot_use() ||
                               other->has_slot_use());

#if DEBUG
  Verify();
#endif
}


void TopLevelLiveRange::VerifyChildrenInOrder() const {
  LifetimePosition last_end = End();
  for (const LiveRange* child = this->next(); child != nullptr;
       child = child->next()) {
    DCHECK(last_end <= child->Start());
    last_end = child->End();
  }
}


void TopLevelLiveRange::Verify() const {
  VerifyChildrenInOrder();
  for (const LiveRange* child = this; child != nullptr; child = child->next()) {
    VerifyChildStructure();
  }
}


void TopLevelLiveRange::ShortenTo(LifetimePosition start) {
  TRACE("Shorten live range %d to [%d\n", vreg(), start.value());
  DCHECK(first_interval_ != nullptr);
  DCHECK(first_interval_->start() <= start);
  DCHECK(start < first_interval_->end());
  first_interval_->set_start(start);
}


void TopLevelLiveRange::EnsureInterval(LifetimePosition start,
                                       LifetimePosition end, Zone* zone) {
  TRACE("Ensure live range %d in interval [%d %d[\n", vreg(), start.value(),
        end.value());
  LifetimePosition new_end = end;
  while (first_interval_ != nullptr && first_interval_->start() <= end) {
    if (first_interval_->end() > end) {
      new_end = first_interval_->end();
    }
    first_interval_ = first_interval_->next();
  }

  UseInterval* new_interval = new (zone) UseInterval(start, new_end);
  new_interval->set_next(first_interval_);
  first_interval_ = new_interval;
  if (new_interval->next() == nullptr) {
    last_interval_ = new_interval;
  }
}


void TopLevelLiveRange::AddUseInterval(LifetimePosition start,
                                       LifetimePosition end, Zone* zone) {
  TRACE("Add to live range %d interval [%d %d[\n", vreg(), start.value(),
        end.value());
  if (first_interval_ == nullptr) {
    UseInterval* interval = new (zone) UseInterval(start, end);
    first_interval_ = interval;
    last_interval_ = interval;
  } else {
    if (end == first_interval_->start()) {
      first_interval_->set_start(start);
    } else if (end < first_interval_->start()) {
      UseInterval* interval = new (zone) UseInterval(start, end);
      interval->set_next(first_interval_);
      first_interval_ = interval;
    } else {
      // The order in which instructions are processed (see
      // ProcessInstructions) guarantees that each new use interval either
      // precedes, intersects with, or touches the last added interval.
      DCHECK(start <= first_interval_->end());
      first_interval_->set_start(Min(start, first_interval_->start()));
      first_interval_->set_end(Max(end, first_interval_->end()));
    }
  }
}


void TopLevelLiveRange::AddUsePosition(UsePosition* use_pos) {
  LifetimePosition pos = use_pos->pos();
  TRACE("Add to live range %d use position %d\n", vreg(), pos.value());
  UsePosition* prev_hint = nullptr;
  UsePosition* prev = nullptr;
  UsePosition* current = first_pos_;
  while (current != nullptr && current->pos() < pos) {
    prev_hint = current->HasHint() ? current : prev_hint;
    prev = current;
    current = current->next();
  }

  if (prev == nullptr) {
    use_pos->set_next(first_pos_);
    first_pos_ = use_pos;
  } else {
    use_pos->set_next(prev->next());
    prev->set_next(use_pos);
  }

  if (prev_hint == nullptr && use_pos->HasHint()) {
    current_hint_position_ = use_pos;
  }
}
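
// Checks two interval chains, each sorted and internally disjoint, for any
// overlap by advancing whichever chain currently starts earlier.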
static bool AreUseIntervalsIntersecting(UseInterval* interval1,
                                        UseInterval* interval2) {
  while (interval1 != nullptr && interval2 != nullptr) {
    if (interval1->start() < interval2->start()) {
      if (interval1->end() > interval2->start()) {
        return true;
      }
      interval1 = interval1->next();
    } else {
      if (interval2->end() > interval1->start()) {
        return true;
      }
      interval2 = interval2->next();
    }
  }
  return false;
}


std::ostream& operator<<(std::ostream& os,
                         const PrintableLiveRange& printable_range) {
  const LiveRange* range = printable_range.range_;
  os << "Range: " << range->TopLevel()->vreg() << ":" << range->relative_id()
     << " ";
  if (range->TopLevel()->is_phi()) os << "phi ";
  if (range->TopLevel()->is_non_loop_phi()) os << "nlphi ";

  os << "{" << std::endl;
  UseInterval* interval = range->first_interval();
  UsePosition* use_pos = range->first_pos();
  PrintableInstructionOperand pio;
  pio.register_configuration_ = printable_range.register_configuration_;
  while (use_pos != nullptr) {
    if (use_pos->HasOperand()) {
      pio.op_ = *use_pos->operand();
      os << pio << use_pos->pos() << " ";
    }
    use_pos = use_pos->next();
  }
  os << std::endl;

  while (interval != nullptr) {
    os << '[' << interval->start() << ", " << interval->end() << ')'
       << std::endl;
    interval = interval->next();
  }
  os << "}";
  return os;
}

SpillRange::SpillRange(TopLevelLiveRange* parent, Zone* zone)
    : live_ranges_(zone),
      assigned_slot_(kUnassignedSlot),
      byte_width_(GetByteWidth(parent->representation())) {
  // Spill ranges are created for top level, non-splintered ranges. This is so
  // that, when merging decisions are made, we consider the full extent of the
  // virtual register, and avoid clobbering it.
  DCHECK(!parent->IsSplinter());
  UseInterval* result = nullptr;
  UseInterval* node = nullptr;
  // Copy the intervals for all ranges.
  for (LiveRange* range = parent; range != nullptr; range = range->next()) {
    UseInterval* src = range->first_interval();
    while (src != nullptr) {
      UseInterval* new_node = new (zone) UseInterval(src->start(), src->end());
      if (result == nullptr) {
        result = new_node;
      } else {
        node->set_next(new_node);
      }
      node = new_node;
      src = src->next();
    }
  }
  use_interval_ = result;
  live_ranges().push_back(parent);
  end_position_ = node->end();
  parent->SetSpillRange(this);
}

bool SpillRange::IsIntersectingWith(SpillRange* other) const {
  if (this->use_interval_ == nullptr || other->use_interval_ == nullptr ||
      this->End() <= other->use_interval_->start() ||
      other->End() <= this->use_interval_->start()) {
    return false;
  }
  return AreUseIntervalsIntersecting(use_interval_, other->use_interval_);
}
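
// Merges |other| into this spill range when neither has been assigned a slot
// yet, both have the same byte width, and their intervals do not overlap in
// time. On success, |other| is left empty and all of its live ranges are
// re-pointed at this spill range.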
1270 43240211 : bool SpillRange::TryMerge(SpillRange* other) {
1271 29345089 : if (HasSlot() || other->HasSlot()) return false;
1272 13895122 : if (byte_width() != other->byte_width() || IsIntersectingWith(other))
1273 : return false;
1274 :
1275 : LifetimePosition max = LifetimePosition::MaxPosition();
1276 1141496 : if (End() < other->End() && other->End() != max) {
1277 28118 : end_position_ = other->End();
1278 : }
1279 1141496 : other->end_position_ = max;
1280 :
1281 1141496 : MergeDisjointIntervals(other->use_interval_);
1282 1141496 : other->use_interval_ = nullptr;
1283 :
1284 3455395 : for (TopLevelLiveRange* range : other->live_ranges()) {
1285 : DCHECK(range->GetSpillRange() == other);
1286 : range->SetSpillRange(this);
1287 : }
1288 :
1289 : live_ranges().insert(live_ranges().end(), other->live_ranges().begin(),
1290 1141496 : other->live_ranges().end());
1291 : other->live_ranges().clear();
1292 :
1293 1141495 : return true;
1294 : }
1295 :
1296 :
1297 1141496 : void SpillRange::MergeDisjointIntervals(UseInterval* other) {
1298 : UseInterval* tail = nullptr;
1299 1141496 : UseInterval* current = use_interval_;
1300 6451991 : while (other != nullptr) {
1301 : // Make sure the 'current' list starts first
1302 8337998 : if (current == nullptr || current->start() > other->start()) {
1303 : std::swap(current, other);
1304 : }
1305 : // Check disjointness
1306 : DCHECK(other == nullptr || current->end() <= other->start());
1307 : // Append the 'current' node to the result accumulator and move forward
1308 4168999 : if (tail == nullptr) {
1309 1141493 : use_interval_ = current;
1310 : } else {
1311 : tail->set_next(current);
1312 : }
1313 : tail = current;
1314 : current = current->next();
1315 : }
1316 : // Other list is empty => we are done
1317 1141496 : }
1318 :
1319 :
1320 0 : void SpillRange::Print() const {
1321 0 : OFStream os(stdout);
1322 0 : os << "{" << std::endl;
1323 0 : for (TopLevelLiveRange* range : live_ranges()) {
1324 0 : os << range->vreg() << " ";
1325 : }
1326 : os << std::endl;
1327 :
1328 0 : for (UseInterval* i = interval(); i != nullptr; i = i->next()) {
1329 0 : os << '[' << i->start() << ", " << i->end() << ')' << std::endl;
1330 : }
1331 0 : os << "}" << std::endl;
1332 0 : }
1333 :
1334 :
1335 0 : RegisterAllocationData::PhiMapValue::PhiMapValue(PhiInstruction* phi,
1336 : const InstructionBlock* block,
1337 : Zone* zone)
1338 : : phi_(phi),
1339 : block_(block),
1340 : incoming_operands_(zone),
1341 2667212 : assigned_register_(kUnassignedRegister) {
1342 2667212 : incoming_operands_.reserve(phi->operands().size());
1343 0 : }
1344 :
1345 :
1346 0 : void RegisterAllocationData::PhiMapValue::AddOperand(
1347 : InstructionOperand* operand) {
1348 3325579 : incoming_operands_.push_back(operand);
1349 0 : }
1350 :
1351 :
1352 0 : void RegisterAllocationData::PhiMapValue::CommitAssignment(
1353 : const InstructionOperand& assigned) {
1354 7984648 : for (InstructionOperand* operand : incoming_operands_) {
1355 : InstructionOperand::ReplaceWith(operand, &assigned);
1356 : }
1357 0 : }
1358 :
1359 915927 : RegisterAllocationData::RegisterAllocationData(
1360 : const RegisterConfiguration* config, Zone* zone, Frame* frame,
1361 12822787 : InstructionSequence* code, const char* debug_name)
1362 : : allocation_zone_(zone),
1363 : frame_(frame),
1364 : code_(code),
1365 : debug_name_(debug_name),
1366 : config_(config),
1367 : phi_map_(allocation_zone()),
1368 : live_in_sets_(code->InstructionBlockCount(), nullptr, allocation_zone()),
1369 : live_out_sets_(code->InstructionBlockCount(), nullptr, allocation_zone()),
1370 915897 : live_ranges_(code->VirtualRegisterCount() * 2, nullptr,
1371 : allocation_zone()),
1372 915904 : fixed_live_ranges_(this->config()->num_general_registers(), nullptr,
1373 : allocation_zone()),
1374 : fixed_float_live_ranges_(allocation_zone()),
1375 915915 : fixed_double_live_ranges_(this->config()->num_double_registers(), nullptr,
1376 : allocation_zone()),
1377 : fixed_simd128_live_ranges_(allocation_zone()),
1378 : spill_ranges_(code->VirtualRegisterCount(), nullptr, allocation_zone()),
1379 : delayed_references_(allocation_zone()),
1380 : assigned_registers_(nullptr),
1381 : assigned_double_registers_(nullptr),
1382 : virtual_register_count_(code->VirtualRegisterCount()),
1383 8243217 : preassigned_slot_ranges_(zone) {
1384 : if (!kSimpleFPAliasing) {
1385 : fixed_float_live_ranges_.resize(this->config()->num_float_registers(),
1386 : nullptr);
1387 : fixed_simd128_live_ranges_.resize(this->config()->num_simd128_registers(),
1388 : nullptr);
1389 : }
1390 :
1391 : assigned_registers_ = new (code_zone())
1392 1831837 : BitVector(this->config()->num_general_registers(), code_zone());
1393 : assigned_double_registers_ = new (code_zone())
1394 1831828 : BitVector(this->config()->num_double_registers(), code_zone());
1395 915919 : this->frame()->SetAllocatedRegisters(assigned_registers_);
1396 915919 : this->frame()->SetAllocatedDoubleRegisters(assigned_double_registers_);
1397 915919 : }
1398 :
1399 :
1400 25846214 : MoveOperands* RegisterAllocationData::AddGapMove(
1401 : int index, Instruction::GapPosition position,
1402 25846214 : const InstructionOperand& from, const InstructionOperand& to) {
1403 : Instruction* instr = code()->InstructionAt(index);
1404 25846189 : ParallelMove* moves = instr->GetOrCreateParallelMove(position, code_zone());
1405 25846029 : return moves->AddMove(from, to);
1406 : }
1407 :
1408 :
1409 0 : MachineRepresentation RegisterAllocationData::RepresentationFor(
1410 42121401 : int virtual_register) {
1411 : DCHECK_LT(virtual_register, code()->VirtualRegisterCount());
1412 42121401 : return code()->GetRepresentation(virtual_register);
1413 : }
1414 :
1415 :
1416 203007295 : TopLevelLiveRange* RegisterAllocationData::GetOrCreateLiveRangeFor(int index) {
1417 406014590 : if (index >= static_cast<int>(live_ranges().size())) {
1418 0 : live_ranges().resize(index + 1, nullptr);
1419 : }
1420 406014590 : TopLevelLiveRange* result = live_ranges()[index];
1421 203007295 : if (result == nullptr) {
1422 23350677 : result = NewLiveRange(index, RepresentationFor(index));
1423 46701408 : live_ranges()[index] = result;
1424 : }
1425 203007287 : return result;
1426 : }
1427 :
1428 :
1429 43936660 : TopLevelLiveRange* RegisterAllocationData::NewLiveRange(
1430 43936660 : int index, MachineRepresentation rep) {
1431 43936001 : return new (allocation_zone()) TopLevelLiveRange(index, rep);
1432 : }
1433 :
1434 :
1435 3285923 : int RegisterAllocationData::GetNextLiveRangeId() {
1436 3285923 : int vreg = virtual_register_count_++;
1437 6571846 : if (vreg >= static_cast<int>(live_ranges().size())) {
1438 0 : live_ranges().resize(vreg + 1, nullptr);
1439 : }
1440 3285923 : return vreg;
1441 : }
1442 :
1443 :
1444 3285904 : TopLevelLiveRange* RegisterAllocationData::NextLiveRange(
1445 : MachineRepresentation rep) {
1446 3285904 : int vreg = GetNextLiveRangeId();
1447 3285966 : TopLevelLiveRange* ret = NewLiveRange(vreg, rep);
1448 3285945 : return ret;
1449 : }
1450 :
1451 :
1452 1333603 : RegisterAllocationData::PhiMapValue* RegisterAllocationData::InitializePhiMap(
1453 2667209 : const InstructionBlock* block, PhiInstruction* phi) {
1454 : RegisterAllocationData::PhiMapValue* map_value = new (allocation_zone())
1455 : RegisterAllocationData::PhiMapValue(phi, block, allocation_zone());
1456 : auto res =
1457 2667209 : phi_map_.insert(std::make_pair(phi->virtual_register(), map_value));
1458 : DCHECK(res.second);
1459 : USE(res);
1460 1333603 : return map_value;
1461 : }
1462 :
1463 :
1464 0 : RegisterAllocationData::PhiMapValue* RegisterAllocationData::GetPhiMapValueFor(
1465 : int virtual_register) {
1466 : auto it = phi_map_.find(virtual_register);
1467 : DCHECK(it != phi_map_.end());
1468 3966927 : return it->second;
1469 : }
1470 :
1471 :
1472 0 : RegisterAllocationData::PhiMapValue* RegisterAllocationData::GetPhiMapValueFor(
1473 3567218 : TopLevelLiveRange* top_range) {
1474 0 : return GetPhiMapValueFor(top_range->vreg());
1475 : }
1476 :
1477 :
1478 42 : bool RegisterAllocationData::ExistsUseWithoutDefinition() {
1479 : bool found = false;
1480 42 : BitVector::Iterator iterator(live_in_sets()[0]);
1481 84 : while (!iterator.Done()) {
1482 : found = true;
1483 0 : int operand_index = iterator.Current();
1484 : PrintF("Register allocator error: live v%d reached first block.\n",
1485 0 : operand_index);
1486 0 : LiveRange* range = GetOrCreateLiveRangeFor(operand_index);
1487 0 : PrintF(" (first use is at %d)\n", range->first_pos()->pos().value());
1488 0 : if (debug_name() == nullptr) {
1489 0 : PrintF("\n");
1490 : } else {
1491 0 : PrintF(" (function: %s)\n", debug_name());
1492 : }
1493 0 : iterator.Advance();
1494 : }
1495 42 : return found;
1496 : }
1497 :
1498 :
1499 : // If a range is defined in a deferred block, we can expect all the range
1500 : // to only cover positions in deferred blocks. Otherwise, a block on the
1501 : // hot path would be dominated by a deferred block, meaning it is unreachable
1502 : // without passing through the deferred block, which is contradictory.
1503 : // In particular, when such a range contributes a result back on the hot
1504 : // path, it will be as one of the inputs of a phi. In that case, the value
1505 : // will be transferred via a move in the Gap::END's of the last instruction
1506 : // of a deferred block.
1507 250 : bool RegisterAllocationData::RangesDefinedInDeferredStayInDeferred() {
1508 546 : for (const TopLevelLiveRange* range : live_ranges()) {
1509 901 : if (range == nullptr || range->IsEmpty() ||
1510 : !code()
1511 : ->GetInstructionBlock(range->Start().ToInstructionIndex())
1512 208 : ->IsDeferred()) {
1513 : continue;
1514 : }
1515 0 : for (const UseInterval* i = range->first_interval(); i != nullptr;
1516 : i = i->next()) {
1517 : int first = i->FirstGapIndex();
1518 : int last = i->LastGapIndex();
1519 0 : for (int instr = first; instr <= last;) {
1520 0 : const InstructionBlock* block = code()->GetInstructionBlock(instr);
1521 0 : if (!block->IsDeferred()) return false;
1522 : instr = block->last_instruction_index() + 1;
1523 : }
1524 : }
1525 : }
1526 : return true;
1527 : }
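// A minimal sketch of the walk above, with hypothetical gap indices: suppose
// a deferred block D covers instructions [20, 24] and a hot block H starts
// at 25. For a range defined in D whose UseInterval spans gap indices
// [20, 26], the inner loop proceeds block by block:
//
//   int instr = 20;                               // i->FirstGapIndex()
//   block = code()->GetInstructionBlock(20);      // D, deferred -> continue
//   instr = block->last_instruction_index() + 1;  // 25
//   block = code()->GetInstructionBlock(25);      // H, hot -> return false
//
// i.e. one GetInstructionBlock lookup per block, not per instruction.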
1528 :
1529 2802197 : SpillRange* RegisterAllocationData::AssignSpillRangeToLiveRange(
1530 9808654 : TopLevelLiveRange* range) {
1531 : DCHECK(!range->HasSpillOperand());
1532 :
1533 : SpillRange* spill_range = range->GetAllocatedSpillRange();
1534 2802197 : if (spill_range == nullptr) {
1535 : DCHECK(!range->IsSplinter());
1536 2015761 : spill_range = new (allocation_zone()) SpillRange(range, allocation_zone());
1537 : }
1538 : range->set_spill_type(TopLevelLiveRange::SpillType::kSpillRange);
1539 :
1540 : int spill_range_index =
1541 2802200 : range->IsSplinter() ? range->splintered_from()->vreg() : range->vreg();
1542 :
1543 5604400 : spill_ranges()[spill_range_index] = spill_range;
1544 :
1545 2802200 : return spill_range;
1546 : }
1547 :
1548 :
1549 865297 : SpillRange* RegisterAllocationData::CreateSpillRangeForLiveRange(
1550 865297 : TopLevelLiveRange* range) {
1551 : DCHECK(!range->HasSpillOperand());
1552 : DCHECK(!range->IsSplinter());
1553 : SpillRange* spill_range =
1554 865324 : new (allocation_zone()) SpillRange(range, allocation_zone());
1555 865318 : return spill_range;
1556 : }
1557 :
1558 39780498 : void RegisterAllocationData::MarkAllocated(MachineRepresentation rep,
1559 : int index) {
1560 39780498 : switch (rep) {
1561 : case MachineRepresentation::kFloat32:
1562 : case MachineRepresentation::kSimd128:
1563 : if (kSimpleFPAliasing) {
1564 10723373 : assigned_double_registers_->Add(index);
1565 : } else {
1566 : int alias_base_index = -1;
1567 : int aliases = config()->GetAliases(
1568 : rep, index, MachineRepresentation::kFloat64, &alias_base_index);
1569 : DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
1570 : while (aliases--) {
1571 : int aliased_reg = alias_base_index + aliases;
1572 : assigned_double_registers_->Add(aliased_reg);
1573 : }
1574 : }
1575 : break;
1576 : case MachineRepresentation::kFloat64:
1577 10662452 : assigned_double_registers_->Add(index);
1578 : break;
1579 : default:
1580 : DCHECK(!IsFloatingPoint(rep));
1581 29057125 : assigned_registers_->Add(index);
1582 : break;
1583 : }
1584 39780498 : }
1585 :
1586 24295720 : bool RegisterAllocationData::IsBlockBoundary(LifetimePosition pos) const {
1587 24295752 : return pos.IsFullStart() &&
1588 10789278 : code()->GetInstructionBlock(pos.ToInstructionIndex())->code_start() ==
1589 13506474 : pos.ToInstructionIndex();
1590 : }
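// IsBlockBoundary is position arithmetic plus one block lookup. As a rough
// sketch (the exact encoding lives in LifetimePosition; the constants here
// are assumptions): each instruction index owns four sub-positions, so
//
//   LifetimePosition::GapFromInstructionIndex(i).value() == 4 * i
//   pos.IsFullStart()        <=>  pos.value() % 4 == 0
//   pos.ToInstructionIndex() ==   pos.value() / 4
//
// and a position is a block boundary iff it is the full start of its
// block's first instruction.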
1591 :
1592 :
1593 1831811 : ConstraintBuilder::ConstraintBuilder(RegisterAllocationData* data)
1594 1831811 : : data_(data) {}
1595 :
1596 :
1597 18783493 : InstructionOperand* ConstraintBuilder::AllocateFixed(
1598 : UnallocatedOperand* operand, int pos, bool is_tagged) {
1599 18783493 : TRACE("Allocating fixed reg for op %d\n", operand->virtual_register());
1600 : DCHECK(operand->HasFixedPolicy());
1601 : InstructionOperand allocated;
1602 : MachineRepresentation rep = InstructionSequence::DefaultRepresentation();
1603 : int virtual_register = operand->virtual_register();
1604 18783556 : if (virtual_register != InstructionOperand::kInvalidVirtualRegister) {
1605 : rep = data()->RepresentationFor(virtual_register);
1606 : }
1607 18783540 : if (operand->HasFixedSlotPolicy()) {
1608 : allocated = AllocatedOperand(AllocatedOperand::STACK_SLOT, rep,
1609 : operand->fixed_slot_index());
1610 17994731 : } else if (operand->HasFixedRegisterPolicy()) {
1611 : DCHECK(!IsFloatingPoint(rep));
1612 : allocated = AllocatedOperand(AllocatedOperand::REGISTER, rep,
1613 : operand->fixed_register_index());
1614 104264 : } else if (operand->HasFixedFPRegisterPolicy()) {
1615 : DCHECK(IsFloatingPoint(rep));
1616 : DCHECK_NE(InstructionOperand::kInvalidVirtualRegister, virtual_register);
1617 : allocated = AllocatedOperand(AllocatedOperand::REGISTER, rep,
1618 : operand->fixed_register_index());
1619 : } else {
1620 0 : UNREACHABLE();
1621 : }
1622 : InstructionOperand::ReplaceWith(operand, &allocated);
1623 18783540 : if (is_tagged) {
1624 12500263 : TRACE("Fixed reg is tagged at %d\n", pos);
1625 12500257 : Instruction* instr = code()->InstructionAt(pos);
1626 12500257 : if (instr->HasReferenceMap()) {
1627 9950730 : instr->reference_map()->RecordReference(*AllocatedOperand::cast(operand));
1628 : }
1629 : }
1630 18783546 : return operand;
1631 : }
1632 :
1633 :
1634 915894 : void ConstraintBuilder::MeetRegisterConstraints() {
1635 13293228 : for (InstructionBlock* block : code()->instruction_blocks()) {
1636 11461411 : MeetRegisterConstraints(block);
1637 : }
1638 915923 : }
1639 :
1640 :
1641 11461417 : void ConstraintBuilder::MeetRegisterConstraints(const InstructionBlock* block) {
1642 : int start = block->first_instruction_index();
1643 : int end = block->last_instruction_index();
1644 : DCHECK_NE(-1, start);
1645 50588110 : for (int i = start; i <= end; ++i) {
1646 39126605 : MeetConstraintsBefore(i);
1647 39126708 : if (i != end) MeetConstraintsAfter(i);
1648 : }
1649 : // Meet register constraints for the instruction in the end.
1650 11461505 : MeetRegisterConstraintsForLastInstructionInBlock(block);
1651 11461430 : }
1652 :
1653 :
1654 11461454 : void ConstraintBuilder::MeetRegisterConstraintsForLastInstructionInBlock(
1655 11461454 : const InstructionBlock* block) {
1656 : int end = block->last_instruction_index();
1657 11463892 : Instruction* last_instruction = code()->InstructionAt(end);
1658 22927784 : for (size_t i = 0; i < last_instruction->OutputCount(); i++) {
1659 : InstructionOperand* output_operand = last_instruction->OutputAt(i);
1660 : DCHECK(!output_operand->IsConstant());
1661 2483 : UnallocatedOperand* output = UnallocatedOperand::cast(output_operand);
1662 : int output_vreg = output->virtual_register();
1663 2483 : TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(output_vreg);
1664 : bool assigned = false;
1665 2483 : if (output->HasFixedPolicy()) {
1666 131 : AllocateFixed(output, -1, false);
1667 : // This value is produced on the stack; we never need to spill it.
1668 131 : if (output->IsStackSlot()) {
1669 : DCHECK(LocationOperand::cast(output)->index() <
1670 : data()->frame()->GetSpillSlotCount());
1671 : range->SetSpillOperand(LocationOperand::cast(output));
1672 : range->SetSpillStartIndex(end);
1673 : assigned = true;
1674 : }
1675 :
1676 264 : for (const RpoNumber& succ : block->successors()) {
1677 2 : const InstructionBlock* successor = code()->InstructionBlockAt(succ);
1678 : DCHECK(successor->PredecessorCount() == 1);
1679 : int gap_index = successor->first_instruction_index();
1680 : // Create an unconstrained operand for the same virtual register
1681 : // and insert a gap move from the fixed output to the operand.
1682 : UnallocatedOperand output_copy(UnallocatedOperand::ANY, output_vreg);
1683 4 : data()->AddGapMove(gap_index, Instruction::START, *output, output_copy);
1684 : }
1685 : }
1686 :
1687 2483 : if (!assigned) {
1688 9666 : for (const RpoNumber& succ : block->successors()) {
1689 4704 : const InstructionBlock* successor = code()->InstructionBlockAt(succ);
1690 : DCHECK(successor->PredecessorCount() == 1);
1691 : int gap_index = successor->first_instruction_index();
1692 : range->RecordSpillLocation(allocation_zone(), gap_index, output);
1693 : range->SetSpillStartIndex(gap_index);
1694 : }
1695 : }
1696 : }
1697 11461409 : }
1698 :
1699 :
1700 27665567 : void ConstraintBuilder::MeetConstraintsAfter(int instr_index) {
1701 77355479 : Instruction* first = code()->InstructionAt(instr_index);
1702 : // Handle fixed temporaries.
1703 56756158 : for (size_t i = 0; i < first->TempCount(); i++) {
1704 712528 : UnallocatedOperand* temp = UnallocatedOperand::cast(first->TempAt(i));
1705 712528 : if (temp->HasFixedPolicy()) AllocateFixed(temp, instr_index, false);
1706 : }
1707 : // Handle constant/fixed output operands.
1708 70289249 : for (size_t i = 0; i < first->OutputCount(); i++) {
1709 21311879 : InstructionOperand* output = first->OutputAt(i);
1710 21311879 : if (output->IsConstant()) {
1711 : int output_vreg = ConstantOperand::cast(output)->virtual_register();
1712 9047076 : TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(output_vreg);
1713 9047088 : range->SetSpillStartIndex(instr_index + 1);
1714 : range->SetSpillOperand(output);
1715 : continue;
1716 : }
1717 : UnallocatedOperand* first_output = UnallocatedOperand::cast(output);
1718 : TopLevelLiveRange* range =
1719 12264803 : data()->GetOrCreateLiveRangeFor(first_output->virtual_register());
1720 : bool assigned = false;
1721 12264791 : if (first_output->HasFixedPolicy()) {
1722 : int output_vreg = first_output->virtual_register();
1723 : UnallocatedOperand output_copy(UnallocatedOperand::ANY, output_vreg);
1724 : bool is_tagged = code()->IsReference(output_vreg);
1725 5430770 : if (first_output->HasSecondaryStorage()) {
1726 : range->MarkHasPreassignedSlot();
1727 : data()->preassigned_slot_ranges().push_back(
1728 1315385 : std::make_pair(range, first_output->GetSecondaryStorage()));
1729 : }
1730 5430766 : AllocateFixed(first_output, instr_index, is_tagged);
1731 :
1732 : // This value is produced on the stack; we never need to spill it.
1733 5430778 : if (first_output->IsStackSlot()) {
1734 : DCHECK(LocationOperand::cast(first_output)->index() <
1735 : data()->frame()->GetTotalFrameSlotCount());
1736 : range->SetSpillOperand(LocationOperand::cast(first_output));
1737 710843 : range->SetSpillStartIndex(instr_index + 1);
1738 : assigned = true;
1739 : }
1740 : data()->AddGapMove(instr_index + 1, Instruction::START, *first_output,
1741 10861556 : output_copy);
1742 : }
1743 : // Make sure we add a gap move for spilling (if we have not done
1744 : // so already).
1745 12264795 : if (!assigned) {
1746 : range->RecordSpillLocation(allocation_zone(), instr_index + 1,
1747 11553980 : first_output);
1748 : range->SetSpillStartIndex(instr_index + 1);
1749 : }
1750 : }
1751 27665521 : }
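// Sketch of the fixed-output rewrite above (vreg and register names are
// hypothetical), for an instruction at index i whose output v7 is fixed to
// register r0:
//
//   before:  v7 = Op ...                    // v7 has a fixed-register policy
//   after:   [r0|R] = Op ...                // AllocateFixed pinned the output
//            gap (i+1, START): v7 = [r0|R]  // AddGapMove reconnects v7
//
// so later uses of v7 stay unconstrained while the definition meets the
// fixed constraint.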
1752 :
1753 :
1754 39126697 : void ConstraintBuilder::MeetConstraintsBefore(int instr_index) {
1755 182137615 : Instruction* second = code()->InstructionAt(instr_index);
1756 : // Handle fixed input operands of second instruction.
1757 242761574 : for (size_t i = 0; i < second->InputCount(); i++) {
1758 82254151 : InstructionOperand* input = second->InputAt(i);
1759 82254151 : if (input->IsImmediate() || input->IsExplicit()) {
1760 : continue; // Ignore immediates and explicitly reserved registers.
1761 : }
1762 : UnallocatedOperand* cur_input = UnallocatedOperand::cast(input);
1763 46028467 : if (cur_input->HasFixedPolicy()) {
1764 : int input_vreg = cur_input->virtual_register();
1765 : UnallocatedOperand input_copy(UnallocatedOperand::ANY, input_vreg);
1766 : bool is_tagged = code()->IsReference(input_vreg);
1767 13339845 : AllocateFixed(cur_input, instr_index, is_tagged);
1768 26679688 : data()->AddGapMove(instr_index, Instruction::END, input_copy, *cur_input);
1769 : }
1770 : }
1771 : // Handle "output same as input" for second instruction.
1772 81755322 : for (size_t i = 0; i < second->OutputCount(); i++) {
1773 21314273 : InstructionOperand* output = second->OutputAt(i);
1774 40975947 : if (!output->IsUnallocated()) continue;
1775 : UnallocatedOperand* second_output = UnallocatedOperand::cast(output);
1776 12267230 : if (!second_output->HasSameAsInputPolicy()) continue;
1777 : DCHECK(i == 0); // Only valid for first output.
1778 : UnallocatedOperand* cur_input =
1779 1652599 : UnallocatedOperand::cast(second->InputAt(0));
1780 : int output_vreg = second_output->virtual_register();
1781 : int input_vreg = cur_input->virtual_register();
1782 : UnallocatedOperand input_copy(UnallocatedOperand::ANY, input_vreg);
1783 : cur_input->set_virtual_register(second_output->virtual_register());
1784 : MoveOperands* gap_move = data()->AddGapMove(instr_index, Instruction::END,
1785 3305198 : input_copy, *cur_input);
1786 1968574 : if (code()->IsReference(input_vreg) && !code()->IsReference(output_vreg)) {
1787 315849 : if (second->HasReferenceMap()) {
1788 : RegisterAllocationData::DelayedReference delayed_reference = {
1789 0 : second->reference_map(), &gap_move->source()};
1790 0 : data()->delayed_references().push_back(delayed_reference);
1791 : }
1792 1336876 : } else if (!code()->IsReference(input_vreg) &&
1793 : code()->IsReference(output_vreg)) {
1794 : // The input is assumed to take on a tagged representation immediately,
1795 : // i.e. before the pointer map can be used. The pointer map at the
1796 : // instruction will therefore include the output operand (whose value at
1797 : // the beginning of the instruction is equal to the input operand). If
1798 : // this is not desired, then the pointer map at this instruction needs
1799 : // to be adjusted manually.
1800 : }
1801 : }
1802 39126706 : }
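// The "output same as input" case turns a two-address constraint into a gap
// move. Sketch with hypothetical vregs, for an instruction at index i:
//
//   before:  v5 = AddI v3, v4        // output policy: same as input 0
//   after:   gap (i, END): v5 = v3   // AddGapMove(input_copy -> cur_input)
//            v5 = AddI v5, v4        // input 0 now carries the output vreg
//
// The reference-map bookkeeping afterwards covers the case where v3 and v5
// disagree on taggedness.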
1803 :
1804 :
1805 915955 : void ConstraintBuilder::ResolvePhis() {
1806 : // Process the blocks in reverse order.
1807 24754964 : for (InstructionBlock* block : base::Reversed(code()->instruction_blocks())) {
1808 11461542 : ResolvePhis(block);
1809 : }
1810 915925 : }
1811 :
1812 :
1813 12795093 : void ConstraintBuilder::ResolvePhis(const InstructionBlock* block) {
1814 24256580 : for (PhiInstruction* phi : block->phis()) {
1815 : int phi_vreg = phi->virtual_register();
1816 : RegisterAllocationData::PhiMapValue* map_value =
1817 1333605 : data()->InitializePhiMap(block, phi);
1818 1333602 : InstructionOperand& output = phi->output();
1819 : // Map the destination operands, so the commitment phase can find them.
1820 9318368 : for (size_t i = 0; i < phi->operands().size(); ++i) {
1821 3325582 : InstructionBlock* cur_block =
1822 6651154 : code()->InstructionBlockAt(block->predecessors()[i]);
1823 3325582 : UnallocatedOperand input(UnallocatedOperand::ANY, phi->operands()[i]);
1824 : MoveOperands* move = data()->AddGapMove(
1825 3325582 : cur_block->last_instruction_index(), Instruction::END, input, output);
1826 3325579 : map_value->AddOperand(&move->destination());
1827 : DCHECK(!code()
1828 : ->InstructionAt(cur_block->last_instruction_index())
1829 : ->HasReferenceMap());
1830 : }
1831 1333607 : TopLevelLiveRange* live_range = data()->GetOrCreateLiveRangeFor(phi_vreg);
1832 : int gap_index = block->first_instruction_index();
1833 : live_range->RecordSpillLocation(allocation_zone(), gap_index, &output);
1834 : live_range->SetSpillStartIndex(gap_index);
1835 : // We use the phi-ness of some nodes in some later heuristics.
1836 : live_range->set_is_phi(true);
1837 1333608 : live_range->set_is_non_loop_phi(!block->IsLoopHeader());
1838 : }
1839 11461489 : }
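// Sketch of the phi lowering above (block and vreg numbers hypothetical):
// for block B3 with predecessors B1, B2 and phi v10 = (v4 from B1, v5 from
// B2), ResolvePhis emits one gap move per predecessor edge:
//
//   gap (last_instruction(B1), END): v10 = v4
//   gap (last_instruction(B2), END): v10 = v5
//
// and records B3's first gap as the spill location for v10. The PhiMapValue
// keeps pointers to the move destinations so the commitment phase can patch
// them in place once v10 is assigned a location.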
1840 :
1841 :
1842 915893 : LiveRangeBuilder::LiveRangeBuilder(RegisterAllocationData* data,
1843 : Zone* local_zone)
1844 1831786 : : data_(data), phi_hints_(local_zone) {}
1845 :
1846 :
1847 11461465 : BitVector* LiveRangeBuilder::ComputeLiveOut(const InstructionBlock* block,
1848 11461482 : RegisterAllocationData* data) {
1849 : size_t block_index = block->rpo_number().ToSize();
1850 40142743 : BitVector* live_out = data->live_out_sets()[block_index];
1851 11461465 : if (live_out == nullptr) {
1852 : // Compute live-out for the given block, excluding contributions from
1853 : // backward successor edges.
1854 : Zone* zone = data->allocation_zone();
1855 25574691 : const InstructionSequence* code = data->code();
1856 :
1857 11461497 : live_out = new (zone) BitVector(code->VirtualRegisterCount(), zone);
1858 :
1859 : // Process all successor blocks.
1860 37155536 : for (const RpoNumber& succ : block->successors()) {
1861 : // Add values live on entry to the successor.
1862 14232682 : if (succ <= block->rpo_number()) continue;
1863 28226418 : BitVector* live_in = data->live_in_sets()[succ.ToSize()];
1864 14113209 : if (live_in != nullptr) live_out->Union(*live_in);
1865 :
1866 : // All phi input operands corresponding to this successor edge are live
1867 : // out from this block.
1868 14113209 : const InstructionBlock* successor = code->InstructionBlockAt(succ);
1869 14113222 : size_t index = successor->PredecessorIndexOf(block->rpo_number());
1870 : DCHECK(index < successor->PredecessorCount());
1871 31333011 : for (PhiInstruction* phi : successor->phis()) {
1872 6213234 : live_out->Add(phi->operands()[index]);
1873 : }
1874 : }
1875 22922842 : data->live_out_sets()[block_index] = live_out;
1876 : }
1877 11461404 : return live_out;
1878 : }
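// Sketch of the dataflow step above, with hypothetical block numbers: for
// block B2 with successors B3 (live_in = {v1, v2}) and B4 (live_in = {v2}),
// where B4 also has a phi v9 = (v5 from B2, v6 from B5):
//
//   live_out(B2) = live_in(B3) U live_in(B4) U {v5} = {v1, v2, v5}
//
// Backward edges (succ <= block->rpo_number()) are skipped here; loop
// live-ins are patched in later by ProcessLoopHeader.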
1879 :
1880 :
1881 22922836 : void LiveRangeBuilder::AddInitialIntervals(const InstructionBlock* block,
1882 78191014 : BitVector* live_out) {
1883 : // Add an interval that includes the entire block to the live range for
1884 : // each live_out value.
1885 : LifetimePosition start = LifetimePosition::GapFromInstructionIndex(
1886 11461418 : block->first_instruction_index());
1887 : LifetimePosition end = LifetimePosition::InstructionFromInstructionIndex(
1888 : block->last_instruction_index())
1889 11461418 : .NextStart();
1890 : BitVector::Iterator iterator(live_out);
1891 179304934 : while (!iterator.Done()) {
1892 78191014 : int operand_index = iterator.Current();
1893 78191014 : TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(operand_index);
1894 78190995 : range->AddUseInterval(start, end, allocation_zone());
1895 78190893 : iterator.Advance();
1896 : }
1897 11461453 : }
1898 :
1899 9316587 : int LiveRangeBuilder::FixedFPLiveRangeID(int index, MachineRepresentation rep) {
1900 9316587 : int result = -index - 1;
1901 9316587 : switch (rep) {
1902 : case MachineRepresentation::kSimd128:
1903 0 : result -= config()->num_float_registers();
1904 : // Fall through.
1905 : case MachineRepresentation::kFloat32:
1906 8347 : result -= config()->num_double_registers();
1907 : // Fall through.
1908 : case MachineRepresentation::kFloat64:
1909 9316587 : result -= config()->num_general_registers();
1910 : break;
1911 : default:
1912 0 : UNREACHABLE();
1913 : break;
1914 : }
1915 9316587 : return result;
1916 : }
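// The fall-through switch packs (representation, index) into disjoint bands
// of negative vreg space. A worked example with assumed register counts
// (16 general, 16 double, 32 float):
//
//   kFloat64, index 3:  -3 - 1 - 16           = -20
//   kFloat32, index 3:  -3 - 1 - 16 - 16      = -36  (falls through once)
//   kSimd128, index 3:  -3 - 1 - 32 - 16 - 16 = -68  (falls through twice)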
1917 :
1918 167082684 : TopLevelLiveRange* LiveRangeBuilder::FixedLiveRangeFor(int index) {
1919 : DCHECK(index < config()->num_general_registers());
1920 226668870 : TopLevelLiveRange* result = data()->fixed_live_ranges()[index];
1921 75556290 : if (result == nullptr) {
1922 : MachineRepresentation rep = InstructionSequence::DefaultRepresentation();
1923 7985320 : result = data()->NewLiveRange(FixedLiveRangeID(index), rep);
1924 : DCHECK(result->IsFixed());
1925 : result->set_assigned_register(index);
1926 7984860 : data()->MarkAllocated(rep, index);
1927 15970488 : data()->fixed_live_ranges()[index] = result;
1928 : }
1929 75556214 : return result;
1930 : }
1931 :
1932 51249161 : TopLevelLiveRange* LiveRangeBuilder::FixedFPLiveRangeFor(
1933 60565128 : int index, MachineRepresentation rep) {
1934 : int num_regs = config()->num_double_registers();
1935 : ZoneVector<TopLevelLiveRange*>* live_ranges =
1936 : &data()->fixed_double_live_ranges();
1937 : if (!kSimpleFPAliasing) {
1938 : switch (rep) {
1939 : case MachineRepresentation::kFloat32:
1940 : num_regs = config()->num_float_registers();
1941 : live_ranges = &data()->fixed_float_live_ranges();
1942 : break;
1943 : case MachineRepresentation::kSimd128:
1944 : num_regs = config()->num_simd128_registers();
1945 : live_ranges = &data()->fixed_simd128_live_ranges();
1946 : break;
1947 : default:
1948 : break;
1949 : }
1950 : }
1951 :
1952 : DCHECK(index < num_regs);
1953 : USE(num_regs);
1954 111814471 : TopLevelLiveRange* result = (*live_ranges)[index];
1955 51249161 : if (result == nullptr) {
1956 9316599 : result = data()->NewLiveRange(FixedFPLiveRangeID(index, rep), rep);
1957 : DCHECK(result->IsFixed());
1958 : result->set_assigned_register(index);
1959 9315967 : data()->MarkAllocated(rep, index);
1960 9316149 : (*live_ranges)[index] = result;
1961 : }
1962 51248711 : return result;
1963 : }
1964 :
1965 187822559 : TopLevelLiveRange* LiveRangeBuilder::LiveRangeFor(InstructionOperand* operand) {
1966 112166939 : if (operand->IsUnallocated()) {
1967 : return data()->GetOrCreateLiveRangeFor(
1968 66608409 : UnallocatedOperand::cast(operand)->virtual_register());
1969 45558530 : } else if (operand->IsConstant()) {
1970 : return data()->GetOrCreateLiveRangeFor(
1971 9047211 : ConstantOperand::cast(operand)->virtual_register());
1972 36511319 : } else if (operand->IsRegister()) {
1973 : return FixedLiveRangeFor(
1974 34725238 : LocationOperand::cast(operand)->GetRegister().code());
1975 1786081 : } else if (operand->IsFPRegister()) {
1976 : LocationOperand* op = LocationOperand::cast(operand);
1977 208476 : return FixedFPLiveRangeFor(op->register_code(), op->representation());
1978 : } else {
1979 : return nullptr;
1980 : }
1981 : }
1982 :
1983 :
1984 67972243 : UsePosition* LiveRangeBuilder::NewUsePosition(LifetimePosition pos,
1985 : InstructionOperand* operand,
1986 : void* hint,
1987 : UsePositionHintType hint_type) {
1988 135944485 : return new (allocation_zone()) UsePosition(pos, operand, hint, hint_type);
1989 : }
1990 :
1991 :
1992 42731013 : UsePosition* LiveRangeBuilder::Define(LifetimePosition position,
1993 : InstructionOperand* operand, void* hint,
1994 : UsePositionHintType hint_type) {
1995 42731013 : TopLevelLiveRange* range = LiveRangeFor(operand);
1996 42731241 : if (range == nullptr) return nullptr;
1997 :
1998 41942588 : if (range->IsEmpty() || range->Start() > position) {
1999 : // This can happen if there is a definition without a use.
2000 1364018 : range->AddUseInterval(position, position.NextStart(), allocation_zone());
2001 1364007 : range->AddUsePosition(NewUsePosition(position.NextStart()));
2002 : } else {
2003 40578570 : range->ShortenTo(position);
2004 : }
2005 41942740 : if (!operand->IsUnallocated()) return nullptr;
2006 : UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand);
2007 : UsePosition* use_pos =
2008 14900847 : NewUsePosition(position, unalloc_operand, hint, hint_type);
2009 14900816 : range->AddUsePosition(use_pos);
2010 14900863 : return use_pos;
2011 : }
2012 :
2013 :
2014 69435738 : UsePosition* LiveRangeBuilder::Use(LifetimePosition block_start,
2015 : LifetimePosition position,
2016 : InstructionOperand* operand, void* hint,
2017 : UsePositionHintType hint_type) {
2018 69435738 : TopLevelLiveRange* range = LiveRangeFor(operand);
2019 69436080 : if (range == nullptr) return nullptr;
2020 : UsePosition* use_pos = nullptr;
2021 68647348 : if (operand->IsUnallocated()) {
2022 : UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand);
2023 51708724 : use_pos = NewUsePosition(position, unalloc_operand, hint, hint_type);
2024 51708667 : range->AddUsePosition(use_pos);
2025 : }
2026 68647643 : range->AddUseInterval(block_start, position, allocation_zone());
2027 68647028 : return use_pos;
2028 : }
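// Define and Use implement the backward scan: blocks are visited from the
// last instruction to the first, so a value is first assumed live (a Use
// extends or creates its interval back to block_start) and the definition,
// visited later, trims it. Sketch with hypothetical position values:
//
//   Use(block_start=10, position=42, v3);  // interval [10, 42) for v3
//   ...
//   Define(position=30, v3);               // ShortenTo(30): now [30, 42)
//
// A Define with no prior Use instead creates the minimal interval
// [position, position.NextStart()).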
2029 :
2030 :
2031 44237602 : void LiveRangeBuilder::ProcessInstructions(const InstructionBlock* block,
2032 102951544 : BitVector* live) {
2033 : int block_start = block->first_instruction_index();
2034 : LifetimePosition block_start_position =
2035 : LifetimePosition::GapFromInstructionIndex(block_start);
2036 : bool fixed_float_live_ranges = false;
2037 : bool fixed_simd128_live_ranges = false;
2038 : if (!kSimpleFPAliasing) {
2039 : int mask = data()->code()->representation_mask();
2040 : fixed_float_live_ranges = (mask & kFloatRepBit) != 0;
2041 : fixed_simd128_live_ranges = (mask & kSimd128RepBit) != 0;
2042 : }
2043 :
2044 50588386 : for (int index = block->last_instruction_index(); index >= block_start;
2045 : index--) {
2046 : LifetimePosition curr_position =
2047 : LifetimePosition::InstructionFromInstructionIndex(index);
2048 221662133 : Instruction* instr = code()->InstructionAt(index);
2049 : DCHECK(instr != nullptr);
2050 : DCHECK(curr_position.IsInstructionPosition());
2051 : // Process output, inputs, and temps of this instruction.
2052 120881718 : for (size_t i = 0; i < instr->OutputCount(); i++) {
2053 21314434 : InstructionOperand* output = instr->OutputAt(i);
2054 21314434 : if (output->IsUnallocated()) {
2055 : // Unsupported.
2056 : DCHECK(!UnallocatedOperand::cast(output)->HasSlotPolicy());
2057 : int out_vreg = UnallocatedOperand::cast(output)->virtual_register();
2058 : live->Remove(out_vreg);
2059 14478005 : } else if (output->IsConstant()) {
2060 : int out_vreg = ConstantOperand::cast(output)->virtual_register();
2061 : live->Remove(out_vreg);
2062 : }
2063 21974331 : if (block->IsHandler() && index == block_start && output->IsAllocated() &&
2064 21523722 : output->IsRegister() &&
2065 : AllocatedOperand::cast(output)->GetRegister().is(
2066 : v8::internal::kReturnRegister0)) {
2067 : // The register defined here is blocked from gap start - it is the
2068 : // exception value.
2069 : // TODO(mtrofin): should we explore an explicit opcode for
2070 : // the first instruction in the handler?
2071 : Define(LifetimePosition::GapFromInstructionIndex(index), output);
2072 : } else {
2073 : Define(curr_position, output);
2074 : }
2075 : }
2076 :
2077 39126425 : if (instr->ClobbersRegisters()) {
2078 85067526 : for (int i = 0; i < config()->num_allocatable_general_registers(); ++i) {
2079 : // Create a UseInterval at this instruction for all fixed registers
2080 : // (including the instruction outputs). Adding another UseInterval here
2081 : // is OK because AddUseInterval will just merge it with the existing
2082 : // one at the end of the range.
2083 40832387 : int code = config()->GetAllocatableGeneralCode(i);
2084 40832387 : TopLevelLiveRange* range = FixedLiveRangeFor(code);
2085 : range->AddUseInterval(curr_position, curr_position.End(),
2086 40832323 : allocation_zone());
2087 : }
2088 : }
2089 :
2090 39126420 : if (instr->ClobbersDoubleRegisters()) {
2091 105484143 : for (int i = 0; i < config()->num_allocatable_double_registers(); ++i) {
2092 : // Add a UseInterval for all DoubleRegisters. See comment above for
2093 : // general registers.
2094 51040784 : int code = config()->GetAllocatableDoubleCode(i);
2095 : TopLevelLiveRange* range =
2096 51040784 : FixedFPLiveRangeFor(code, MachineRepresentation::kFloat64);
2097 : range->AddUseInterval(curr_position, curr_position.End(),
2098 51040231 : allocation_zone());
2099 : }
2100 : // Clobber fixed float registers on archs with non-simple aliasing.
2101 : if (!kSimpleFPAliasing) {
2102 : if (fixed_float_live_ranges) {
2103 : for (int i = 0; i < config()->num_allocatable_float_registers();
2104 : ++i) {
2105 : // Add a UseInterval for all FloatRegisters. See comment above for
2106 : // general registers.
2107 : int code = config()->GetAllocatableFloatCode(i);
2108 : TopLevelLiveRange* range =
2109 : FixedFPLiveRangeFor(code, MachineRepresentation::kFloat32);
2110 : range->AddUseInterval(curr_position, curr_position.End(),
2111 : allocation_zone());
2112 : }
2113 : }
2114 : if (fixed_simd128_live_ranges) {
2115 : for (int i = 0; i < config()->num_allocatable_simd128_registers();
2116 : ++i) {
2117 : int code = config()->GetAllocatableSimd128Code(i);
2118 : TopLevelLiveRange* range =
2119 : FixedFPLiveRangeFor(code, MachineRepresentation::kSimd128);
2120 : range->AddUseInterval(curr_position, curr_position.End(),
2121 : allocation_zone());
2122 : }
2123 : }
2124 : }
2125 : }
2126 :
2127 203631986 : for (size_t i = 0; i < instr->InputCount(); i++) {
2128 82252680 : InstructionOperand* input = instr->InputAt(i);
2129 82252680 : if (input->IsImmediate() || input->IsExplicit()) {
2130 : continue; // Ignore immediates and explicitly reserved registers.
2131 : }
2132 : LifetimePosition use_pos;
2133 78716301 : if (input->IsUnallocated() &&
2134 : UnallocatedOperand::cast(input)->IsUsedAtStart()) {
2135 : use_pos = curr_position;
2136 : } else {
2137 : use_pos = curr_position.End();
2138 : }
2139 :
2140 46027894 : if (input->IsUnallocated()) {
2141 : UnallocatedOperand* unalloc = UnallocatedOperand::cast(input);
2142 : int vreg = unalloc->virtual_register();
2143 : live->Add(vreg);
2144 32688435 : if (unalloc->HasSlotPolicy()) {
2145 12140979 : data()->GetOrCreateLiveRangeFor(vreg)->set_has_slot_use(true);
2146 : }
2147 : }
2148 : Use(block_start_position, use_pos, input);
2149 : }
2150 :
2151 40557772 : for (size_t i = 0; i < instr->TempCount(); i++) {
2152 715646 : InstructionOperand* temp = instr->TempAt(i);
2153 : // Unsupported.
2154 : DCHECK_IMPLIES(temp->IsUnallocated(),
2155 : !UnallocatedOperand::cast(temp)->HasSlotPolicy());
2156 715646 : if (instr->ClobbersTemps()) {
2157 0 : if (temp->IsRegister()) continue;
2158 0 : if (temp->IsUnallocated()) {
2159 : UnallocatedOperand* temp_unalloc = UnallocatedOperand::cast(temp);
2160 0 : if (temp_unalloc->HasFixedPolicy()) {
2161 : continue;
2162 : }
2163 : }
2164 : }
2165 : Use(block_start_position, curr_position.End(), temp);
2166 : Define(curr_position, temp);
2167 : }
2168 :
2169 : // Process the moves of the instruction's gaps, making their sources live.
2170 : const Instruction::GapPosition kPositions[] = {Instruction::END,
2171 39126476 : Instruction::START};
2172 : curr_position = curr_position.PrevStart();
2173 : DCHECK(curr_position.IsGapPosition());
2174 117379665 : for (const Instruction::GapPosition& position : kPositions) {
2175 78252863 : ParallelMove* move = instr->GetParallelMove(position);
2176 78252863 : if (move == nullptr) continue;
2177 14218209 : if (position == Instruction::END) {
2178 : curr_position = curr_position.End();
2179 : } else {
2180 : curr_position = curr_position.Start();
2181 : }
2182 52185344 : for (MoveOperands* cur : *move) {
2183 23748600 : InstructionOperand& from = cur->source();
2184 23748600 : InstructionOperand& to = cur->destination();
2185 : void* hint = &to;
2186 23748600 : UsePositionHintType hint_type = UsePosition::HintTypeForOperand(to);
2187 : UsePosition* to_use = nullptr;
2188 : int phi_vreg = -1;
2189 23748863 : if (to.IsUnallocated()) {
2190 : int to_vreg = UnallocatedOperand::cast(to).virtual_register();
2191 10409091 : TopLevelLiveRange* to_range =
2192 10409018 : data()->GetOrCreateLiveRangeFor(to_vreg);
2193 10409091 : if (to_range->is_phi()) {
2194 : phi_vreg = to_vreg;
2195 3325583 : if (to_range->is_non_loop_phi()) {
2196 2925871 : hint = to_range->current_hint_position();
2197 : hint_type = hint == nullptr ? UsePositionHintType::kNone
2198 2925871 : : UsePositionHintType::kUsePos;
2199 : } else {
2200 : hint_type = UsePositionHintType::kPhi;
2201 : hint = data()->GetPhiMapValueFor(to_vreg);
2202 : }
2203 : } else {
2204 7083508 : if (live->Contains(to_vreg)) {
2205 : to_use = Define(curr_position, &to, &from,
2206 6028069 : UsePosition::HintTypeForOperand(from));
2207 : live->Remove(to_vreg);
2208 : } else {
2209 : cur->Eliminate();
2210 : continue;
2211 : }
2212 : }
2213 : } else {
2214 : Define(curr_position, &to);
2215 : }
2216 : UsePosition* from_use =
2217 22693536 : Use(block_start_position, curr_position, &from, hint, hint_type);
2218 : // Mark range live.
2219 22693411 : if (from.IsUnallocated()) {
2220 : live->Add(UnallocatedOperand::cast(from).virtual_register());
2221 : }
2222 : // Resolve use position hints just created.
2223 22693411 : if (to_use != nullptr && from_use != nullptr) {
2224 : to_use->ResolveHint(from_use);
2225 : from_use->ResolveHint(to_use);
2226 : }
2227 : DCHECK_IMPLIES(to_use != nullptr, to_use->IsResolved());
2228 : DCHECK_IMPLIES(from_use != nullptr, from_use->IsResolved());
2229 : // Potentially resolve phi hint.
2230 22693411 : if (phi_vreg != -1) ResolvePhiHint(&from, from_use);
2231 : }
2232 : }
2233 : }
2234 11461523 : }
2235 :
2236 12795041 : void LiveRangeBuilder::ProcessPhis(const InstructionBlock* block,
2237 1333606 : BitVector* live) {
2238 24256476 : for (PhiInstruction* phi : block->phis()) {
2239 : // The live range interval already ends at the first instruction of the
2240 : // block.
2241 : int phi_vreg = phi->virtual_register();
2242 : live->Remove(phi_vreg);
2243 : // Select a hint from a predecessor block that precedes this block in the
2244 : // rpo order. In order of priority:
2245 : // - Avoid hints from deferred blocks.
2246 : // - Prefer hints from allocated (or explicit) operands.
2247 : // - Prefer hints from empty blocks (containing just parallel moves and a
2248 : // jump). In these cases, if we can elide the moves, the jump threader
2249 : // is likely to be able to elide the jump.
2250 : // The enforcement of hinting in rpo order is required because hint
2251 : // resolution that happens later in the compiler pipeline visits
2252 : // instructions in reverse rpo order, relying on the fact that phis are
2253 : // encountered before their hints.
2254 : InstructionOperand* hint = nullptr;
2255 : int hint_preference = 0;
2256 :
2257 : // The cost of hinting increases with the number of predecessors. At the
2258 : // same time, the typical benefit decreases, since this hinting only
2259 : // optimises the execution path through one predecessor. A limit of 2 is
2260 : // sufficient to hit the common if/else pattern.
2261 : int predecessor_limit = 2;
2262 :
2263 4219439 : for (RpoNumber predecessor : block->predecessors()) {
2264 7456254 : const InstructionBlock* predecessor_block =
2265 2703882 : code()->InstructionBlockAt(predecessor);
2266 : DCHECK_EQ(predecessor_block->rpo_number(), predecessor);
2267 :
2268 : // Only take hints from earlier rpo numbers.
2269 2703883 : if (predecessor >= block->rpo_number()) continue;
2270 :
2271 : // Look up the predecessor instruction.
2272 : const Instruction* predecessor_instr =
2273 2485419 : GetLastInstruction(code(), predecessor_block);
2274 : InstructionOperand* predecessor_hint = nullptr;
2275 : // Phis are assigned in the END position of the last instruction in each
2276 : // predecessor block.
2277 5793105 : for (MoveOperands* move :
2278 5793094 : *predecessor_instr->GetParallelMove(Instruction::END)) {
2279 : InstructionOperand& to = move->destination();
2280 6615355 : if (to.IsUnallocated() &&
2281 : UnallocatedOperand::cast(to).virtual_register() == phi_vreg) {
2282 2485407 : predecessor_hint = &move->source();
2283 2485407 : break;
2284 : }
2285 : }
2286 : DCHECK_NOT_NULL(predecessor_hint);
2287 :
2288 : // For each predecessor, generate a score according to the priorities
2289 : // described above, and pick the best one. Flags in higher-order bits have
2290 : // a higher priority than those in lower-order bits.
2291 : int predecessor_hint_preference = 0;
2292 : const int kNotDeferredBlockPreference = (1 << 2);
2293 : const int kMoveIsAllocatedPreference = (1 << 1);
2294 : const int kBlockIsEmptyPreference = (1 << 0);
2295 :
2296 : // - Avoid hints from deferred blocks.
2297 2485418 : if (!predecessor_block->IsDeferred()) {
2298 : predecessor_hint_preference |= kNotDeferredBlockPreference;
2299 : }
2300 :
2301 : // - Prefer hints from allocated (or explicit) operands.
2302 : //
2303 : // Already-allocated or explicit operands are typically assigned using
2304 : // the parallel moves on the last instruction. For example:
2305 : //
2306 : // gap (v101 = [x0|R|w32]) (v100 = v101)
2307 : // ArchJmp
2308 : // ...
2309 : // phi: v100 = v101 v102
2310 : //
2311 : // We have already found the END move, so look for a matching START move
2312 : // from an allocated (or explicit) operand.
2313 : //
2314 : // Note that we cannot simply look up data()->live_ranges()[vreg] here
2315 : // because the live ranges are still being built when this function is
2316 : // called.
2317 : // TODO(v8): Find a way to separate hinting from live range analysis in
2318 : // BuildLiveRanges so that we can use the O(1) live-range look-up.
2319 : auto moves = predecessor_instr->GetParallelMove(Instruction::START);
2320 2485418 : if (moves != nullptr) {
2321 1011978 : for (MoveOperands* move : *moves) {
2322 : InstructionOperand& to = move->destination();
2323 457623 : if (predecessor_hint->Equals(to)) {
2324 360891 : if (move->source().IsAllocated() || move->source().IsExplicit()) {
2325 360891 : predecessor_hint_preference |= kMoveIsAllocatedPreference;
2326 : }
2327 : break;
2328 : }
2329 : }
2330 : }
2331 :
2332 : // - Prefer hints from empty blocks.
2333 2485418 : if (predecessor_block->last_instruction_index() ==
2334 : predecessor_block->first_instruction_index()) {
2335 841167 : predecessor_hint_preference |= kBlockIsEmptyPreference;
2336 : }
2337 :
2338 4970836 : if ((hint == nullptr) ||
2339 2485418 : (predecessor_hint_preference > hint_preference)) {
2340 : // Take the hint from this predecessor.
2341 : hint = predecessor_hint;
2342 : hint_preference = predecessor_hint_preference;
2343 : }
2344 :
2345 2485418 : if (--predecessor_limit <= 0) break;
2346 : }
2347 : DCHECK_NOT_NULL(hint);
2348 :
2349 : LifetimePosition block_start = LifetimePosition::GapFromInstructionIndex(
2350 1333606 : block->first_instruction_index());
2351 1333604 : UsePosition* use_pos = Define(block_start, &phi->output(), hint,
2352 2667210 : UsePosition::HintTypeForOperand(*hint));
2353 : MapPhiHint(hint, use_pos);
2354 : }
2355 11461435 : }
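// The three preference flags above form a 3-bit score, so a hint from a
// non-deferred predecessor always beats one from a deferred predecessor
// (which can score at most 2 + 1 = 3). Sketch of a comparison between two
// hypothetical predecessors:
//
//   P1: deferred,  move allocated,   empty  ->  0 + 2 + 1 = 3
//   P2: hot path,  move unallocated, empty  ->  4 + 0 + 1 = 5   (P2 wins)
//
// With predecessor_limit = 2, at most two predecessors are ever scored.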
2356 :
2357 :
2358 206614 : void LiveRangeBuilder::ProcessLoopHeader(const InstructionBlock* block,
2359 1112337 : BitVector* live) {
2360 : DCHECK(block->IsLoopHeader());
2361 : // Add a live range stretching from the first loop instruction to the last
2362 : // for each value live on entry to the header.
2363 : BitVector::Iterator iterator(live);
2364 : LifetimePosition start = LifetimePosition::GapFromInstructionIndex(
2365 103307 : block->first_instruction_index());
2366 : LifetimePosition end = LifetimePosition::GapFromInstructionIndex(
2367 103307 : code()->LastLoopInstructionIndex(block))
2368 103307 : .NextFullStart();
2369 2534595 : while (!iterator.Done()) {
2370 1112337 : int operand_index = iterator.Current();
2371 1112337 : TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(operand_index);
2372 1112337 : range->EnsureInterval(start, end, allocation_zone());
2373 1112337 : iterator.Advance();
2374 : }
2375 : // Insert all values into the live in sets of all blocks in the loop.
2376 2014372 : for (int i = block->rpo_number().ToInt() + 1; i < block->loop_end().ToInt();
2377 : ++i) {
2378 5733195 : live_in_sets()[i]->Union(*live);
2379 : }
2380 103307 : }
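// Sketch for a loop whose header covers instructions [8, 10] and whose last
// loop instruction is 30 (indices hypothetical): every value live into the
// header gets its interval stretched over the whole loop,
//
//   start = GapFromInstructionIndex(8);
//   end   = GapFromInstructionIndex(30).NextFullStart();
//   range->EnsureInterval(start, end, ...);   // for each live value
//
// and the same live set is unioned into live_in of every block inside the
// loop, so back-edge liveness needs no fixed-point iteration.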
2381 :
2382 :
2383 4388687 : void LiveRangeBuilder::BuildLiveRanges() {
2384 : // Process the blocks in reverse order.
2385 13293253 : for (int block_id = code()->InstructionBlockCount() - 1; block_id >= 0;
2386 : --block_id) {
2387 : InstructionBlock* block =
2388 11461434 : code()->InstructionBlockAt(RpoNumber::FromInt(block_id));
2389 11461456 : BitVector* live = ComputeLiveOut(block, data());
2390 : // Initially consider all live_out values live for the entire block. We
2391 : // will shorten these intervals if necessary.
2392 11461442 : AddInitialIntervals(block, live);
2393 : // Process the instructions in reverse order, generating and killing
2394 : // live values.
2395 11461462 : ProcessInstructions(block, live);
2396 : // All phi output operands are killed by this block.
2397 11461484 : ProcessPhis(block, live);
2398 : // Now live is live_in for this block, except that it does not include
2399 : // values live out on backward successor edges.
2400 11461443 : if (block->IsLoopHeader()) ProcessLoopHeader(block, live);
2401 34384503 : live_in_sets()[block_id] = live;
2402 : }
2403 : // Postprocess the ranges.
2404 82114385 : for (TopLevelLiveRange* range : data()->live_ranges()) {
2405 47173421 : if (range == nullptr) continue;
2406 : // Give slots to all ranges with a non fixed slot use.
2407 25565633 : if (range->has_slot_use() && range->HasNoSpillType()) {
2408 1602770 : data()->AssignSpillRangeToLiveRange(range);
2409 : }
2410 : // TODO(bmeurer): This is a horrible hack to make sure that for constant
2411 : // live ranges, every use requires the constant to be in a register.
2412 : // Without this hack, all uses with "any" policy would get the constant
2413 : // operand assigned.
2414 33109078 : if (range->HasSpillOperand() && range->GetSpillOperand()->IsConstant()) {
2415 22200280 : for (UsePosition* pos = range->first_pos(); pos != nullptr;
2416 : pos = pos->next()) {
2417 13153041 : if (pos->type() == UsePositionType::kRequiresSlot) continue;
2418 : UsePositionType new_type = UsePositionType::kAny;
2419 : // Can't mark phis as needing a register.
2420 13153047 : if (!pos->pos().IsGapPosition()) {
2421 : new_type = UsePositionType::kRequiresRegister;
2422 : }
2423 : pos->set_type(new_type, true);
2424 : }
2425 : }
2426 : }
2427 2270318 : for (auto preassigned : data()->preassigned_slot_ranges()) {
2428 400293 : TopLevelLiveRange* range = preassigned.first;
2429 : int slot_id = preassigned.second;
2430 : SpillRange* spill = range->HasSpillRange()
2431 : ? range->GetSpillRange()
2432 476635 : : data()->AssignSpillRangeToLiveRange(range);
2433 : spill->set_assigned_slot(slot_id);
2434 : }
2435 : #ifdef DEBUG
2436 : Verify();
2437 : #endif
2438 915927 : }
2439 :
2440 :
2441 0 : void LiveRangeBuilder::MapPhiHint(InstructionOperand* operand,
2442 : UsePosition* use_pos) {
2443 : DCHECK(!use_pos->IsResolved());
2444 2667213 : auto res = phi_hints_.insert(std::make_pair(operand, use_pos));
2445 : DCHECK(res.second);
2446 : USE(res);
2447 0 : }
2448 :
2449 :
2450 3325566 : void LiveRangeBuilder::ResolvePhiHint(InstructionOperand* operand,
2451 : UsePosition* use_pos) {
2452 : auto it = phi_hints_.find(operand);
2453 6651136 : if (it == phi_hints_.end()) return;
2454 : DCHECK(!it->second->IsResolved());
2455 1333603 : it->second->ResolveHint(use_pos);
2456 : }
2457 :
2458 :
2459 0 : void LiveRangeBuilder::Verify() const {
2460 0 : for (auto& hint : phi_hints_) {
2461 0 : CHECK(hint.second->IsResolved());
2462 : }
2463 0 : for (const TopLevelLiveRange* current : data()->live_ranges()) {
2464 0 : if (current != nullptr && !current->IsEmpty()) {
2465 : // New LiveRanges should not be split.
2466 0 : CHECK_NULL(current->next());
2467 : // General integrity check.
2468 0 : current->Verify();
2469 0 : const UseInterval* first = current->first_interval();
2470 0 : if (first->next() == nullptr) continue;
2471 :
2472 : // Consecutive intervals should not end and start in the same block,
2473 : // otherwise the intervals should have been joined, because the
2474 : // variable is live throughout that block.
2475 0 : CHECK(NextIntervalStartsInDifferentBlocks(first));
2476 :
2477 0 : for (const UseInterval* i = first->next(); i != nullptr; i = i->next()) {
2478 : // Except for the first interval, the other intervals must start at
2479 : // a block boundary, otherwise data wouldn't flow to them.
2480 0 : CHECK(IntervalStartsAtBlockBoundary(i));
2481 : // The last instruction of each predecessor of the block where the
2482 : // interval starts must be covered by the range.
2483 0 : CHECK(IntervalPredecessorsCoveredByRange(i, current));
2484 0 : if (i->next() != nullptr) {
2485 : // Check the consecutive intervals property, except for the last
2486 : // interval, where it doesn't apply.
2487 0 : CHECK(NextIntervalStartsInDifferentBlocks(i));
2488 : }
2489 : }
2490 : }
2491 : }
2492 0 : }
2493 :
2494 0 : bool LiveRangeBuilder::IntervalStartsAtBlockBoundary(
2495 0 : const UseInterval* interval) const {
2496 : LifetimePosition start = interval->start();
2497 0 : if (!start.IsFullStart()) return false;
2498 : int instruction_index = start.ToInstructionIndex();
2499 0 : const InstructionBlock* block =
2500 0 : data()->code()->GetInstructionBlock(instruction_index);
2501 0 : return block->first_instruction_index() == instruction_index;
2502 : }
2503 :
2504 0 : bool LiveRangeBuilder::IntervalPredecessorsCoveredByRange(
2505 0 : const UseInterval* interval, const TopLevelLiveRange* range) const {
2506 : LifetimePosition start = interval->start();
2507 : int instruction_index = start.ToInstructionIndex();
2508 : const InstructionBlock* block =
2509 0 : data()->code()->GetInstructionBlock(instruction_index);
2510 0 : for (RpoNumber pred_index : block->predecessors()) {
2511 0 : const InstructionBlock* predecessor =
2512 0 : data()->code()->InstructionBlockAt(pred_index);
2513 : LifetimePosition last_pos = LifetimePosition::GapFromInstructionIndex(
2514 : predecessor->last_instruction_index());
2515 : last_pos = last_pos.NextStart().End();
2516 0 : if (!range->Covers(last_pos)) return false;
2517 : }
2518 0 : return true;
2519 : }
2520 :
2521 0 : bool LiveRangeBuilder::NextIntervalStartsInDifferentBlocks(
2522 0 : const UseInterval* interval) const {
2523 : DCHECK_NOT_NULL(interval->next());
2524 : LifetimePosition end = interval->end();
2525 : LifetimePosition next_start = interval->next()->start();
2526 : // Since end is not covered, but the previous position is, move back a
2527 : // position.
2528 0 : end = end.IsStart() ? end.PrevStart().End() : end.Start();
2529 : int last_covered_index = end.ToInstructionIndex();
2530 : const InstructionBlock* block =
2531 0 : data()->code()->GetInstructionBlock(last_covered_index);
2532 : const InstructionBlock* next_block =
2533 0 : data()->code()->GetInstructionBlock(next_start.ToInstructionIndex());
2534 0 : return block->rpo_number() < next_block->rpo_number();
2535 : }
2536 :
2537 3663638 : RegisterAllocator::RegisterAllocator(RegisterAllocationData* data,
2538 : RegisterKind kind)
2539 : : data_(data),
2540 : mode_(kind),
2541 : num_registers_(GetRegisterCount(data->config(), kind)),
2542 : num_allocatable_registers_(
2543 : GetAllocatableRegisterCount(data->config(), kind)),
2544 : allocatable_register_codes_(
2545 : GetAllocatableRegisterCodes(data->config(), kind)),
2546 7327276 : check_fp_aliasing_(false) {
2547 : if (!kSimpleFPAliasing && kind == FP_REGISTERS) {
2548 : check_fp_aliasing_ = (data->code()->representation_mask() &
2549 : (kFloatRepBit | kSimd128RepBit)) != 0;
2550 : }
2551 1831819 : }
2552 :
2553 0 : LifetimePosition RegisterAllocator::GetSplitPositionForInstruction(
2554 9162550 : const LiveRange* range, int instruction_index) {
2555 : LifetimePosition ret = LifetimePosition::Invalid();
2556 :
2557 : ret = LifetimePosition::GapFromInstructionIndex(instruction_index);
2558 18333350 : if (range->Start() >= ret || ret >= range->End()) {
2559 : return LifetimePosition::Invalid();
2560 : }
2561 0 : return ret;
2562 : }
2563 :
2564 96178539 : void RegisterAllocator::SplitAndSpillRangesDefinedByMemoryOperand() {
2565 1831926 : size_t initial_range_count = data()->live_ranges().size();
2566 96178446 : for (size_t i = 0; i < initial_range_count; ++i) {
2567 202149975 : TopLevelLiveRange* range = data()->live_ranges()[i];
2568 94346613 : if (!CanProcessRange(range)) continue;
2569 51162666 : if (range->HasNoSpillType() ||
2570 2353249 : (range->HasSpillRange() && !range->has_slot_use())) {
2571 : continue;
2572 : }
2573 0 : LifetimePosition start = range->Start();
2574 13456715 : TRACE("Live range %d:%d is defined by a spill operand.\n",
2575 : range->TopLevel()->vreg(), range->relative_id());
2576 : LifetimePosition next_pos = start;
2577 13456749 : if (next_pos.IsGapPosition()) {
2578 : next_pos = next_pos.NextStart();
2579 : }
2580 :
2581 : // With splinters, we can be more strict and skip over positions
2582 : // not strictly needing registers.
2583 : UsePosition* pos =
2584 : range->IsSplinter()
2585 2262879 : ? range->NextRegisterPosition(next_pos)
2586 15719628 : : range->NextUsePositionRegisterIsBeneficial(next_pos);
2587 : // If the range already has a spill operand and it doesn't need a
2588 : // register immediately, split it and spill the first part of the range.
2589 13456733 : if (pos == nullptr) {
2590 3812297 : Spill(range);
2591 9644436 : } else if (pos->pos() > range->Start().NextStart()) {
2592 : // Do not spill the live range eagerly if the use position that can
2593 : // benefit from the register is too close to the start of the live range.
2594 : LifetimePosition split_pos = GetSplitPositionForInstruction(
2595 : range, pos->pos().ToInstructionIndex());
2596 : // There is no place to split, so we can't split and spill.
2597 9170800 : if (!split_pos.IsValid()) continue;
2598 :
2599 : split_pos =
2600 9162555 : FindOptimalSplitPos(range->Start().NextFullStart(), split_pos);
2601 :
2602 9162521 : SplitRangeAt(range, split_pos);
2603 9162546 : Spill(range);
2604 : }
2605 : }
2606 1831833 : }
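// Sketch (positions hypothetical): a range spill-defined at instruction 12
// whose first register-beneficial use is at instruction 20 becomes
//
//   [12 .. split_pos)  : spilled immediately       // Spill(range)
//   [split_pos .. end) : left for the allocator    // SplitRangeAt result
//
// with split_pos chosen by FindOptimalSplitPos between 12's next full start
// and 20, so the loop-placement rules below still apply.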
2607 :
2608 :
2609 15162365 : LiveRange* RegisterAllocator::SplitRangeAt(LiveRange* range,
2610 : LifetimePosition pos) {
2611 : DCHECK(!range->TopLevel()->IsFixed());
2612 15162365 : TRACE("Splitting live range %d:%d at %d\n", range->TopLevel()->vreg(),
2613 : range->relative_id(), pos.value());
2614 :
2615 15162401 : if (pos <= range->Start()) return range;
2616 :
2617 : // We can't properly connect live ranges if splitting occurred at the
2618 : // end of a block.
2619 : DCHECK(pos.IsStart() || pos.IsGapPosition() ||
2620 : (GetInstructionBlock(code(), pos)->last_instruction_index() !=
2621 : pos.ToInstructionIndex()));
2622 :
2623 13797748 : LiveRange* result = range->SplitAt(pos, allocation_zone());
2624 13797767 : return result;
2625 : }
2626 :
2627 :
2628 1401656 : LiveRange* RegisterAllocator::SplitBetween(LiveRange* range,
2629 : LifetimePosition start,
2630 : LifetimePosition end) {
2631 : DCHECK(!range->TopLevel()->IsFixed());
2632 1401656 : TRACE("Splitting live range %d:%d in position between [%d, %d]\n",
2633 : range->TopLevel()->vreg(), range->relative_id(), start.value(),
2634 : end.value());
2635 :
2636 1401656 : LifetimePosition split_pos = FindOptimalSplitPos(start, end);
2637 : DCHECK(split_pos >= start);
2638 1401656 : return SplitRangeAt(range, split_pos);
2639 : }
2640 :
2641 :
2642 10564162 : LifetimePosition RegisterAllocator::FindOptimalSplitPos(LifetimePosition start,
2643 : LifetimePosition end) {
2644 : int start_instr = start.ToInstructionIndex();
2645 : int end_instr = end.ToInstructionIndex();
2646 : DCHECK(start_instr <= end_instr);
2647 :
2648 : // We have no choice
2649 10564162 : if (start_instr == end_instr) return end;
2650 :
2651 : const InstructionBlock* start_block = GetInstructionBlock(code(), start);
2652 : const InstructionBlock* end_block = GetInstructionBlock(code(), end);
2653 :
2654 7090988 : if (end_block == start_block) {
2655 : // The interval is split in the same basic block. Split at the latest
2656 : // possible position.
2657 5272725 : return end;
2658 : }
2659 :
2660 124435 : const InstructionBlock* block = end_block;
2661 : // Find header of outermost loop.
2662 : do {
2663 : const InstructionBlock* loop = GetContainingLoop(code(), block);
2664 1919184 : if (loop == nullptr ||
2665 : loop->rpo_number().ToInt() <= start_block->rpo_number().ToInt()) {
2666 : // No more loops or loop starts before the lifetime start.
2667 : break;
2668 : }
2669 : block = loop;
2670 : } while (true);
2671 :
2672 : // We did not find any suitable outer loop. Split at the latest possible
2673 : // position unless end_block is a loop header itself.
2674 3537985 : if (block == end_block && !end_block->IsLoopHeader()) return end;
2675 :
2676 : return LifetimePosition::GapFromInstructionIndex(
2677 : block->first_instruction_index());
2678 : }
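// Sketch of the loop-hoisting choice above (block numbers hypothetical):
// with start in B1 and end in B3, inside a loop whose header is B2,
//
//   B1 (start)  ->  B2 (loop header)  ->  B3 (end, in the loop body)
//
// the do/while walks outward from B3 via GetContainingLoop until the next
// header would begin at or before B1; the split then lands on the gap of
// B2's first instruction rather than at end, keeping the split/spill moves
// out of the loop body.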
2679 :
2680 :
2681 198628 : LifetimePosition RegisterAllocator::FindOptimalSpillingPos(
2682 : LiveRange* range, LifetimePosition pos) {
2683 : const InstructionBlock* block = GetInstructionBlock(code(), pos.Start());
2684 47972 : const InstructionBlock* loop_header =
2685 198628 : block->IsLoopHeader() ? block : GetContainingLoop(code(), block);
2686 :
2687 198628 : if (loop_header == nullptr) return pos;
2688 :
2689 : const UsePosition* prev_use =
2690 : range->PreviousUsePositionRegisterIsBeneficial(pos);
2691 :
2692 85013 : while (loop_header != nullptr) {
2693 : // We are going to spill the live range inside the loop.
2694 : // If possible, try to move the spilling position backwards to the loop
2695 : // header. This will reduce the number of memory moves on the back edge.
2696 : LifetimePosition loop_start = LifetimePosition::GapFromInstructionIndex(
2697 : loop_header->first_instruction_index());
2698 :
2699 47972 : if (range->Covers(loop_start)) {
2700 35658 : if (prev_use == nullptr || prev_use->pos() < loop_start) {
2701 : // No register-beneficial use inside the loop before pos.
2702 : pos = loop_start;
2703 : }
2704 : }
2705 :
2706 : // Try hoisting out to an outer loop.
2707 : loop_header = GetContainingLoop(code(), loop_header);
2708 : }
2709 :
2710 37041 : return pos;
2711 : }
2712 :
2713 :
2714 18036068 : void RegisterAllocator::Spill(LiveRange* range) {
2715 : DCHECK(!range->spilled());
2716 0 : TopLevelLiveRange* first = range->TopLevel();
2717 16902283 : TRACE("Spilling live range %d:%d\n", first->vreg(), range->relative_id());
2718 :
2719 16902327 : if (first->HasNoSpillType()) {
2720 1133785 : data()->AssignSpillRangeToLiveRange(first);
2721 : }
2722 : range->Spill();
2723 16902331 : }
2724 :
2725 0 : const char* RegisterAllocator::RegisterName(int register_code) const {
2726 0 : if (mode() == GENERAL_REGISTERS) {
2727 0 : return data()->config()->GetGeneralRegisterName(register_code);
2728 : } else {
2729 0 : return data()->config()->GetDoubleRegisterName(register_code);
2730 : }
2731 : }
2732 :
2733 :
2734 1831808 : LinearScanAllocator::LinearScanAllocator(RegisterAllocationData* data,
2735 : RegisterKind kind, Zone* local_zone)
2736 : : RegisterAllocator(data, kind),
2737 : unhandled_live_ranges_(local_zone),
2738 : active_live_ranges_(local_zone),
2739 1831808 : inactive_live_ranges_(local_zone) {
2740 : unhandled_live_ranges().reserve(
2741 1831822 : static_cast<size_t>(code()->VirtualRegisterCount() * 2));
2742 1831856 : active_live_ranges().reserve(8);
2743 1831852 : inactive_live_ranges().reserve(8);
2744 : // TryAllocateFreeReg and AllocateBlockedReg assume this
2745 : // when allocating local arrays.
2746 : DCHECK(RegisterConfiguration::kMaxFPRegisters >=
2747 : this->data()->config()->num_general_registers());
2748 1831852 : }
2749 :
2750 :
2751 1831844 : void LinearScanAllocator::AllocateRegisters() {
2752 : DCHECK(unhandled_live_ranges().empty());
2753 : DCHECK(active_live_ranges().empty());
2754 : DCHECK(inactive_live_ranges().empty());
2755 :
2756 5495618 : SplitAndSpillRangesDefinedByMemoryOperand();
2757 :
2758 98009708 : for (TopLevelLiveRange* range : data()->live_ranges()) {
2759 94346011 : if (!CanProcessRange(range)) continue;
2760 69487633 : for (LiveRange* to_add = range; to_add != nullptr;
2761 : to_add = to_add->next()) {
2762 34743834 : if (!to_add->spilled()) {
2763 21769066 : AddToUnhandledUnsorted(to_add);
2764 : }
2765 : }
2766 : }
2767 1831831 : SortUnhandled();
2768 : DCHECK(UnhandledIsSorted());
2769 :
2770 1831887 : if (mode() == GENERAL_REGISTERS) {
2771 16485281 : for (TopLevelLiveRange* current : data()->fixed_live_ranges()) {
2772 14653397 : if (current != nullptr) AddToInactive(current);
2773 : }
2774 : } else {
2775 16486064 : for (TopLevelLiveRange* current : data()->fixed_double_live_ranges()) {
2776 14654233 : if (current != nullptr) AddToInactive(current);
2777 : }
2778 : if (!kSimpleFPAliasing && check_fp_aliasing()) {
2779 : for (TopLevelLiveRange* current : data()->fixed_float_live_ranges()) {
2780 : if (current != nullptr) AddToInactive(current);
2781 : }
2782 : for (TopLevelLiveRange* current : data()->fixed_simd128_live_ranges()) {
2783 : if (current != nullptr) AddToInactive(current);
2784 : }
2785 : }
2786 : }
2787 :
2788 28040401 : while (!unhandled_live_ranges().empty()) {
2789 : DCHECK(UnhandledIsSorted());
2790 26208575 : LiveRange* current = unhandled_live_ranges().back();
2791 : unhandled_live_ranges().pop_back();
2792 : DCHECK(UnhandledIsSorted());
2793 : LifetimePosition position = current->Start();
2794 : #ifdef DEBUG
2795 : allocation_finger_ = position;
2796 : #endif
2797 26208575 : TRACE("Processing interval %d:%d start=%d\n", current->TopLevel()->vreg(),
2798 : current->relative_id(), position.value());
2799 :
2800 26209561 : if (current->IsTopLevel() && TryReuseSpillForPhi(current->TopLevel()))
2801 : continue;
2802 :
2803 223317427 : for (size_t i = 0; i < active_live_ranges().size(); ++i) {
2804 98567577 : LiveRange* cur_active = active_live_ranges()[i];
2805 98567577 : if (cur_active->End() <= position) {
2806 21235061 : ActiveToHandled(cur_active);
2807 21235320 : --i; // The live range was removed from the list of active live ranges.
2808 77332516 : } else if (!cur_active->Covers(position)) {
2809 18364513 : ActiveToInactive(cur_active);
2810 18364512 : --i; // The live range was removed from the list of active live ranges.
2811 : }
2812 : }
2813 :
2814 622782771 : for (size_t i = 0; i < inactive_live_ranges().size(); ++i) {
2815 298301210 : LiveRange* cur_inactive = inactive_live_ranges()[i];
2816 298301210 : if (cur_inactive->End() <= position) {
2817 6275624 : InactiveToHandled(cur_inactive);
2818 6276639 : --i; // Live range was removed from the list of inactive live ranges.
2819 292025586 : } else if (cur_inactive->Covers(position)) {
2820 19516244 : InactiveToActive(cur_inactive);
2821 19516225 : --i; // Live range was removed from the list of inactive live ranges.
2822 : }
2823 : }
2824 :
2825 : DCHECK(!current->HasRegisterAssigned() && !current->spilled());
2826 :
2827 26181233 : ProcessCurrentRange(current);
2828 : }
2829 1831826 : }
2830 :
2831 1106090 : bool LinearScanAllocator::TrySplitAndSpillSplinter(LiveRange* range) {
2832 : DCHECK(range->TopLevel()->IsSplinter());
2833 : // If we can spill the whole range, great. Otherwise, split above the
2834 : // first use needing a register and spill the top part.
2835 1106090 : const UsePosition* next_reg = range->NextRegisterPosition(range->Start());
2836 1106077 : if (next_reg == nullptr) {
2837 711911 : Spill(range);
2838 711925 : return true;
2839 394166 : } else if (range->FirstHintPosition() == nullptr) {
2840 : // If there was no hint, but we have a use position requiring a
2841 : // register, apply the hot path heuristics.
2842 : return false;
2843 218255 : } else if (next_reg->pos().PrevStart() > range->Start()) {
2844 103980 : LiveRange* tail = SplitRangeAt(range, next_reg->pos().PrevStart());
2845 103980 : AddToUnhandledSorted(tail);
2846 103980 : Spill(range);
2847 103980 : return true;
2848 : }
2849 : return false;
2850 : }
2851 :
2852 22479835 : void LinearScanAllocator::SetLiveRangeAssignedRegister(LiveRange* range,
2853 : int reg) {
2854 23578352 : data()->MarkAllocated(range->representation(), reg);
2855 : range->set_assigned_register(reg);
2856 22479839 : range->SetUseHints(reg);
2857 34314567 : if (range->IsTopLevel() && range->TopLevel()->is_phi()) {
2858 : data()->GetPhiMapValueFor(range->TopLevel())->set_assigned_register(reg);
2859 : }
2860 22479833 : }
2861 :
2862 :
2863 22479817 : void LinearScanAllocator::AddToActive(LiveRange* range) {
2864 22479817 : TRACE("Add live range %d:%d to active\n", range->TopLevel()->vreg(),
2865 : range->relative_id());
2866 22479817 : active_live_ranges().push_back(range);
2867 22479820 : }
2868 :
2869 :
2870 17301704 : void LinearScanAllocator::AddToInactive(LiveRange* range) {
2871 17301704 : TRACE("Add live range %d:%d to inactive\n", range->TopLevel()->vreg(),
2872 : range->relative_id());
2873 17301704 : inactive_live_ranges().push_back(range);
2874 17301544 : }
2875 :
2876 :
2877 4439806 : void LinearScanAllocator::AddToUnhandledSorted(LiveRange* range) {
2878 8879612 : if (range == nullptr || range->IsEmpty()) return;
2879 : DCHECK(!range->HasRegisterAssigned() && !range->spilled());
2880 : DCHECK(allocation_finger_ <= range->Start());
2881 85080989 : for (int i = static_cast<int>(unhandled_live_ranges().size() - 1); i >= 0;
2882 : --i) {
2883 161149383 : LiveRange* cur_range = unhandled_live_ranges().at(i);
2884 156776183 : if (!range->ShouldBeAllocatedBefore(cur_range)) continue;
2885 4373245 : TRACE("Add live range %d:%d to unhandled at %d\n",
2886 : range->TopLevel()->vreg(), range->relative_id(), i + 1);
2887 8746490 : auto it = unhandled_live_ranges().begin() + (i + 1);
2888 4373245 : unhandled_live_ranges().insert(it, range);
2889 : DCHECK(UnhandledIsSorted());
2890 4373242 : return;
2891 : }
2892 66560 : TRACE("Add live range %d:%d to unhandled at start\n",
2893 : range->TopLevel()->vreg(), range->relative_id());
2894 66560 : unhandled_live_ranges().insert(unhandled_live_ranges().begin(), range);
2895 : DCHECK(UnhandledIsSorted());
2896 : }
2897 :
2898 :
2899 21769019 : void LinearScanAllocator::AddToUnhandledUnsorted(LiveRange* range) {
2900 65307064 : if (range == nullptr || range->IsEmpty()) return;
2901 : DCHECK(!range->HasRegisterAssigned() && !range->spilled());
2902 21769059 : TRACE("Add live range %d:%d to unhandled unsorted at end\n",
2903 : range->TopLevel()->vreg(), range->relative_id());
2904 21769059 : unhandled_live_ranges().push_back(range);
2905 : }
2906 :
2907 :
2908 168743306 : static bool UnhandledSortHelper(LiveRange* a, LiveRange* b) {
2909 : DCHECK(!a->ShouldBeAllocatedBefore(b) || !b->ShouldBeAllocatedBefore(a));
2910 161233320 : if (a->ShouldBeAllocatedBefore(b)) return false;
2911 113770912 : if (b->ShouldBeAllocatedBefore(a)) return true;
2912 7509986 : return a->TopLevel()->vreg() < b->TopLevel()->vreg();
2913 : }
2914 :
2915 :
2916 : // Sort the unhandled live ranges so that the ranges to be processed first are
2917 : // at the end of the vector. This is convenient for the register allocation
2918 : // algorithm because it is efficient to remove elements from the end.
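 : //
 : // Illustrative example (assuming ShouldBeAllocatedBefore orders ranges by
 : // ascending start position, as UnhandledIsSorted below suggests): given
 : // unhandled ranges starting at 5, 2 and 8, the vector sorts to [8, 5, 2];
 : // back() then yields the range starting at 2, so AllocateRegisters()
 : // processes ranges in increasing start order.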
2919 1831790 : void LinearScanAllocator::SortUnhandled() {
2920 1831790 : TRACE("Sort unhandled\n");
2921 : std::sort(unhandled_live_ranges().begin(), unhandled_live_ranges().end(),
2922 1831790 : &UnhandledSortHelper);
2923 1831823 : }
2924 :
2925 :
2926 0 : bool LinearScanAllocator::UnhandledIsSorted() {
2927 0 : size_t len = unhandled_live_ranges().size();
2928 0 : for (size_t i = 1; i < len; i++) {
2929 0 : LiveRange* a = unhandled_live_ranges().at(i - 1);
2930 0 : LiveRange* b = unhandled_live_ranges().at(i);
2931 0 : if (a->Start() < b->Start()) return false;
2932 : }
2933 : return true;
2934 : }
2935 :
2936 :
2937 21435516 : void LinearScanAllocator::ActiveToHandled(LiveRange* range) {
2938 21435516 : RemoveElement(&active_live_ranges(), range);
2939 21435512 : TRACE("Moving live range %d:%d from active to handled\n",
2940 : range->TopLevel()->vreg(), range->relative_id());
2941 21435512 : }
2942 :
2943 :
2944 18364497 : void LinearScanAllocator::ActiveToInactive(LiveRange* range) {
2945 18364497 : RemoveElement(&active_live_ranges(), range);
2946 18364499 : inactive_live_ranges().push_back(range);
2947 18364506 : TRACE("Moving live range %d:%d from active to inactive\n",
2948 : range->TopLevel()->vreg(), range->relative_id());
2949 18364506 : }
2950 :
2951 :
2952 6278577 : void LinearScanAllocator::InactiveToHandled(LiveRange* range) {
2953 6278577 : RemoveElement(&inactive_live_ranges(), range);
2954 6278542 : TRACE("Moving live range %d:%d from inactive to handled\n",
2955 : range->TopLevel()->vreg(), range->relative_id());
2956 6278542 : }
2957 :
2958 :
2959 19516239 : void LinearScanAllocator::InactiveToActive(LiveRange* range) {
2960 19516239 : RemoveElement(&inactive_live_ranges(), range);
2961 19516230 : active_live_ranges().push_back(range);
2962 19516225 : TRACE("Moving live range %d:%d from inactive to active\n",
2963 : range->TopLevel()->vreg(), range->relative_id());
2964 19516225 : }
2965 :
2966 0 : void LinearScanAllocator::GetFPRegisterSet(MachineRepresentation rep,
2967 : int* num_regs, int* num_codes,
2968 : const int** codes) const {
2969 : DCHECK(!kSimpleFPAliasing);
2970 0 : if (rep == MachineRepresentation::kFloat32) {
2971 0 : *num_regs = data()->config()->num_float_registers();
2972 0 : *num_codes = data()->config()->num_allocatable_float_registers();
2973 0 : *codes = data()->config()->allocatable_float_codes();
2974 0 : } else if (rep == MachineRepresentation::kSimd128) {
2975 0 : *num_regs = data()->config()->num_simd128_registers();
2976 0 : *num_codes = data()->config()->num_allocatable_simd128_registers();
2977 0 : *codes = data()->config()->allocatable_simd128_codes();
2978 : } else {
2979 0 : UNREACHABLE();
2980 : }
2981 0 : }
2982 :
2983 26181657 : void LinearScanAllocator::FindFreeRegistersForRange(
2984 : LiveRange* range, Vector<LifetimePosition> positions) {
2985 26181657 : int num_regs = num_registers();
2986 : int num_codes = num_allocatable_registers();
2987 : const int* codes = allocatable_register_codes();
2988 : MachineRepresentation rep = range->representation();
2989 : if (!kSimpleFPAliasing && (rep == MachineRepresentation::kFloat32 ||
2990 : rep == MachineRepresentation::kSimd128))
2991 : GetFPRegisterSet(rep, &num_regs, &num_codes, &codes);
2992 : DCHECK_GE(positions.length(), num_regs);
2993 :
2994 445079267 : for (int i = 0; i < num_regs; ++i) {
2995 418897610 : positions[i] = LifetimePosition::MaxPosition();
2996 : }
2997 :
2998 130851441 : for (LiveRange* cur_active : active_live_ranges()) {
2999 : int cur_reg = cur_active->assigned_register();
3000 : if (kSimpleFPAliasing || !check_fp_aliasing()) {
3001 78488132 : positions[cur_reg] = LifetimePosition::GapFromInstructionIndex(0);
3002 78488132 : TRACE("Register %s is free until pos %d (1)\n", RegisterName(cur_reg),
3003 : LifetimePosition::GapFromInstructionIndex(0).value());
3004 : } else {
3005 : int alias_base_index = -1;
3006 : int aliases = data()->config()->GetAliases(
3007 : cur_active->representation(), cur_reg, rep, &alias_base_index);
3008 : DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
3009 : while (aliases--) {
3010 : int aliased_reg = alias_base_index + aliases;
3011 : positions[aliased_reg] = LifetimePosition::GapFromInstructionIndex(0);
3012 : }
3013 : }
3014 : }
3015 :
3016 324881696 : for (LiveRange* cur_inactive : inactive_live_ranges()) {
3017 : DCHECK(cur_inactive->End() > range->Start());
3018 : int cur_reg = cur_inactive->assigned_register();
3019 : // No need to carry out intersections when this register won't be
3020 : // interesting to this range anyway.
3021 : // TODO(mtrofin): extend to aliased ranges, too.
3022 272518884 : if ((kSimpleFPAliasing || !check_fp_aliasing()) &&
3023 : positions[cur_reg] < range->Start()) {
3024 233936874 : continue;
3025 : }
3026 :
3027 221922658 : LifetimePosition next_intersection = cur_inactive->FirstIntersection(range);
3028 221926735 : if (!next_intersection.IsValid()) continue;
3029 : if (kSimpleFPAliasing || !check_fp_aliasing()) {
3030 38586087 : positions[cur_reg] = Min(positions[cur_reg], next_intersection);
3031 38586087 : TRACE("Register %s is free until pos %d (2)\n", RegisterName(cur_reg),
3032 : Min(positions[cur_reg], next_intersection).value());
3033 : } else {
3034 : int alias_base_index = -1;
3035 : int aliases = data()->config()->GetAliases(
3036 : cur_inactive->representation(), cur_reg, rep, &alias_base_index);
3037 : DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
3038 : while (aliases--) {
3039 : int aliased_reg = alias_base_index + aliases;
3040 : positions[aliased_reg] = Min(positions[aliased_reg], next_intersection);
3041 : }
3042 : }
3043 : }
3044 26181160 : }
3045 :
3046 : // High-level register allocation summary:
3047 : //
3048 : // For regular, or hot (i.e. not splinter) ranges, we first attempt to
3049 : // allocate the preferred (hint) register. If that is not possible,
3050 : // we find a register that's free, and allocate that. If that's not possible,
3051 : // we search for a register to steal from a range that was allocated. The
3052 : // goal is to optimize for throughput by avoiding register-to-memory
3053 : // moves, which are expensive.
3054 : //
3055 : // For splinters, the goal is to minimize the number of moves. First we try
3056 : // to allocate the preferred register (more discussion follows). Failing that,
3057 : // we bail out and spill as far as we can, unless the first use is at start,
3058 : // case in which we apply the same behavior as we do for regular ranges.
3059 : // If there is no hint, we apply the hot-path behavior.
3060 : //
3061 : // For the splinter, the hint register may come from:
3062 : //
3063 : // - the hot path (we set it at splintering time with SetHint). In this case, if
3064 : // we cannot offer the hint register, spilling is better because it's at most
3065 : // 1 move, while trying to find and offer another register is at least 1 move.
3066 : //
3067 : // - a constraint. If we cannot offer that register, it's because there is some
3068 : // interference. So offering the hint register up to the interference
3069 : // would result in a move at the interference, plus a move to satisfy the
3070 : // constraint. This is also the number of moves if we spill, with the
3071 : // potential of the range being already spilled and thus saving a move
3072 : // (the spill).
3073 : // Note that this can only be an input constraint; if it were an output
3074 : // one, the range wouldn't be a splinter, because that would mean it is
3075 : // defined in a deferred block, and we don't mark those as splinters
3076 : // (they live in deferred blocks only).
3078 : //
3079 : // - a phi. The same analysis as in the case of the input constraint applies.
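 : //
 : // A rough sketch of the decision flow described above (illustrative only,
 : // mirroring ProcessCurrentRange below):
 : //
 : //   if (!TryAllocatePreferredReg(current, free_until_pos)) {
 : //     if (current is a splinter && TrySplitAndSpillSplinter(current)) return;
 : //     if (!TryAllocateFreeReg(current, free_until_pos))
 : //       AllocateBlockedReg(current);
 : //   }
 : //   if (current->HasRegisterAssigned()) AddToActive(current);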
3080 : //
3081 42589071 : void LinearScanAllocator::ProcessCurrentRange(LiveRange* current) {
3082 863976747 : LifetimePosition free_until_pos_buff[RegisterConfiguration::kMaxFPRegisters];
3083 : Vector<LifetimePosition> free_until_pos(
3084 : free_until_pos_buff, RegisterConfiguration::kMaxFPRegisters);
3085 26181195 : FindFreeRegistersForRange(current, free_until_pos);
3086 26181158 : if (!TryAllocatePreferredReg(current, free_until_pos)) {
3087 16407876 : if (current->TopLevel()->IsSplinter()) {
3088 1922002 : if (TrySplitAndSpillSplinter(current)) return;
3089 : }
3090 15591965 : if (!TryAllocateFreeReg(current, free_until_pos)) {
3091 3084098 : AllocateBlockedReg(current);
3092 : }
3093 : }
3094 25365181 : if (current->HasRegisterAssigned()) {
3095 22479798 : AddToActive(current);
3096 : }
3097 : }
3098 :
3099 29115253 : bool LinearScanAllocator::TryAllocatePreferredReg(
3100 31460734 : LiveRange* current, const Vector<LifetimePosition>& free_until_pos) {
3101 : int hint_register;
3102 29115253 : if (current->FirstHintPosition(&hint_register) != nullptr) {
3103 15730333 : TRACE(
3104 : "Found reg hint %s (free until [%d) for live range %d:%d (end %d[).\n",
3105 : RegisterName(hint_register), free_until_pos[hint_register].value(),
3106 : current->TopLevel()->vreg(), current->relative_id(),
3107 : current->End().value());
3108 :
3109 : // The desired register is free until the end of the current live range.
3110 31460734 : if (free_until_pos[hint_register] >= current->End()) {
3111 10157738 : TRACE("Assigning preferred reg %s to live range %d:%d\n",
3112 : RegisterName(hint_register), current->TopLevel()->vreg(),
3113 : current->relative_id());
3114 10157738 : SetLiveRangeAssignedRegister(current, hint_register);
3115 10157699 : return true;
3116 : }
3117 : }
3118 : return false;
3119 : }
3120 :
3121 15591995 : bool LinearScanAllocator::TryAllocateFreeReg(
3122 203155369 : LiveRange* current, const Vector<LifetimePosition>& free_until_pos) {
3123 : int num_regs = 0; // used only for the call to GetFPRegisterSet.
3124 15591995 : int num_codes = num_allocatable_registers();
3125 : const int* codes = allocatable_register_codes();
3126 : MachineRepresentation rep = current->representation();
3127 : if (!kSimpleFPAliasing && (rep == MachineRepresentation::kFloat32 ||
3128 : rep == MachineRepresentation::kSimd128)) {
3129 : GetFPRegisterSet(rep, &num_regs, &num_codes, &codes);
3130 : }
3131 :
3132 : DCHECK_GE(free_until_pos.length(), num_codes);
3133 :
3134 : // Find the register which stays free for the longest time.
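 : // (Illustrative: with codes = {r0, r1} and free_until_pos = {r0: 10,
 : // r1: 40}, reg becomes r1 with pos = 40. If current starts at or after 40,
 : // every register is blocked; if current ends after 40, the range is first
 : // split at position 40.)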
3135 15591995 : int reg = codes[0];
3136 190647436 : for (int i = 1; i < num_codes; ++i) {
3137 175055441 : int code = codes[i];
3138 175055441 : if (free_until_pos[code] > free_until_pos[reg]) {
3139 : reg = code;
3140 : }
3141 : }
3142 :
3143 15591995 : LifetimePosition pos = free_until_pos[reg];
3144 :
3145 15591995 : if (pos <= current->Start()) {
3146 : // All registers are blocked.
3147 : return false;
3148 : }
3149 :
3150 12507933 : if (pos < current->End()) {
3151 : // Register reg is available at the range start but becomes blocked before
3152 : // the range end. Split current at position where it becomes blocked.
3153 2934157 : LiveRange* tail = SplitRangeAt(current, pos);
3154 2934157 : AddToUnhandledSorted(tail);
3155 :
3156 : // Try to allocate preferred register once more.
3157 2934154 : if (TryAllocatePreferredReg(current, free_until_pos)) return true;
3158 : }
3159 :
3160 : // Register reg is available at the range start and is free until the range
3161 : // end.
3162 : DCHECK(pos >= current->End());
3163 12123626 : TRACE("Assigning free reg %s to live range %d:%d\n", RegisterName(reg),
3164 : current->TopLevel()->vreg(), current->relative_id());
3165 12123626 : SetLiveRangeAssignedRegister(current, reg);
3166 :
3167 12123611 : return true;
3168 : }
3169 :
3170 3282723 : void LinearScanAllocator::AllocateBlockedReg(LiveRange* current) {
3171 3084095 : UsePosition* register_use = current->NextRegisterPosition(current->Start());
3172 3084089 : if (register_use == nullptr) {
3173 : // There is no use in the current live range that requires a register.
3174 : // We can just spill it.
3175 1527140 : Spill(current);
3176 1527140 : return;
3177 : }
3178 :
3179 : int num_regs = num_registers();
3180 : int num_codes = num_allocatable_registers();
3181 : const int* codes = allocatable_register_codes();
3182 : MachineRepresentation rep = current->representation();
3183 : if (!kSimpleFPAliasing && (rep == MachineRepresentation::kFloat32 ||
3184 : rep == MachineRepresentation::kSimd128))
3185 : GetFPRegisterSet(rep, &num_regs, &num_codes, &codes);
3186 :
3187 : // use_pos keeps track of positions a register/alias is used at.
3188 : // block_pos keeps track of positions from which a register/alias is
3189 : // blocked.
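 : // Illustrative example: a fixed (or unspillable) active range assigned to
 : // r1 sets block_pos[r1] = use_pos[r1] = 0, so r1 can neither be taken nor
 : // freed by spilling. A spillable active range in r2 only lowers
 : // use_pos[r2], so r2 remains obtainable by spilling that range.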
3190 51379541 : LifetimePosition use_pos[RegisterConfiguration::kMaxFPRegisters];
3191 49822592 : LifetimePosition block_pos[RegisterConfiguration::kMaxFPRegisters];
3192 24911170 : for (int i = 0; i < num_regs; i++) {
3193 24911170 : use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
3194 : }
3195 :
3196 40821024 : for (LiveRange* range : active_live_ranges()) {
3197 : int cur_reg = range->assigned_register();
3198 : bool is_fixed_or_cant_spill =
3199 21438195 : range->TopLevel()->IsFixed() || !range->CanBeSpilled(current->Start());
3200 : if (kSimpleFPAliasing || !check_fp_aliasing()) {
3201 18853567 : if (is_fixed_or_cant_spill) {
3202 : block_pos[cur_reg] = use_pos[cur_reg] =
3203 16456249 : LifetimePosition::GapFromInstructionIndex(0);
3204 : } else {
3205 : DCHECK_NE(LifetimePosition::GapFromInstructionIndex(0),
3206 : block_pos[cur_reg]);
3207 : use_pos[cur_reg] =
3208 2397318 : range->NextLifetimePositionRegisterIsBeneficial(current->Start());
3209 : }
3210 : } else {
3211 : int alias_base_index = -1;
3212 : int aliases = data()->config()->GetAliases(
3213 : range->representation(), cur_reg, rep, &alias_base_index);
3214 : DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
3215 : while (aliases--) {
3216 : int aliased_reg = alias_base_index + aliases;
3217 : if (is_fixed_or_cant_spill) {
3218 : block_pos[aliased_reg] = use_pos[aliased_reg] =
3219 : LifetimePosition::GapFromInstructionIndex(0);
3220 : } else {
3221 : use_pos[aliased_reg] =
3222 : Min(block_pos[aliased_reg],
3223 : range->NextLifetimePositionRegisterIsBeneficial(
3224 : current->Start()));
3225 : }
3226 : }
3227 : }
3228 : }
3229 :
3230 10626938 : for (LiveRange* range : inactive_live_ranges()) {
3231 : DCHECK(range->End() > current->Start());
3232 : int cur_reg = range->assigned_register();
3233 3756514 : bool is_fixed = range->TopLevel()->IsFixed();
3234 :
3235 : // Don't perform costly intersections if they are guaranteed not to update
3236 : // block_pos or use_pos.
3237 : // TODO(mtrofin): extend to aliased ranges, too.
3238 : if ((kSimpleFPAliasing || !check_fp_aliasing())) {
3239 3756514 : if (is_fixed) {
3240 8280624 : if (block_pos[cur_reg] < range->Start()) continue;
3241 : } else {
3242 2643988 : if (use_pos[cur_reg] < range->Start()) continue;
3243 : }
3244 : }
3245 :
3246 2385018 : LifetimePosition next_intersection = range->FirstIntersection(current);
3247 2385018 : if (!next_intersection.IsValid()) continue;
3248 :
3249 : if (kSimpleFPAliasing || !check_fp_aliasing()) {
3250 344930 : if (is_fixed) {
3251 335506 : block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
3252 335506 : use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
3253 : } else {
3254 9424 : use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
3255 : }
3256 : } else {
3257 : int alias_base_index = -1;
3258 : int aliases = data()->config()->GetAliases(
3259 : range->representation(), cur_reg, rep, &alias_base_index);
3260 : DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
3261 : while (aliases--) {
3262 : int aliased_reg = alias_base_index + aliases;
3263 : if (is_fixed) {
3264 : block_pos[aliased_reg] =
3265 : Min(block_pos[aliased_reg], next_intersection);
3266 : use_pos[aliased_reg] =
3267 : Min(block_pos[aliased_reg], use_pos[aliased_reg]);
3268 : } else {
3269 : use_pos[aliased_reg] = Min(use_pos[aliased_reg], next_intersection);
3270 : }
3271 : }
3272 : }
3273 : }
3274 :
3275 1556955 : int reg = codes[0];
3276 18853599 : for (int i = 1; i < num_codes; ++i) {
3277 17296644 : int code = codes[i];
3278 34593288 : if (use_pos[code] > use_pos[reg]) {
3279 : reg = code;
3280 : }
3281 : }
3282 :
3283 3113910 : if (use_pos[reg] < register_use->pos()) {
3284 : // If there is a gap position before the next register use, we can
3285 : // spill until there. The gap position will then fit the fill move.
3286 1358327 : if (LifetimePosition::ExistsGapPositionBetween(current->Start(),
3287 : register_use->pos())) {
3288 : SpillBetween(current, current->Start(), register_use->pos());
3289 : return;
3290 : }
3291 : }
3292 :
3293 : // We couldn't spill until the next register use. Split before the register
3294 : // is blocked, if applicable.
3295 397256 : if (block_pos[reg] < current->End()) {
3296 : // Register becomes blocked before the current range end. Split before that
3297 : // position.
3298 : LiveRange* tail =
3299 13661 : SplitBetween(current, current->Start(), block_pos[reg].Start());
3300 13661 : AddToUnhandledSorted(tail);
3301 : }
3302 :
3303 : // Register reg is not blocked for the whole range.
3304 : DCHECK(block_pos[reg] >= current->End());
3305 198628 : TRACE("Assigning blocked reg %s to live range %d:%d\n", RegisterName(reg),
3306 : current->TopLevel()->vreg(), current->relative_id());
3307 198628 : SetLiveRangeAssignedRegister(current, reg);
3308 :
3309 : // This register was not free. Thus we need to find and spill
3310 : // parts of active and inactive live regions that use the same register
3311 : // at the same lifetime positions as current.
3312 198628 : SplitAndSpillIntersecting(current);
3313 : }
3314 :
3315 :
3316 198628 : void LinearScanAllocator::SplitAndSpillIntersecting(LiveRange* current) {
3317 : DCHECK(current->HasRegisterAssigned());
3318 : int reg = current->assigned_register();
3319 : LifetimePosition split_pos = current->Start();
3320 5175648 : for (size_t i = 0; i < active_live_ranges().size(); ++i) {
3321 2389196 : LiveRange* range = active_live_ranges()[i];
3322 : if (kSimpleFPAliasing || !check_fp_aliasing()) {
3323 4579764 : if (range->assigned_register() != reg) continue;
3324 : } else {
3325 : if (!data()->config()->AreAliases(current->representation(), reg,
3326 : range->representation(),
3327 : range->assigned_register())) {
3328 : continue;
3329 : }
3330 : }
3331 :
3332 198628 : UsePosition* next_pos = range->NextRegisterPosition(current->Start());
3333 198628 : LifetimePosition spill_pos = FindOptimalSpillingPos(range, split_pos);
3334 198628 : if (next_pos == nullptr) {
3335 172044 : SpillAfter(range, spill_pos);
3336 : } else {
3337 : // When spilling between spill_pos and next_pos, ensure that the range
3338 : // remains spilled at least until the start of the current live range.
3339 : // This guarantees that we will not introduce new unhandled ranges that
3340 : // start before the current range, as this violates allocation invariants
3341 : // and will lead to an inconsistent state of active and inactive
3342 : // live-ranges: ranges are allocated in order of their start positions,
3343 : // and ranges are retired from active/inactive when the start of the
3344 : // current live-range is larger than their end.
3345 : DCHECK(LifetimePosition::ExistsGapPositionBetween(current->Start(),
3346 : next_pos->pos()));
3347 26584 : SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos());
3348 : }
3349 198628 : ActiveToHandled(range);
3350 198628 : --i;
3351 : }
3352 :
3353 4941124 : for (size_t i = 0; i < inactive_live_ranges().size(); ++i) {
3354 2501020 : LiveRange* range = inactive_live_ranges()[i];
3355 : DCHECK(range->End() > current->Start());
3356 2371248 : if (range->TopLevel()->IsFixed()) continue;
3357 : if (kSimpleFPAliasing || !check_fp_aliasing()) {
3358 129772 : if (range->assigned_register() != reg) continue;
3359 : } else {
3360 : if (!data()->config()->AreAliases(current->representation(), reg,
3361 : range->representation(),
3362 : range->assigned_register()))
3363 : continue;
3364 : }
3365 :
3366 8665 : LifetimePosition next_intersection = range->FirstIntersection(current);
3367 8665 : if (next_intersection.IsValid()) {
3368 86 : UsePosition* next_pos = range->NextRegisterPosition(current->Start());
3369 86 : if (next_pos == nullptr) {
3370 63 : SpillAfter(range, split_pos);
3371 : } else {
3372 : next_intersection = Min(next_intersection, next_pos->pos());
3373 : SpillBetween(range, split_pos, next_intersection);
3374 : }
3375 86 : InactiveToHandled(range);
3376 86 : --i;
3377 : }
3378 : }
3379 198628 : }
3380 :
3381 :
3382 12606389 : bool LinearScanAllocator::TryReuseSpillForPhi(TopLevelLiveRange* range) {
3383 12606389 : if (!range->is_phi()) return false;
3384 :
3385 : DCHECK(!range->HasSpillOperand());
3386 1135114 : RegisterAllocationData::PhiMapValue* phi_map_value =
3387 4205877 : data()->GetPhiMapValueFor(range);
3388 : const PhiInstruction* phi = phi_map_value->phi();
3389 : const InstructionBlock* block = phi_map_value->block();
3390 : // Count the number of spilled operands.
3391 : size_t spilled_count = 0;
3392 32922 : LiveRange* first_op = nullptr;
3393 8005692 : for (size_t i = 0; i < phi->operands().size(); i++) {
3394 2867735 : int op = phi->operands()[i];
3395 4220725 : LiveRange* op_range = data()->GetOrCreateLiveRangeFor(op);
3396 2867732 : if (!op_range->TopLevel()->HasSpillRange()) continue;
3397 302196 : const InstructionBlock* pred =
3398 604392 : code()->InstructionBlockAt(block->predecessors()[i]);
3399 : LifetimePosition pred_end =
3400 : LifetimePosition::InstructionFromInstructionIndex(
3401 : pred->last_instruction_index());
3402 2831625 : while (op_range != nullptr && !op_range->CanCover(pred_end)) {
3403 : op_range = op_range->next();
3404 : }
3405 601043 : if (op_range != nullptr && op_range->spilled()) {
3406 244622 : spilled_count++;
3407 244622 : if (first_op == nullptr) {
3408 : first_op = op_range->TopLevel();
3409 : }
3410 : }
3411 : }
3412 :
3413 : // Only continue if more than half of the operands are spilled.
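 : // (Illustrative: for a phi with three operands of which two are spilled at
 : // the ends of their predecessor blocks, spilled_count == 2 and
 : // 2 * 2 > 3, so we proceed to try merging their spill ranges.)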
3414 1135111 : if (spilled_count * 2 <= phi->operands().size()) {
3415 : return false;
3416 : }
3417 :
3418 : // Try to merge the spilled operands and count the number of merged spilled
3419 : // operands.
3420 : DCHECK(first_op != nullptr);
3421 62023 : SpillRange* first_op_spill = first_op->TopLevel()->GetSpillRange();
3422 : size_t num_merged = 1;
3423 416934 : for (size_t i = 1; i < phi->operands().size(); i++) {
3424 175545 : int op = phi->operands()[i];
3425 686459 : TopLevelLiveRange* op_range = data()->live_ranges()[op];
3426 175545 : if (!op_range->HasSpillRange()) continue;
3427 : SpillRange* op_spill = op_range->GetSpillRange();
3428 159824 : if (op_spill == first_op_spill || first_op_spill->TryMerge(op_spill)) {
3429 146233 : num_merged++;
3430 : }
3431 : }
3432 :
3433 : // Only continue if enough operands could be merged into the
3434 : // same spill slot.
3435 62023 : if (num_merged * 2 <= phi->operands().size() ||
3436 : AreUseIntervalsIntersecting(first_op_spill->interval(),
3437 85684 : range->first_interval())) {
3438 : return false;
3439 : }
3440 :
3441 : // If the range does not need a register soon, spill it to the merged
3442 : // spill range.
3443 : LifetimePosition next_pos = range->Start();
3444 28884 : if (next_pos.IsGapPosition()) next_pos = next_pos.NextStart();
3445 28884 : UsePosition* pos = range->NextUsePositionRegisterIsBeneficial(next_pos);
3446 28884 : if (pos == nullptr) {
3447 : SpillRange* spill_range =
3448 : range->TopLevel()->HasSpillRange()
3449 6 : ? range->TopLevel()->GetSpillRange()
3450 48802 : : data()->AssignSpillRangeToLiveRange(range->TopLevel());
3451 24404 : bool merged = first_op_spill->TryMerge(spill_range);
3452 24404 : if (!merged) return false;
3453 24404 : Spill(range);
3454 24404 : return true;
3455 4480 : } else if (pos->pos() > range->Start().NextStart()) {
3456 : SpillRange* spill_range =
3457 : range->TopLevel()->HasSpillRange()
3458 0 : ? range->TopLevel()->GetSpillRange()
3459 6156 : : data()->AssignSpillRangeToLiveRange(range->TopLevel());
3460 3078 : bool merged = first_op_spill->TryMerge(spill_range);
3461 3078 : if (!merged) return false;
3462 : SpillBetween(range, range->Start(), pos->pos());
3463 : DCHECK(UnhandledIsSorted());
3464 3078 : return true;
3465 : }
3466 : return false;
3467 : }
3468 :
3469 :
3470 172107 : void LinearScanAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
3471 172107 : LiveRange* second_part = SplitRangeAt(range, pos);
3472 172107 : Spill(second_part);
3473 172107 : }
3474 :
3475 :
3476 0 : void LinearScanAllocator::SpillBetween(LiveRange* range, LifetimePosition start,
3477 : LifetimePosition end) {
3478 1361428 : SpillBetweenUntil(range, start, start, end);
3479 0 : }
3480 :
3481 :
3482 1388012 : void LinearScanAllocator::SpillBetweenUntil(LiveRange* range,
3483 : LifetimePosition start,
3484 : LifetimePosition until,
3485 : LifetimePosition end) {
3486 1388012 : CHECK(start < end);
3487 2776007 : LiveRange* second_part = SplitRangeAt(range, start);
3488 :
3489 1388011 : if (second_part->Start() < end) {
3490 : // The split result intersects with [start, end[.
3491 : // Split it at a position between ]start+1, end[, spill the middle part
3492 : // and add the rest back to unhandled.
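 : // Illustrative example: for a range covering [10, 60) with start = 12,
 : // until = 12 and end = 30, second_part is [12, 60); SplitBetween then picks
 : // a split position inside ]12, 30[, the middle part from 12 up to that
 : // position is spilled, and the tail up to 60 is re-queued as unhandled.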
3493 1387995 : LifetimePosition third_part_end = end.PrevStart().End();
3494 1387995 : if (data()->IsBlockBoundary(end.Start())) {
3495 0 : third_part_end = end.Start();
3496 : }
3497 : LiveRange* third_part = SplitBetween(
3498 1387995 : second_part, Max(second_part->Start().End(), until), third_part_end);
3499 :
3500 : DCHECK(third_part != second_part);
3501 :
3502 1387996 : Spill(second_part);
3503 1387996 : AddToUnhandledSorted(third_part);
3504 : } else {
3505 : // The split result does not intersect with [start, end[.
3506 : // Nothing to spill. Just add it back to unhandled as a whole.
3507 16 : AddToUnhandledSorted(second_part);
3508 : }
3509 1388013 : }
3510 :
3511 :
3512 915922 : SpillSlotLocator::SpillSlotLocator(RegisterAllocationData* data)
3513 915922 : : data_(data) {}
3514 :
3515 :
3516 915917 : void SpillSlotLocator::LocateSpillSlots() {
3517 915917 : const InstructionSequence* code = data()->code();
3518 53655847 : for (TopLevelLiveRange* range : data()->live_ranges()) {
3519 70524674 : if (range == nullptr || range->IsEmpty()) continue;
3520 : // We care only about ranges which spill in the frame.
3521 24935940 : if (!range->HasSpillRange() || range->IsSpilledOnlyInDeferredBlocks()) {
3522 : continue;
3523 : }
3524 : TopLevelLiveRange::SpillMoveInsertionList* spills =
3525 : range->GetSpillMoveInsertionLocations();
3526 : DCHECK_NOT_NULL(spills);
3527 4019817 : for (; spills != nullptr; spills = spills->next) {
3528 2009904 : code->GetInstructionBlock(spills->gap_index)->mark_needs_frame();
3529 : }
3530 : }
3531 915926 : }
3532 :
3533 :
3534 1831818 : OperandAssigner::OperandAssigner(RegisterAllocationData* data) : data_(data) {}
3535 :
3536 :
3537 915901 : void OperandAssigner::AssignSpillSlots() {
3538 : ZoneVector<SpillRange*>& spill_ranges = data()->spill_ranges();
3539 : // Merge disjoint spill ranges
3540 49005386 : for (size_t i = 0; i < spill_ranges.size(); ++i) {
3541 165078436 : SpillRange* range = spill_ranges[i];
3542 23586783 : if (range == nullptr) continue;
3543 2640466 : if (range->IsEmpty()) continue;
3544 233977920 : for (size_t j = i + 1; j < spill_ranges.size(); ++j) {
3545 115489979 : SpillRange* other = spill_ranges[j];
3546 134598563 : if (other != nullptr && !other->IsEmpty()) {
3547 15158021 : range->TryMerge(other);
3548 : }
3549 : }
3550 : }
3551 : // Allocate slots for the merged spill ranges.
3552 50648215 : for (SpillRange* range : spill_ranges) {
3553 26226863 : if (range == nullptr || range->IsEmpty()) continue;
3554 : // Allocate a new operand referring to the spill slot.
3555 1498906 : if (!range->HasSlot()) {
3556 1060442 : int index = data()->frame()->AllocateSpillSlot(range->byte_width());
3557 : range->set_assigned_slot(index);
3558 : }
3559 : }
3560 915919 : }
3561 :
3562 :
3563 915949 : void OperandAssigner::CommitAssignment() {
3564 73171402 : for (TopLevelLiveRange* top_range : data()->live_ranges()) {
3565 117696966 : if (top_range == nullptr || top_range->IsEmpty()) continue;
3566 : InstructionOperand spill_operand;
3567 22295063 : if (top_range->HasSpillOperand()) {
3568 9758067 : spill_operand = *top_range->TopLevel()->GetSpillOperand();
3569 12536996 : } else if (top_range->TopLevel()->HasSpillRange()) {
3570 2640455 : spill_operand = top_range->TopLevel()->GetSpillRangeOperand();
3571 : }
3572 22295063 : if (top_range->is_phi()) {
3573 : data()->GetPhiMapValueFor(top_range)->CommitAssignment(
3574 2667046 : top_range->GetAssignedOperand());
3575 : }
3576 77230851 : for (LiveRange* range = top_range; range != nullptr;
3577 : range = range->next()) {
3578 54935686 : InstructionOperand assigned = range->GetAssignedOperand();
3579 54935709 : range->ConvertUsesToOperand(assigned, spill_operand);
3580 : }
3581 :
3582 22295165 : if (!spill_operand.IsInvalid()) {
3583 : // If this top level range has a child spilled in a deferred block, we use
3584 : // the range and control flow connection mechanism instead of spilling at
3585 : // definition. Refer to the ConnectLiveRanges and ResolveControlFlow
3586 : // phases. Normally, when we spill at definition, we do not insert a
3587 : // connecting move when a successor child range is spilled - because the
3588 : // spilled range picks up its value from the slot which was assigned at
3589 : // definition. For ranges that are determined to spill only in deferred
3590 : // blocks, we let ConnectLiveRanges and ResolveControlFlow find the blocks
3591 : // where a spill operand is expected, and then finalize by inserting the
3592 : // spills in the deferred blocks dominators.
3593 12398477 : if (!top_range->IsSpilledOnlyInDeferredBlocks()) {
3594 : // Spill at definition if the range isn't spilled only in deferred
3595 : // blocks.
3596 : top_range->CommitSpillMoves(
3597 : data()->code(), spill_operand,
3598 33248955 : top_range->has_slot_use() || top_range->spilled());
3599 : }
3600 : }
3601 : }
3602 915919 : }
3603 :
3604 :
3605 915922 : ReferenceMapPopulator::ReferenceMapPopulator(RegisterAllocationData* data)
3606 915922 : : data_(data) {}
3607 :
3608 :
3609 0 : bool ReferenceMapPopulator::SafePointsAreInOrder() const {
3610 : int safe_point = 0;
3611 0 : for (ReferenceMap* map : *data()->code()->reference_maps()) {
3612 0 : if (safe_point > map->instruction_position()) return false;
3613 : safe_point = map->instruction_position();
3614 : }
3615 0 : return true;
3616 : }
3617 :
3618 :
3619 915869 : void ReferenceMapPopulator::PopulateReferenceMaps() {
3620 : DCHECK(SafePointsAreInOrder());
3621 : // Map all delayed references.
3622 1831738 : for (RegisterAllocationData::DelayedReference& delayed_reference :
3623 : data()->delayed_references()) {
3624 : delayed_reference.map->RecordReference(
3625 0 : AllocatedOperand::cast(*delayed_reference.operand));
3626 : }
3627 : // Iterate over all safe point positions and record a pointer
3628 : // for all spilled live ranges at this point.
3629 : int last_range_start = 0;
3630 915869 : const ReferenceMapDeque* reference_maps = data()->code()->reference_maps();
3631 : ReferenceMapDeque::const_iterator first_it = reference_maps->begin();
3632 106645573 : for (TopLevelLiveRange* range : data()->live_ranges()) {
3633 47173075 : if (range == nullptr) continue;
3634 : // Skip non-reference values.
3635 23350843 : if (!data()->IsReference(range)) continue;
3636 : // Skip empty live ranges.
3637 10472699 : if (range->IsEmpty()) continue;
3638 9419490 : if (range->has_preassigned_slot()) continue;
3639 :
3640 : // Find the extent of the range and its children.
3641 : int start = range->Start().ToInstructionIndex();
3642 : int end = 0;
3643 35103471 : for (LiveRange* cur = range; cur != nullptr; cur = cur->next()) {
3644 : LifetimePosition this_end = cur->End();
3645 26122596 : if (this_end.ToInstructionIndex() > end)
3646 : end = this_end.ToInstructionIndex();
3647 : DCHECK(cur->Start().ToInstructionIndex() >= start);
3648 : }
3649 :
3650 : // Most of the ranges are in order, but not all. Keep an eye on when they
3651 : // step backwards and reset the first_it so we don't miss any safe points.
3652 8980875 : if (start < last_range_start) first_it = reference_maps->begin();
3653 : last_range_start = start;
3654 :
3655 : // Step across all the safe points that are before the start of this range,
3656 : // recording how far we step in order to save doing this for the next range.
3657 252206499 : for (; first_it != reference_maps->end(); ++first_it) {
3658 242158288 : ReferenceMap* map = *first_it;
3659 242158288 : if (map->instruction_position() >= start) break;
3660 : }
3661 :
3662 : InstructionOperand spill_operand;
3663 12580306 : if (((range->HasSpillOperand() &&
3664 17293403 : !range->GetSpillOperand()->IsConstant()) ||
3665 : range->HasSpillRange())) {
3666 2128058 : if (range->HasSpillOperand()) {
3667 668496 : spill_operand = *range->GetSpillOperand();
3668 : } else {
3669 : spill_operand = range->GetSpillRangeOperand();
3670 : }
3671 : DCHECK(spill_operand.IsStackSlot());
3672 : DCHECK(CanBeTaggedPointer(
3673 : AllocatedOperand::cast(spill_operand).representation()));
3674 : }
3675 :
3676 41422622 : LiveRange* cur = range;
3677 : // Step through the safe points to see whether they are in the range.
3678 44543171 : for (auto it = first_it; it != reference_maps->end(); ++it) {
3679 32861527 : ReferenceMap* map = *it;
3680 : int safe_point = map->instruction_position();
3681 :
3682 : // The safe points are sorted so we can stop searching here.
3683 32861527 : if (safe_point - 1 > end) break;
3684 :
3685 : // Advance to the next active range that covers the current
3686 : // safe point position.
3687 : LifetimePosition safe_point_pos =
3688 : LifetimePosition::InstructionFromInstructionIndex(safe_point);
3689 :
3690 : // Search for the child range (cur) that covers safe_point_pos. If we
3691 : // don't find it before the children pass safe_point_pos, keep cur at
3692 : // the last child, because the next safe_point_pos may be covered by cur.
3693 : // This may happen if cur has more than one interval, and the current
3694 : // safe_point_pos is in between intervals.
3695 : // For that reason, cur may be at most the last child.
3696 : DCHECK_NOT_NULL(cur);
3697 : DCHECK(safe_point_pos >= cur->Start() || range == cur);
3698 : bool found = false;
3699 90141115 : while (!found) {
3700 41422466 : if (cur->Covers(safe_point_pos)) {
3701 : found = true;
3702 : } else {
3703 : LiveRange* next = cur->next();
3704 34883103 : if (next == nullptr || next->Start() > safe_point_pos) {
3705 : break;
3706 : }
3707 : cur = next;
3708 : }
3709 : }
3710 :
3711 26581427 : if (!found) {
3712 : continue;
3713 : }
3714 :
3715 : // Check if the live range is spilled and the safe point is after
3716 : // the spill position.
3717 : int spill_index = range->IsSpilledOnlyInDeferredBlocks()
3718 : ? cur->Start().ToInstructionIndex()
3719 22137384 : : range->spill_start_index();
3720 :
3721 22137384 : if (!spill_operand.IsInvalid() && safe_point >= spill_index) {
3722 10824415 : TRACE("Pointer for range %d (spilled at %d) at safe point %d\n",
3723 : range->vreg(), spill_index, safe_point);
3724 10824415 : map->RecordReference(AllocatedOperand::cast(spill_operand));
3725 : }
3726 :
3727 22137378 : if (!cur->spilled()) {
3728 0 : TRACE(
3729 : "Pointer in register for range %d:%d (start at %d) "
3730 : "at safe point %d\n",
3731 : range->vreg(), cur->relative_id(), cur->Start().value(),
3732 : safe_point);
3733 0 : InstructionOperand operand = cur->GetAssignedOperand();
3734 : DCHECK(!operand.IsStackSlot());
3735 : DCHECK(CanBeTaggedPointer(
3736 : AllocatedOperand::cast(operand).representation()));
3737 0 : map->RecordReference(AllocatedOperand::cast(operand));
3738 : }
3739 : }
3740 : }
3741 915899 : }
3742 :
3743 :
3744 1831829 : LiveRangeConnector::LiveRangeConnector(RegisterAllocationData* data)
3745 1831829 : : data_(data) {}
3746 :
3747 :
3748 0 : bool LiveRangeConnector::CanEagerlyResolveControlFlow(
3749 : const InstructionBlock* block) const {
3750 12179350 : if (block->PredecessorCount() != 1) return false;
3751 0 : return block->predecessors()[0].IsNext(block->rpo_number());
3752 : }
3753 :
3754 :
3755 915842 : void LiveRangeConnector::ResolveControlFlow(Zone* local_zone) {
3756 : // Lazily linearize live ranges in memory for fast lookup.
3757 915842 : LiveRangeFinder finder(data(), local_zone);
3758 : ZoneVector<BitVector*>& live_in_sets = data()->live_in_sets();
3759 15534038 : for (const InstructionBlock* block : code()->instruction_blocks()) {
3760 15855584 : if (CanEagerlyResolveControlFlow(block)) continue;
3761 14135064 : BitVector* live = live_in_sets[block->rpo_number().ToInt()];
3762 : BitVector::Iterator iterator(live);
3763 117121750 : while (!iterator.Done()) {
3764 51493355 : int vreg = iterator.Current();
3765 51493355 : LiveRangeBoundArray* array = finder.ArrayFor(vreg);
3766 182392642 : for (const RpoNumber& pred : block->predecessors()) {
3767 : FindResult result;
3768 79568484 : const InstructionBlock* pred_block = code()->InstructionBlockAt(pred);
3769 79406134 : if (!array->FindConnectableSubranges(block, pred_block, &result)) {
3770 78250620 : continue;
3771 : }
3772 4709747 : InstructionOperand pred_op = result.pred_cover_->GetAssignedOperand();
3773 4709745 : InstructionOperand cur_op = result.cur_cover_->GetAssignedOperand();
3774 4709742 : if (pred_op.Equals(cur_op)) continue;
3775 2375435 : if (!pred_op.IsAnyRegister() && cur_op.IsAnyRegister()) {
3776 : // We're doing a reload.
3777 : // We don't need to, if:
3778 : // 1) there's no register use in this block, and
3779 : // 2) the range ends before the block does, and
3780 : // 3) we don't have a successor, or the successor is spilled.
3781 : LifetimePosition block_start =
3782 1120248 : LifetimePosition::GapFromInstructionIndex(block->code_start());
3783 : LifetimePosition block_end =
3784 : LifetimePosition::GapFromInstructionIndex(block->code_end());
3785 2140883 : const LiveRange* current = result.cur_cover_;
3786 159753 : const LiveRange* successor = current->next();
3787 2240496 : if (current->End() < block_end &&
3788 159753 : (successor == nullptr || successor->spilled())) {
3789 : // verify point 1: no register use. We can go to the end of the
3790 : // range, since it's all within the block.
3791 :
3792 : bool uses_reg = false;
3793 264368 : for (const UsePosition* use = current->NextUsePosition(block_start);
3794 : use != nullptr; use = use->next()) {
3795 164755 : if (use->operand()->IsAnyRegister()) {
3796 : uses_reg = true;
3797 : break;
3798 : }
3799 : }
3800 363861 : if (!uses_reg) continue;
3801 : }
3802 1183094 : if (current->TopLevel()->IsSpilledOnlyInDeferredBlocks() &&
3803 : pred_block->IsDeferred()) {
3804 : // The spill location should be defined in pred_block, so add
3805 : // pred_block to the list of blocks requiring a spill operand.
3806 : current->TopLevel()->GetListOfBlocksRequiringSpillOperands()->Add(
3807 162459 : pred_block->rpo_number().ToInt());
3808 : }
3809 : }
3810 1155576 : int move_loc = ResolveControlFlow(block, cur_op, pred_block, pred_op);
3811 : USE(move_loc);
3812 : DCHECK_IMPLIES(
3813 : result.cur_cover_->TopLevel()->IsSpilledOnlyInDeferredBlocks() &&
3814 : !(pred_op.IsAnyRegister() && cur_op.IsAnyRegister()),
3815 : code()->GetInstructionBlock(move_loc)->IsDeferred());
3816 : }
3817 51493369 : iterator.Advance();
3818 : }
3819 : }
3820 :
3821 : // At this stage, we collected blocks needing a spill operand from
3822 : // ConnectRanges and from ResolveControlFlow. Time to commit the spills for
3823 : // deferred blocks.
3824 71931380 : for (TopLevelLiveRange* top : data()->live_ranges()) {
3825 92819858 : if (top == nullptr || top->IsEmpty() ||
3826 : !top->IsSpilledOnlyInDeferredBlocks())
3827 : continue;
3828 630576 : CommitSpillsInDeferredBlocks(top, finder.ArrayFor(top->vreg()), local_zone);
3829 : }
3830 915914 : }
3831 :
3832 :
3833 1657900 : int LiveRangeConnector::ResolveControlFlow(const InstructionBlock* block,
3834 : const InstructionOperand& cur_op,
3835 653248 : const InstructionBlock* pred,
3836 : const InstructionOperand& pred_op) {
3837 : DCHECK(!pred_op.Equals(cur_op));
3838 : int gap_index;
3839 : Instruction::GapPosition position;
3840 1155574 : if (block->PredecessorCount() == 1) {
3841 : gap_index = block->first_instruction_index();
3842 : position = Instruction::START;
3843 : } else {
3844 : DCHECK(pred->SuccessorCount() == 1);
3845 : DCHECK(!code()
3846 : ->InstructionAt(pred->last_instruction_index())
3847 : ->HasReferenceMap());
3848 : gap_index = pred->last_instruction_index();
3849 : position = Instruction::END;
3850 : }
3851 1155574 : data()->AddGapMove(gap_index, position, pred_op, cur_op);
3852 1155575 : return gap_index;
3853 : }
3854 :
3855 915992 : void LiveRangeConnector::ConnectRanges(Zone* local_zone) {
3856 : DelayedInsertionMap delayed_insertion_map(local_zone);
3857 73138009 : for (TopLevelLiveRange* top_range : data()->live_ranges()) {
3858 47173210 : if (top_range == nullptr) continue;
3859 : bool connect_spilled = top_range->IsSpilledOnlyInDeferredBlocks();
3860 35918322 : LiveRange* first_range = top_range;
3861 88632791 : for (LiveRange *second_range = first_range->next(); second_range != nullptr;
3862 : first_range = second_range, second_range = second_range->next()) {
3863 : LifetimePosition pos = second_range->Start();
3864 : // Add a gap move if the two live ranges touch and there is no block
3865 : // boundary.
3866 53707062 : if (second_range->spilled()) continue;
3867 12567526 : if (first_range->End() != pos) continue;
3868 12836179 : if (data()->IsBlockBoundary(pos) &&
3869 : !CanEagerlyResolveControlFlow(GetInstructionBlock(code(), pos))) {
3870 : continue;
3871 : }
3872 11729490 : InstructionOperand prev_operand = first_range->GetAssignedOperand();
3873 11729432 : InstructionOperand cur_operand = second_range->GetAssignedOperand();
3874 11729482 : if (prev_operand.Equals(cur_operand)) continue;
3875 : bool delay_insertion = false;
3876 : Instruction::GapPosition gap_pos;
3877 : int gap_index = pos.ToInstructionIndex();
3878 13142248 : if (connect_spilled && !prev_operand.IsAnyRegister() &&
3879 : cur_operand.IsAnyRegister()) {
3880 782103 : const InstructionBlock* block = code()->GetInstructionBlock(gap_index);
3881 : DCHECK(block->IsDeferred());
3882 : // We are performing a reload in this block, so the spill operand must
3883 : // be defined here.
3884 : top_range->GetListOfBlocksRequiringSpillOperands()->Add(
3885 782112 : block->rpo_number().ToInt());
3886 : }
3887 :
3888 11575131 : if (pos.IsGapPosition()) {
3889 11575005 : gap_pos = pos.IsStart() ? Instruction::START : Instruction::END;
3890 : } else {
3891 126 : if (pos.IsStart()) {
3892 : delay_insertion = true;
3893 : } else {
3894 0 : gap_index++;
3895 : }
3896 126 : gap_pos = delay_insertion ? Instruction::END : Instruction::START;
3897 : }
3898 : // Reloads or spills for ranges that are spilled only in deferred blocks
3899 : // must happen in those deferred blocks.
3900 : DCHECK_IMPLIES(
3901 : connect_spilled &&
3902 : !(prev_operand.IsAnyRegister() && cur_operand.IsAnyRegister()),
3903 : code()->GetInstructionBlock(gap_index)->IsDeferred());
3904 :
3905 : ParallelMove* move =
3906 : code()->InstructionAt(gap_index)->GetOrCreateParallelMove(
3907 11575082 : gap_pos, code_zone());
3908 11575119 : if (!delay_insertion) {
3909 : move->AddMove(prev_operand, cur_operand);
3910 : } else {
3911 : delayed_insertion_map.insert(
3912 126 : std::make_pair(std::make_pair(move, prev_operand), cur_operand));
3913 : }
3914 : }
3915 : }
3916 1831793 : if (delayed_insertion_map.empty()) return;
3917 : // Insert all the moves which should occur after the stored move.
3918 : ZoneVector<MoveOperands*> to_insert(local_zone);
3919 : ZoneVector<MoveOperands*> to_eliminate(local_zone);
3920 75 : to_insert.reserve(4);
3921 75 : to_eliminate.reserve(4);
3922 75 : ParallelMove* moves = delayed_insertion_map.begin()->first.first;
3923 : for (auto it = delayed_insertion_map.begin();; ++it) {
3924 : bool done = it == delayed_insertion_map.end();
3925 201 : if (done || it->first.first != moves) {
3926 : // Commit the MoveOperands for current ParallelMove.
3927 252 : for (MoveOperands* move : to_eliminate) {
3928 : move->Eliminate();
3929 : }
3930 378 : for (MoveOperands* move : to_insert) {
3931 126 : moves->push_back(move);
3932 : }
3933 126 : if (done) break;
3934 : // Reset state.
3935 : to_eliminate.clear();
3936 : to_insert.clear();
3937 51 : moves = it->first.first;
3938 : }
3939 : // Gather all MoveOperands for a single ParallelMove.
3940 : MoveOperands* move =
3941 126 : new (code_zone()) MoveOperands(it->first.second, it->second);
3942 126 : moves->PrepareInsertAfter(move, &to_eliminate);
3943 126 : to_insert.push_back(move);
3944 : }
3945 : }
3946 :
3947 :
3948 630570 : void LiveRangeConnector::CommitSpillsInDeferredBlocks(
3949 3145219 : TopLevelLiveRange* range, LiveRangeBoundArray* array, Zone* temp_zone) {
3950 : DCHECK(range->IsSpilledOnlyInDeferredBlocks());
3951 : DCHECK(!range->spilled());
3952 :
3953 6597879 : InstructionSequence* code = data()->code();
3954 630570 : InstructionOperand spill_operand = range->GetSpillRangeOperand();
3955 :
3956 630570 : TRACE("Live Range %d will be spilled only in deferred blocks.\n",
3957 : range->vreg());
3958 : // If we have ranges that aren't spilled but require the operand on the stack,
3959 : // make sure we insert the spill.
3960 6001739 : for (const LiveRange* child = range; child != nullptr;
3961 : child = child->next()) {
3962 5540395 : for (const UsePosition* pos = child->first_pos(); pos != nullptr;
3963 : pos = pos->next()) {
3964 6080682 : if (pos->type() != UsePositionType::kRequiresSlot && !child->spilled())
3965 : continue;
3966 : range->AddBlockRequiringSpillOperand(
3967 : code->GetInstructionBlock(pos->pos().ToInstructionIndex())
3968 371990 : ->rpo_number());
3969 : }
3970 : }
3971 :
3972 630571 : ZoneQueue<int> worklist(temp_zone);
3973 :
3974 1649882 : for (BitVector::Iterator iterator(
3975 : range->GetListOfBlocksRequiringSpillOperands());
3976 1019311 : !iterator.Done(); iterator.Advance()) {
3977 2038627 : worklist.push(iterator.Current());
3978 : }
3979 :
3980 : ZoneSet<std::pair<RpoNumber, int>> done_moves(temp_zone);
3981 : // Seek the deferred blocks that dominate locations requiring spill operands,
3982 : // and spill there. We only need to spill at the start of such blocks.
3983 : BitVector done_blocks(
3984 630566 : range->GetListOfBlocksRequiringSpillOperands()->length(), temp_zone);
3985 4628581 : while (!worklist.empty()) {
3986 3367472 : int block_id = worklist.front();
3987 : worklist.pop();
3988 6734976 : if (done_blocks.Contains(block_id)) continue;
3989 : done_blocks.Add(block_id);
3990 942024 : InstructionBlock* spill_block =
3991 2677103 : code->InstructionBlockAt(RpoNumber::FromInt(block_id));
3992 :
3993 8644419 : for (const RpoNumber& pred : spill_block->predecessors()) {
3994 4232251 : const InstructionBlock* pred_block = code->InstructionBlockAt(pred);
3995 :
3996 3290207 : if (pred_block->IsDeferred()) {
3997 4696331 : worklist.push(pred_block->rpo_number().ToInt());
3998 : } else {
3999 : LifetimePosition pred_end =
4000 : LifetimePosition::InstructionFromInstructionIndex(
4001 : pred_block->last_instruction_index());
4002 :
4003 : LiveRangeBound* bound = array->Find(pred_end);
4004 :
4005 942045 : InstructionOperand pred_op = bound->range_->GetAssignedOperand();
4006 :
4007 : RpoNumber spill_block_number = spill_block->rpo_number();
4008 942030 : if (done_moves.find(std::make_pair(
4009 1884076 : spill_block_number, range->vreg())) == done_moves.end()) {
4010 : data()->AddGapMove(spill_block->first_instruction_index(),
4011 : Instruction::GapPosition::START, pred_op,
4012 942024 : spill_operand);
4013 1884074 : done_moves.insert(std::make_pair(spill_block_number, range->vreg()));
4014 : spill_block->mark_needs_frame();
4015 : }
4016 : }
4017 : }
4018 : }
4019 630567 : }
4020 :
4021 :
4022 : } // namespace compiler
4023 : } // namespace internal
4024 : } // namespace v8