Line data Source code
1 : // Copyright 2014 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/compiler/backend/register-allocator.h"
6 :
7 : #include <iomanip>
8 :
9 : #include "src/assembler-inl.h"
10 : #include "src/base/adapters.h"
11 : #include "src/base/small-vector.h"
12 : #include "src/compiler/linkage.h"
13 : #include "src/string-stream.h"
14 :
15 : namespace v8 {
16 : namespace internal {
17 : namespace compiler {
18 :
19 : #define TRACE(...) \
20 : do { \
21 : if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \
22 : } while (false)
23 :
24 : namespace {
25 :
26 : static constexpr int kFloat32Bit =
27 : RepresentationBit(MachineRepresentation::kFloat32);
28 : static constexpr int kSimd128Bit =
29 : RepresentationBit(MachineRepresentation::kSimd128);
30 :
31 190137 : int GetRegisterCount(const RegisterConfiguration* cfg, RegisterKind kind) {
32 : return kind == FP_REGISTERS ? cfg->num_double_registers()
33 2331304 : : cfg->num_general_registers();
34 : }
35 :
36 190110 : int GetAllocatableRegisterCount(const RegisterConfiguration* cfg,
37 : RegisterKind kind) {
38 : return kind == FP_REGISTERS ? cfg->num_allocatable_double_registers()
39 2331304 : : cfg->num_allocatable_general_registers();
40 : }
41 :
42 190057 : const int* GetAllocatableRegisterCodes(const RegisterConfiguration* cfg,
43 : RegisterKind kind) {
44 : return kind == FP_REGISTERS ? cfg->allocatable_double_codes()
45 2331304 : : cfg->allocatable_general_codes();
46 : }
47 :
48 431933 : const InstructionBlock* GetContainingLoop(const InstructionSequence* sequence,
49 : const InstructionBlock* block) {
50 : RpoNumber index = block->loop_header();
51 4276226 : if (!index.IsValid()) return nullptr;
52 431933 : return sequence->InstructionBlockAt(index);
53 : }
54 :
55 : const InstructionBlock* GetInstructionBlock(const InstructionSequence* code,
56 : LifetimePosition pos) {
57 21881216 : return code->GetInstructionBlock(pos.ToInstructionIndex());
58 : }
59 :
60 3822266 : Instruction* GetLastInstruction(InstructionSequence* code,
61 3822266 : const InstructionBlock* block) {
62 3822269 : return code->InstructionAt(block->last_instruction_index());
63 : }
64 :
65 : // TODO(dcarney): fix frame to allow frame accesses to half size location.
66 4951807 : int GetByteWidth(MachineRepresentation rep) {
67 4951807 : switch (rep) {
68 : case MachineRepresentation::kBit:
69 : case MachineRepresentation::kWord8:
70 : case MachineRepresentation::kWord16:
71 : case MachineRepresentation::kWord32:
72 : case MachineRepresentation::kFloat32:
73 : return kSystemPointerSize;
74 : case MachineRepresentation::kTaggedSigned:
75 : case MachineRepresentation::kTaggedPointer:
76 : case MachineRepresentation::kTagged:
77 : // TODO(ishell): kTaggedSize once half size locations are supported.
78 : return kSystemPointerSize;
79 : case MachineRepresentation::kWord64:
80 : case MachineRepresentation::kFloat64:
81 : return kDoubleSize;
82 : case MachineRepresentation::kSimd128:
83 996 : return kSimd128Size;
84 : case MachineRepresentation::kNone:
85 : break;
86 : }
87 0 : UNREACHABLE();
88 : }
89 :
90 : } // namespace
91 :
92 : class LiveRangeBound {
93 : public:
94 36671771 : explicit LiveRangeBound(LiveRange* range, bool skip)
95 110015313 : : range_(range), start_(range->Start()), end_(range->End()), skip_(skip) {
96 : DCHECK(!range->IsEmpty());
97 : }
98 :
99 : bool CanCover(LifetimePosition position) {
100 256545835 : return start_ <= position && position < end_;
101 : }
102 :
103 : LiveRange* const range_;
104 : const LifetimePosition start_;
105 : const LifetimePosition end_;
106 : const bool skip_;
107 :
108 : private:
109 : DISALLOW_COPY_AND_ASSIGN(LiveRangeBound);
110 : };
111 :
112 : struct FindResult {
113 : LiveRange* cur_cover_;
114 : LiveRange* pred_cover_;
115 : };
116 :
117 : class LiveRangeBoundArray {
118 : public:
119 80715007 : LiveRangeBoundArray() : length_(0), start_(nullptr) {}
120 :
121 : bool ShouldInitialize() { return start_ == nullptr; }
122 :
123 6798199 : void Initialize(Zone* zone, TopLevelLiveRange* range) {
124 6798199 : size_t max_child_count = range->GetMaxChildCount();
125 :
126 6798210 : start_ = zone->NewArray<LiveRangeBound>(max_child_count);
127 6798210 : length_ = 0;
128 : LiveRangeBound* curr = start_;
129 : // Normally, spilled ranges do not need connecting moves, because the spill
130 : // location has been assigned at definition. For ranges spilled in deferred
131 : // blocks, that is not the case, so we need to connect the spilled children.
132 43470027 : for (LiveRange *i = range; i != nullptr; i = i->next(), ++curr, ++length_) {
133 36671817 : new (curr) LiveRangeBound(i, i->spilled());
134 : }
135 6798210 : }
136 :
137 : LiveRangeBound* Find(const LifetimePosition position) const {
138 : size_t left_index = 0;
139 170182659 : size_t right_index = length_;
140 : while (true) {
141 580481674 : size_t current_index = left_index + (right_index - left_index) / 2;
142 : DCHECK(right_index > current_index);
143 580481674 : LiveRangeBound* bound = &start_[current_index];
144 580481674 : if (bound->start_ <= position) {
145 395848659 : if (position < bound->end_) return bound;
146 : DCHECK(left_index < current_index);
147 : left_index = current_index;
148 : } else {
149 : right_index = current_index;
150 : }
151 : }
152 : }
153 :
154 : LiveRangeBound* FindPred(const InstructionBlock* pred) {
155 : LifetimePosition pred_end =
156 : LifetimePosition::InstructionFromInstructionIndex(
157 : pred->last_instruction_index());
158 : return Find(pred_end);
159 : }
160 :
161 : LiveRangeBound* FindSucc(const InstructionBlock* succ) {
162 : LifetimePosition succ_start = LifetimePosition::GapFromInstructionIndex(
163 : succ->first_instruction_index());
164 : return Find(succ_start);
165 : }
166 :
167 257566542 : bool FindConnectableSubranges(const InstructionBlock* block,
168 128783271 : const InstructionBlock* pred,
169 : FindResult* result) const {
170 : LifetimePosition pred_end =
171 : LifetimePosition::InstructionFromInstructionIndex(
172 : pred->last_instruction_index());
173 : LiveRangeBound* bound = Find(pred_end);
174 128783271 : result->pred_cover_ = bound->range_;
175 : LifetimePosition cur_start = LifetimePosition::GapFromInstructionIndex(
176 : block->first_instruction_index());
177 :
178 128783271 : if (bound->CanCover(cur_start)) {
179 : // Both blocks are covered by the same range, so there is nothing to
180 : // connect.
181 : return false;
182 : }
183 : bound = Find(cur_start);
184 40065807 : if (bound->skip_) {
185 : return false;
186 : }
187 7393428 : result->cur_cover_ = bound->range_;
188 : DCHECK(result->pred_cover_ != nullptr && result->cur_cover_ != nullptr);
189 7393428 : return (result->cur_cover_ != result->pred_cover_);
190 : }
191 :
192 : private:
193 : size_t length_;
194 : LiveRangeBound* start_;
195 :
196 : DISALLOW_COPY_AND_ASSIGN(LiveRangeBoundArray);
197 : };
198 :
199 : class LiveRangeFinder {
200 : public:
201 2141165 : explicit LiveRangeFinder(const RegisterAllocationData* data, Zone* zone)
202 : : data_(data),
203 2141165 : bounds_length_(static_cast<int>(data_->live_ranges().size())),
204 2141165 : bounds_(zone->NewArray<LiveRangeBoundArray>(bounds_length_)),
205 6423934 : zone_(zone) {
206 82856579 : for (int i = 0; i < bounds_length_; ++i) {
207 80714975 : new (&bounds_[i]) LiveRangeBoundArray();
208 : }
209 2141604 : }
210 :
211 84849423 : LiveRangeBoundArray* ArrayFor(int operand_index) {
212 : DCHECK(operand_index < bounds_length_);
213 169698846 : TopLevelLiveRange* range = data_->live_ranges()[operand_index];
214 : DCHECK(range != nullptr && !range->IsEmpty());
215 84849423 : LiveRangeBoundArray* array = &bounds_[operand_index];
216 84849423 : if (array->ShouldInitialize()) {
217 6798204 : array->Initialize(zone_, range);
218 : }
219 84849428 : return array;
220 : }
221 :
222 : private:
223 : const RegisterAllocationData* const data_;
224 : const int bounds_length_;
225 : LiveRangeBoundArray* const bounds_;
226 : Zone* const zone_;
227 :
228 : DISALLOW_COPY_AND_ASSIGN(LiveRangeFinder);
229 : };
230 :
231 : typedef std::pair<ParallelMove*, InstructionOperand> DelayedInsertionMapKey;
232 :
233 : struct DelayedInsertionMapCompare {
234 : bool operator()(const DelayedInsertionMapKey& a,
235 : const DelayedInsertionMapKey& b) const {
236 450 : if (a.first == b.first) {
237 : return a.second.Compare(b.second);
238 : }
239 450 : return a.first < b.first;
240 : }
241 : };
242 :
243 : typedef ZoneMap<DelayedInsertionMapKey, InstructionOperand,
244 : DelayedInsertionMapCompare>
245 : DelayedInsertionMap;
246 :
247 106796716 : UsePosition::UsePosition(LifetimePosition pos, InstructionOperand* operand,
248 : void* hint, UsePositionHintType hint_type)
249 106796716 : : operand_(operand), hint_(hint), next_(nullptr), pos_(pos), flags_(0) {
250 : DCHECK_IMPLIES(hint == nullptr, hint_type == UsePositionHintType::kNone);
251 : bool register_beneficial = true;
252 : UsePositionType type = UsePositionType::kRegisterOrSlot;
253 211452548 : if (operand_ != nullptr && operand_->IsUnallocated()) {
254 : const UnallocatedOperand* unalloc = UnallocatedOperand::cast(operand_);
255 104655957 : if (unalloc->HasRegisterPolicy()) {
256 : type = UsePositionType::kRequiresRegister;
257 64242016 : } else if (unalloc->HasSlotPolicy()) {
258 : type = UsePositionType::kRequiresSlot;
259 : register_beneficial = false;
260 49526048 : } else if (unalloc->HasRegisterOrSlotOrConstantPolicy()) {
261 : type = UsePositionType::kRegisterOrSlotOrConstant;
262 : register_beneficial = false;
263 : } else {
264 49525995 : register_beneficial = !unalloc->HasRegisterOrSlotPolicy();
265 : }
266 : }
267 213593432 : flags_ = TypeField::encode(type) | HintTypeField::encode(hint_type) |
268 106796716 : RegisterBeneficialField::encode(register_beneficial) |
269 106796716 : AssignedRegisterField::encode(kUnassignedRegister);
270 : DCHECK(pos_.IsValid());
271 106796716 : }
272 :
273 0 : bool UsePosition::HasHint() const {
274 : int hint_register;
275 106939248 : return HintRegister(&hint_register);
276 : }
277 :
278 382279929 : bool UsePosition::HintRegister(int* register_code) const {
279 382279929 : if (hint_ == nullptr) return false;
280 169493926 : switch (HintTypeField::decode(flags_)) {
281 : case UsePositionHintType::kNone:
282 : case UsePositionHintType::kUnresolved:
283 : return false;
284 : case UsePositionHintType::kUsePos: {
285 : UsePosition* use_pos = reinterpret_cast<UsePosition*>(hint_);
286 26358134 : int assigned_register = AssignedRegisterField::decode(use_pos->flags_);
287 26358134 : if (assigned_register == kUnassignedRegister) return false;
288 11785600 : *register_code = assigned_register;
289 11785600 : return true;
290 : }
291 : case UsePositionHintType::kOperand: {
292 : InstructionOperand* operand =
293 : reinterpret_cast<InstructionOperand*>(hint_);
294 47843930 : *register_code = LocationOperand::cast(operand)->register_code();
295 47843930 : return true;
296 : }
297 : case UsePositionHintType::kPhi: {
298 1533395 : RegisterAllocationData::PhiMapValue* phi =
299 : reinterpret_cast<RegisterAllocationData::PhiMapValue*>(hint_);
300 : int assigned_register = phi->assigned_register();
301 1533395 : if (assigned_register == kUnassignedRegister) return false;
302 251235 : *register_code = assigned_register;
303 251235 : return true;
304 : }
305 : }
306 0 : UNREACHABLE();
307 : }
308 :
309 47811650 : UsePositionHintType UsePosition::HintTypeForOperand(
310 : const InstructionOperand& op) {
311 47811650 : switch (op.kind()) {
312 : case InstructionOperand::CONSTANT:
313 : case InstructionOperand::IMMEDIATE:
314 : case InstructionOperand::EXPLICIT:
315 : return UsePositionHintType::kNone;
316 : case InstructionOperand::UNALLOCATED:
317 21830645 : return UsePositionHintType::kUnresolved;
318 : case InstructionOperand::ALLOCATED:
319 27326252 : if (op.IsRegister() || op.IsFPRegister()) {
320 : return UsePositionHintType::kOperand;
321 : } else {
322 : DCHECK(op.IsStackSlot() || op.IsFPStackSlot());
323 1207393 : return UsePositionHintType::kNone;
324 : }
325 : case InstructionOperand::INVALID:
326 : break;
327 : }
328 0 : UNREACHABLE();
329 : }
330 :
331 0 : void UsePosition::SetHint(UsePosition* use_pos) {
332 : DCHECK_NOT_NULL(use_pos);
333 11705159 : hint_ = use_pos;
334 23410318 : flags_ = HintTypeField::update(flags_, UsePositionHintType::kUsePos);
335 0 : }
336 :
337 0 : void UsePosition::ResolveHint(UsePosition* use_pos) {
338 : DCHECK_NOT_NULL(use_pos);
339 15308184 : if (HintTypeField::decode(flags_) != UsePositionHintType::kUnresolved) return;
340 7654092 : hint_ = use_pos;
341 7654092 : flags_ = HintTypeField::update(flags_, UsePositionHintType::kUsePos);
342 : }
343 :
344 0 : void UsePosition::set_type(UsePositionType type, bool register_beneficial) {
345 : DCHECK_IMPLIES(type == UsePositionType::kRequiresSlot, !register_beneficial);
346 : DCHECK_EQ(kUnassignedRegister, AssignedRegisterField::decode(flags_));
347 19185180 : flags_ = TypeField::encode(type) |
348 19185180 : RegisterBeneficialField::encode(register_beneficial) |
349 19185180 : HintTypeField::encode(HintTypeField::decode(flags_)) |
350 19185180 : AssignedRegisterField::encode(kUnassignedRegister);
351 0 : }
352 :
353 43708546 : UseInterval* UseInterval::SplitAt(LifetimePosition pos, Zone* zone) {
354 : DCHECK(Contains(pos) && pos != start());
355 43708546 : UseInterval* after = new (zone) UseInterval(pos, end_);
356 43708551 : after->next_ = next_;
357 43708551 : next_ = nullptr;
358 43708551 : end_ = pos;
359 43708551 : return after;
360 : }
361 :
362 0 : void LifetimePosition::Print() const { StdoutStream{} << *this << std::endl; }
363 :
364 0 : std::ostream& operator<<(std::ostream& os, const LifetimePosition pos) {
365 0 : os << '@' << pos.ToInstructionIndex();
366 0 : if (pos.IsGapPosition()) {
367 : os << 'g';
368 : } else {
369 : os << 'i';
370 : }
371 0 : if (pos.IsStart()) {
372 : os << 's';
373 : } else {
374 : os << 'e';
375 : }
376 0 : return os;
377 : }
378 :
379 0 : LiveRange::LiveRange(int relative_id, MachineRepresentation rep,
380 : TopLevelLiveRange* top_level)
381 : : relative_id_(relative_id),
382 : bits_(0),
383 : last_interval_(nullptr),
384 : first_interval_(nullptr),
385 : first_pos_(nullptr),
386 : top_level_(top_level),
387 : next_(nullptr),
388 : current_interval_(nullptr),
389 : last_processed_use_(nullptr),
390 : current_hint_position_(nullptr),
391 148926934 : splitting_pointer_(nullptr) {
392 : DCHECK(AllocatedOperand::IsSupportedRepresentation(rep));
393 148926934 : bits_ = AssignedRegisterField::encode(kUnassignedRegister) |
394 148926934 : RepresentationField::encode(rep);
395 0 : }
396 :
397 0 : void LiveRange::VerifyPositions() const {
398 : // Walk the positions, verifying that each is in an interval.
399 0 : UseInterval* interval = first_interval_;
400 0 : for (UsePosition* pos = first_pos_; pos != nullptr; pos = pos->next()) {
401 0 : CHECK(Start() <= pos->pos());
402 0 : CHECK(pos->pos() <= End());
403 0 : CHECK_NOT_NULL(interval);
404 0 : while (!interval->Contains(pos->pos()) && interval->end() != pos->pos()) {
405 : interval = interval->next();
406 0 : CHECK_NOT_NULL(interval);
407 : }
408 : }
409 0 : }
410 :
411 0 : void LiveRange::VerifyIntervals() const {
412 : DCHECK(first_interval()->start() == Start());
413 : LifetimePosition last_end = first_interval()->end();
414 0 : for (UseInterval* interval = first_interval()->next(); interval != nullptr;
415 : interval = interval->next()) {
416 : DCHECK(last_end <= interval->start());
417 : last_end = interval->end();
418 : }
419 : DCHECK(last_end == End());
420 0 : }
421 :
422 0 : void LiveRange::set_assigned_register(int reg) {
423 : DCHECK(!HasRegisterAssigned() && !spilled());
424 161909151 : bits_ = AssignedRegisterField::update(bits_, reg);
425 0 : }
426 :
427 0 : void LiveRange::UnsetAssignedRegister() {
428 : DCHECK(HasRegisterAssigned() && !spilled());
429 0 : bits_ = AssignedRegisterField::update(bits_, kUnassignedRegister);
430 0 : }
431 :
432 0 : void LiveRange::AttachToNext() {
433 : DCHECK_NOT_NULL(next_);
434 : DCHECK_NE(TopLevel()->last_child_covers_, next_);
435 0 : last_interval_->set_next(next_->first_interval());
436 0 : next_->first_interval_ = nullptr;
437 0 : last_interval_ = next_->last_interval_;
438 0 : next_->last_interval_ = nullptr;
439 0 : if (first_pos() == nullptr) {
440 0 : first_pos_ = next_->first_pos();
441 : } else {
442 0 : UsePosition* ptr = first_pos_;
443 0 : while (ptr->next() != nullptr) {
444 : ptr = ptr->next();
445 : }
446 0 : ptr->set_next(next_->first_pos());
447 : }
448 0 : next_->first_pos_ = nullptr;
449 0 : LiveRange* old_next = next_;
450 0 : next_ = next_->next_;
451 0 : old_next->next_ = nullptr;
452 0 : }
453 :
454 0 : void LiveRange::Unspill() {
455 : DCHECK(spilled());
456 : set_spilled(false);
457 0 : bits_ = AssignedRegisterField::update(bits_, kUnassignedRegister);
458 0 : }
459 :
460 0 : void LiveRange::Spill() {
461 : DCHECK(!spilled());
462 : DCHECK(!TopLevel()->HasNoSpillType());
463 : set_spilled(true);
464 24669682 : bits_ = AssignedRegisterField::update(bits_, kUnassignedRegister);
465 0 : }
466 :
467 120460812 : RegisterKind LiveRange::kind() const {
468 120460812 : return IsFloatingPoint(representation()) ? FP_REGISTERS : GENERAL_REGISTERS;
469 : }
470 :
471 76357373 : UsePosition* LiveRange::FirstHintPosition(int* register_index) const {
472 313345711 : for (UsePosition* pos = first_pos_; pos != nullptr; pos = pos->next()) {
473 271644948 : if (pos->HintRegister(register_index)) return pos;
474 : }
475 : return nullptr;
476 : }
477 :
478 116553548 : UsePosition* LiveRange::NextUsePosition(LifetimePosition start) const {
479 777283177 : UsePosition* use_pos = last_processed_use_;
480 59532809 : if (use_pos == nullptr || use_pos->pos() > start) {
481 : use_pos = first_pos();
482 : }
483 777283177 : while (use_pos != nullptr && use_pos->pos() < start) {
484 : use_pos = use_pos->next();
485 : }
486 59532809 : last_processed_use_ = use_pos;
487 59532809 : return use_pos;
488 : }
489 :
490 29123143 : UsePosition* LiveRange::NextUsePositionRegisterIsBeneficial(
491 : LifetimePosition start) const {
492 80953052 : UsePosition* pos = NextUsePosition(start);
493 110076223 : while (pos != nullptr && !pos->RegisterIsBeneficial()) {
494 : pos = pos->next();
495 : }
496 29123157 : return pos;
497 : }
498 :
499 14538805 : LifetimePosition LiveRange::NextLifetimePositionRegisterIsBeneficial(
500 1692920 : const LifetimePosition& start) const {
501 14538805 : UsePosition* next_use = NextUsePositionRegisterIsBeneficial(start);
502 14538804 : if (next_use == nullptr) return End();
503 : return next_use->pos();
504 : }
505 :
506 0 : UsePosition* LiveRange::PreviousUsePositionRegisterIsBeneficial(
507 62803 : LifetimePosition start) const {
508 536036 : UsePosition* pos = first_pos();
509 : UsePosition* prev = nullptr;
510 330821 : while (pos != nullptr && pos->pos() < start) {
511 268018 : if (pos->RegisterIsBeneficial()) prev = pos;
512 : pos = pos->next();
513 : }
514 0 : return prev;
515 : }
516 :
517 27617738 : UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) const {
518 78678211 : UsePosition* pos = NextUsePosition(start);
519 106295965 : while (pos != nullptr && pos->type() != UsePositionType::kRequiresRegister) {
520 : pos = pos->next();
521 : }
522 27617746 : return pos;
523 : }
524 :
525 2250047 : UsePosition* LiveRange::NextSlotPosition(LifetimePosition start) const {
526 7857144 : for (UsePosition* pos = NextUsePosition(start); pos != nullptr;
527 : pos = pos->next()) {
528 5607713 : if (pos->type() != UsePositionType::kRequiresSlot) continue;
529 : return pos;
530 : }
531 : return nullptr;
532 : }
533 :
534 15594287 : bool LiveRange::CanBeSpilled(LifetimePosition pos) const {
535 : // We cannot spill a live range that has a use requiring a register
536 : // at the current or the immediate next position.
537 15594287 : UsePosition* use_pos = NextRegisterPosition(pos);
538 15594287 : if (use_pos == nullptr) return true;
539 : return use_pos->pos() > pos.NextStart().End();
540 : }
541 :
542 84542153 : bool LiveRange::IsTopLevel() const { return top_level_ == this; }
543 :
544 196144324 : InstructionOperand LiveRange::GetAssignedOperand() const {
545 : DCHECK(!IsEmpty());
546 136955064 : if (HasRegisterAssigned()) {
547 : DCHECK(!spilled());
548 : return AllocatedOperand(LocationOperand::REGISTER, representation(),
549 77765804 : assigned_register());
550 : }
551 : DCHECK(spilled());
552 : DCHECK(!HasRegisterAssigned());
553 59189260 : if (TopLevel()->HasSpillOperand()) {
554 39686880 : InstructionOperand* op = TopLevel()->GetSpillOperand();
555 : DCHECK(!op->IsUnallocated());
556 39686880 : return *op;
557 : }
558 19502380 : return TopLevel()->GetSpillRangeOperand();
559 : }
560 :
561 0 : UseInterval* LiveRange::FirstSearchIntervalForPosition(
562 : LifetimePosition position) const {
563 905578960 : if (current_interval_ == nullptr) return first_interval_;
564 666574518 : if (current_interval_->start() > position) {
565 2600543 : current_interval_ = nullptr;
566 2140305 : return first_interval_;
567 : }
568 : return current_interval_;
569 : }
570 :
571 0 : void LiveRange::AdvanceLastProcessedMarker(
572 : UseInterval* to_start_of, LifetimePosition but_not_past) const {
573 540132851 : if (to_start_of == nullptr) return;
574 540136042 : if (to_start_of->start() > but_not_past) return;
575 272881661 : LifetimePosition start = current_interval_ == nullptr
576 : ? LifetimePosition::Invalid()
577 272881661 : : current_interval_->start();
578 272881661 : if (to_start_of->start() > start) {
579 104526632 : current_interval_ = to_start_of;
580 : }
581 : }
582 :
583 159721942 : LiveRange* LiveRange::SplitAt(LifetimePosition position, Zone* zone) {
584 : int new_id = TopLevel()->GetNextChildId();
585 : LiveRange* child = new (zone) LiveRange(new_id, representation(), TopLevel());
586 39930562 : child->set_bundle(bundle_);
587 : // If we split, we do so because we're about to switch registers or move
588 : // to/from a slot, so there's no value in connecting hints.
589 39930562 : DetachAt(position, child, zone, DoNotConnectHints);
590 :
591 39930454 : child->top_level_ = TopLevel();
592 39930454 : child->next_ = next_;
593 39930454 : next_ = child;
594 39930454 : return child;
595 : }
596 :
597 65372839 : UsePosition* LiveRange::DetachAt(LifetimePosition position, LiveRange* result,
598 : Zone* zone,
599 54987739 : HintConnectionOption connect_hints) {
600 : DCHECK(Start() < position);
601 : DCHECK(End() > position);
602 : DCHECK(result->IsEmpty());
603 : // Find the last interval that ends before the position. If the
604 : // position is contained in one of the intervals in the chain, we
605 : // split that interval and use the first part.
606 37806661 : UseInterval* current = FirstSearchIntervalForPosition(position);
607 :
608 : // If the split position coincides with the beginning of a use interval
609 : // we need to split use positons in a special way.
610 : bool split_at_start = false;
611 :
612 65372839 : if (current->start() == position) {
613 : // When splitting at start we need to locate the previous use interval.
614 1094 : current = first_interval_;
615 : }
616 :
617 : UseInterval* after = nullptr;
618 81514242 : while (current != nullptr) {
619 81514173 : if (current->Contains(position)) {
620 43707512 : after = current->SplitAt(position, zone);
621 43708593 : break;
622 : }
623 : UseInterval* next = current->next();
624 37806661 : if (next->start() >= position) {
625 : split_at_start = (next->start() == position);
626 : after = next;
627 : current->set_next(nullptr);
628 : break;
629 : }
630 : current = next;
631 : }
632 : DCHECK_NOT_NULL(after);
633 :
634 : // Partition original use intervals to the two live ranges.
635 : UseInterval* before = current;
636 : result->last_interval_ =
637 65373920 : (last_interval_ == before)
638 : ? after // Only interval in the range after split.
639 65373920 : : last_interval_; // Last interval of the original range.
640 65373920 : result->first_interval_ = after;
641 65373920 : last_interval_ = before;
642 :
643 : // Find the last use position before the split and the first use
644 : // position after it.
645 52210359 : UsePosition* use_after =
646 77056949 : splitting_pointer_ == nullptr || splitting_pointer_->pos() > position
647 : ? first_pos()
648 120361659 : : splitting_pointer_;
649 : UsePosition* use_before = nullptr;
650 65373920 : if (split_at_start) {
651 : // The split position coincides with the beginning of a use interval (the
652 : // end of a lifetime hole). Use at this position should be attributed to
653 : // the split child because split child owns use interval covering it.
654 6393931 : while (use_after != nullptr && use_after->pos() < position) {
655 : use_before = use_after;
656 : use_after = use_after->next();
657 : }
658 : } else {
659 111190348 : while (use_after != nullptr && use_after->pos() <= position) {
660 : use_before = use_after;
661 : use_after = use_after->next();
662 : }
663 : }
664 :
665 : // Partition original use positions to the two live ranges.
666 65373920 : if (use_before != nullptr) {
667 : use_before->set_next(nullptr);
668 : } else {
669 39137814 : first_pos_ = nullptr;
670 : }
671 65373920 : result->first_pos_ = use_after;
672 :
673 : // Discard cached iteration state. It might be pointing
674 : // to the use that no longer belongs to this live range.
675 65373920 : last_processed_use_ = nullptr;
676 65373920 : current_interval_ = nullptr;
677 :
678 78944339 : if (connect_hints == ConnectHints && use_before != nullptr &&
679 13570419 : use_after != nullptr) {
680 : use_after->SetHint(use_before);
681 : }
682 : #ifdef DEBUG
683 : VerifyChildStructure();
684 : result->VerifyChildStructure();
685 : #endif
686 65373920 : return use_before;
687 : }
688 :
689 0 : void LiveRange::UpdateParentForAllChildren(TopLevelLiveRange* new_top_level) {
690 35061747 : LiveRange* child = this;
691 40431158 : for (; child != nullptr; child = child->next()) {
692 35061747 : child->top_level_ = new_top_level;
693 : }
694 0 : }
695 :
696 81210161 : void LiveRange::ConvertUsesToOperand(const InstructionOperand& op,
697 81210161 : const InstructionOperand& spill_op) {
698 290796918 : for (UsePosition* pos = first_pos(); pos != nullptr; pos = pos->next()) {
699 : DCHECK(Start() <= pos->pos() && pos->pos() <= End());
700 104928951 : if (!pos->HasOperand()) continue;
701 104657806 : switch (pos->type()) {
702 : case UsePositionType::kRequiresSlot:
703 : DCHECK(spill_op.IsStackSlot() || spill_op.IsFPStackSlot());
704 : InstructionOperand::ReplaceWith(pos->operand(), &spill_op);
705 : break;
706 : case UsePositionType::kRequiresRegister:
707 : DCHECK(op.IsRegister() || op.IsFPRegister());
708 : V8_FALLTHROUGH;
709 : case UsePositionType::kRegisterOrSlot:
710 : case UsePositionType::kRegisterOrSlotOrConstant:
711 : InstructionOperand::ReplaceWith(pos->operand(), &op);
712 : break;
713 : }
714 : }
715 81210161 : }
716 :
717 : // This implements an ordering on live ranges so that they are ordered by their
718 : // start positions. This is needed for the correctness of the register
719 : // allocation algorithm. If two live ranges start at the same offset then there
720 : // is a tie breaker based on where the value is first used. This part of the
721 : // ordering is merely a heuristic.
722 427349800 : bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
723 : LifetimePosition start = Start();
724 : LifetimePosition other_start = other->Start();
725 369847015 : if (start == other_start) {
726 : UsePosition* pos = first_pos();
727 : UsePosition* other_pos = other->first_pos();
728 : // To make the order total, handle the case where both positions are null.
729 43809771 : if (pos == other_pos) return TopLevel()->vreg() < other->TopLevel()->vreg();
730 13520787 : if (pos == nullptr) return false;
731 13194654 : if (other_pos == nullptr) return true;
732 : // To make the order total, handle the case where both positions are equal.
733 12805114 : if (pos->pos() == other_pos->pos())
734 13693014 : return TopLevel()->vreg() < other->TopLevel()->vreg();
735 : return pos->pos() < other_pos->pos();
736 : }
737 346229900 : return start < other_start;
738 : }
739 :
740 39254638 : void LiveRange::SetUseHints(int register_index) {
741 213432466 : for (UsePosition* pos = first_pos(); pos != nullptr; pos = pos->next()) {
742 87223894 : if (!pos->HasOperand()) continue;
743 86953934 : switch (pos->type()) {
744 : case UsePositionType::kRequiresSlot:
745 : break;
746 : case UsePositionType::kRequiresRegister:
747 : case UsePositionType::kRegisterOrSlot:
748 : case UsePositionType::kRegisterOrSlotOrConstant:
749 : pos->set_assigned_register(register_index);
750 : break;
751 : }
752 : }
753 39254638 : }
754 :
755 234891494 : bool LiveRange::CanCover(LifetimePosition position) const {
756 257386124 : if (IsEmpty()) return false;
757 492278006 : return Start() <= position && position < End();
758 : }
759 :
760 256708946 : bool LiveRange::Covers(LifetimePosition position) const {
761 256708946 : if (!CanCover(position)) return false;
762 : UseInterval* start_search = FirstSearchIntervalForPosition(position);
763 368414011 : for (UseInterval* interval = start_search; interval != nullptr;
764 : interval = interval->next()) {
765 : DCHECK(interval->next() == nullptr ||
766 : interval->next()->start() >= interval->start());
767 : AdvanceLastProcessedMarker(interval, position);
768 368411281 : if (interval->Contains(position)) return true;
769 259224582 : if (interval->start() > position) return false;
770 : }
771 : return false;
772 : }
773 :
774 0 : LifetimePosition LiveRange::NextEndAfter(LifetimePosition position) const {
775 0 : UseInterval* start_search = FirstSearchIntervalForPosition(position);
776 111985331 : while (start_search->end() < position) {
777 : start_search = start_search->next();
778 : }
779 0 : return start_search->end();
780 : }
781 :
782 0 : LifetimePosition LiveRange::NextStartAfter(LifetimePosition position) const {
783 92717955 : UseInterval* start_search = FirstSearchIntervalForPosition(position);
784 228943469 : while (start_search->start() < position) {
785 : start_search = start_search->next();
786 : }
787 0 : return start_search->start();
788 : }
789 :
790 2046332997 : LifetimePosition LiveRange::FirstIntersection(LiveRange* other) const {
791 130596345 : UseInterval* b = other->first_interval();
792 387275997 : if (b == nullptr) return LifetimePosition::Invalid();
793 : LifetimePosition advance_last_processed_up_to = b->start();
794 399805540 : UseInterval* a = FirstSearchIntervalForPosition(b->start());
795 1076869909 : while (a != nullptr && b != nullptr) {
796 648372366 : if (a->start() > other->End()) break;
797 611111722 : if (b->start() > End()) break;
798 606072222 : LifetimePosition cur_intersection = a->Intersect(b);
799 606093248 : if (cur_intersection.IsValid()) {
800 75691363 : return cur_intersection;
801 : }
802 530401885 : if (a->start() < b->start()) {
803 : a = a->next();
804 799378452 : if (a == nullptr || a->start() > other->End()) break;
805 : AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
806 : } else {
807 : b = b->next();
808 : }
809 : }
810 : return LifetimePosition::Invalid();
811 : }
812 :
813 0 : void LiveRange::Print(const RegisterConfiguration* config,
814 : bool with_children) const {
815 0 : StdoutStream os;
816 : PrintableLiveRange wrapper;
817 0 : wrapper.register_configuration_ = config;
818 0 : for (const LiveRange* i = this; i != nullptr; i = i->next()) {
819 0 : wrapper.range_ = i;
820 0 : os << wrapper << std::endl;
821 0 : if (!with_children) break;
822 0 : }
823 0 : }
824 :
825 0 : void LiveRange::Print(bool with_children) const {
826 0 : Print(RegisterConfiguration::Default(), with_children);
827 0 : }
828 :
829 0 : bool LiveRange::RegisterFromBundle(int* hint) const {
830 44826003 : if (bundle_ == nullptr || bundle_->reg() == kUnassignedRegister) return false;
831 2577350 : *hint = bundle_->reg();
832 0 : return true;
833 : }
834 :
835 0 : void LiveRange::UpdateBundleRegister(int reg) const {
836 39254677 : if (bundle_ == nullptr || bundle_->reg() != kUnassignedRegister) return;
837 : bundle_->set_reg(reg);
838 : }
839 :
840 : struct TopLevelLiveRange::SpillMoveInsertionList : ZoneObject {
841 : SpillMoveInsertionList(int gap_index, InstructionOperand* operand,
842 : SpillMoveInsertionList* next)
843 23592029 : : gap_index(gap_index), operand(operand), next(next) {}
844 : const int gap_index;
845 : InstructionOperand* const operand;
846 : SpillMoveInsertionList* const next;
847 : };
848 :
849 71 : TopLevelLiveRange::TopLevelLiveRange(int vreg, MachineRepresentation rep)
850 : : LiveRange(0, rep, this),
851 : vreg_(vreg),
852 : last_child_id_(0),
853 : splintered_from_(nullptr),
854 : spill_operand_(nullptr),
855 : spill_move_insertion_locations_(nullptr),
856 : spilled_in_deferred_blocks_(false),
857 : spill_start_index_(kMaxInt),
858 : last_pos_(nullptr),
859 : last_child_covers_(this),
860 : splinter_(nullptr),
861 97123160 : has_preassigned_slot_(false) {
862 : bits_ |= SpillTypeField::encode(SpillType::kNoSpillType);
863 71 : }
864 :
865 : #if DEBUG
866 : int TopLevelLiveRange::debug_virt_reg() const {
867 : return IsSplinter() ? splintered_from()->vreg() : vreg();
868 : }
869 : #endif
870 :
871 0 : void TopLevelLiveRange::RecordSpillLocation(Zone* zone, int gap_index,
872 : InstructionOperand* operand) {
873 : DCHECK(HasNoSpillType());
874 : spill_move_insertion_locations_ = new (zone) SpillMoveInsertionList(
875 47184058 : gap_index, operand, spill_move_insertion_locations_);
876 0 : }
877 :
878 17357557 : void TopLevelLiveRange::CommitSpillMoves(InstructionSequence* sequence,
879 : const InstructionOperand& op,
880 23836083 : bool might_be_duplicated) {
881 : DCHECK_IMPLIES(op.IsConstant(), GetSpillMoveInsertionLocations() == nullptr);
882 : Zone* zone = sequence->zone();
883 :
884 21097741 : for (SpillMoveInsertionList* to_spill = GetSpillMoveInsertionLocations();
885 : to_spill != nullptr; to_spill = to_spill->next) {
886 3740132 : Instruction* instr = sequence->InstructionAt(to_spill->gap_index);
887 : ParallelMove* move =
888 3740154 : instr->GetOrCreateParallelMove(Instruction::START, zone);
889 : // Skip insertion if it's possible that the move exists already as a
890 : // constraint move from a fixed output register to a slot.
891 6478552 : if (might_be_duplicated || has_preassigned_slot()) {
892 : bool found = false;
893 2235113 : for (MoveOperands* move_op : *move) {
894 829408 : if (move_op->IsEliminated()) continue;
895 2480145 : if (move_op->source().Equals(*to_spill->operand) &&
896 : move_op->destination().Equals(op)) {
897 : found = true;
898 732289 : if (has_preassigned_slot()) move_op->Eliminate();
899 : break;
900 : }
901 : }
902 1068997 : if (found) continue;
903 : }
904 3007844 : if (!has_preassigned_slot()) {
905 2940581 : move->AddMove(*to_spill->operand, op);
906 : }
907 : }
908 17357609 : }
909 :
910 0 : void TopLevelLiveRange::SetSpillOperand(InstructionOperand* operand) {
911 : DCHECK(HasNoSpillType());
912 : DCHECK(!operand->IsUnallocated() && !operand->IsImmediate());
913 : set_spill_type(SpillType::kSpillOperand);
914 13620517 : spill_operand_ = operand;
915 0 : }
916 :
917 0 : void TopLevelLiveRange::SetSpillRange(SpillRange* spill_range) {
918 : DCHECK(!HasSpillOperand());
919 : DCHECK(spill_range);
920 9808529 : spill_range_ = spill_range;
921 0 : }
922 :
923 27774583 : AllocatedOperand TopLevelLiveRange::GetSpillRangeOperand() const {
924 27774583 : SpillRange* spill_range = GetSpillRange();
925 : int index = spill_range->assigned_slot();
926 879124 : return AllocatedOperand(LocationOperand::STACK_SLOT, representation(), index);
927 : }
928 :
929 13570439 : void TopLevelLiveRange::Splinter(LifetimePosition start, LifetimePosition end,
930 57206100 : Zone* zone) {
931 : DCHECK(start != Start() || end != End());
932 : DCHECK(start < end);
933 :
934 39014090 : TopLevelLiveRange splinter_temp(-1, representation());
935 : UsePosition* last_in_splinter = nullptr;
936 : // Live ranges defined in deferred blocks stay in deferred blocks, so we
937 : // don't need to splinter them. That means that start should always be
938 : // after the beginning of the range.
939 : DCHECK(start > Start());
940 :
941 13570439 : if (end >= End()) {
942 : DCHECK(start > Start());
943 1697203 : DetachAt(start, &splinter_temp, zone, ConnectHints);
944 1697254 : next_ = nullptr;
945 : } else {
946 : DCHECK(start < End() && Start() < end);
947 :
948 : const int kInvalidId = std::numeric_limits<int>::max();
949 :
950 11873236 : UsePosition* last = DetachAt(start, &splinter_temp, zone, ConnectHints);
951 :
952 : LiveRange end_part(kInvalidId, this->representation(), nullptr);
953 : // The last chunk exits the deferred region, and we don't want to connect
954 : // hints here, because the non-deferred region shouldn't be affected
955 : // by allocation decisions on the deferred path.
956 : last_in_splinter =
957 11873212 : splinter_temp.DetachAt(end, &end_part, zone, DoNotConnectHints);
958 :
959 11873210 : next_ = end_part.next_;
960 11873210 : last_interval_->set_next(end_part.first_interval_);
961 : // The next splinter will happen either at or after the current interval.
962 : // We can optimize DetachAt by setting current_interval_ accordingly,
963 : // which will then be picked up by FirstSearchIntervalForPosition.
964 11873210 : current_interval_ = last_interval_;
965 11873210 : last_interval_ = end_part.last_interval_;
966 :
967 11873210 : if (first_pos_ == nullptr) {
968 1025855 : first_pos_ = end_part.first_pos_;
969 : } else {
970 10847355 : splitting_pointer_ = last;
971 10847355 : if (last != nullptr) last->set_next(end_part.first_pos_);
972 : }
973 : }
974 :
975 13570464 : if (splinter()->IsEmpty()) {
976 5369438 : splinter()->first_interval_ = splinter_temp.first_interval_;
977 5369438 : splinter()->last_interval_ = splinter_temp.last_interval_;
978 : } else {
979 8201026 : splinter()->last_interval_->set_next(splinter_temp.first_interval_);
980 8201026 : splinter()->last_interval_ = splinter_temp.last_interval_;
981 : }
982 13570464 : if (splinter()->first_pos() == nullptr) {
983 10065008 : splinter()->first_pos_ = splinter_temp.first_pos_;
984 : } else {
985 3505456 : splinter()->last_pos_->set_next(splinter_temp.first_pos_);
986 : }
987 13570464 : if (last_in_splinter != nullptr) {
988 2917301 : splinter()->last_pos_ = last_in_splinter;
989 : } else {
990 14431243 : if (splinter()->first_pos() != nullptr &&
991 3778080 : splinter()->last_pos_ == nullptr) {
992 1396641 : splinter()->last_pos_ = splinter()->first_pos();
993 2924244 : for (UsePosition* pos = splinter()->first_pos(); pos != nullptr;
994 : pos = pos->next()) {
995 1527603 : splinter()->last_pos_ = pos;
996 : }
997 : }
998 : }
999 : #if DEBUG
1000 : Verify();
1001 : splinter()->Verify();
1002 : #endif
1003 13570464 : }
1004 :
1005 5369384 : void TopLevelLiveRange::SetSplinteredFrom(TopLevelLiveRange* splinter_parent) {
1006 5369384 : splintered_from_ = splinter_parent;
1007 5369384 : if (!HasSpillOperand() && splinter_parent->spill_range_ != nullptr) {
1008 3010362 : SetSpillRange(splinter_parent->spill_range_);
1009 : }
1010 5369384 : }
1011 :
1012 5984211 : void TopLevelLiveRange::UpdateSpillRangePostMerge(TopLevelLiveRange* merged) {
1013 : DCHECK(merged->TopLevel() == this);
1014 :
1015 6319622 : if (HasNoSpillType() && merged->HasSpillRange()) {
1016 : set_spill_type(merged->spill_type());
1017 : DCHECK_LT(0, GetSpillRange()->live_ranges().size());
1018 614799 : merged->spill_range_ = nullptr;
1019 : merged->bits_ =
1020 1229598 : SpillTypeField::update(merged->bits_, SpillType::kNoSpillType);
1021 : }
1022 5369412 : }
1023 :
1024 9873339 : void TopLevelLiveRange::Merge(TopLevelLiveRange* other, Zone* zone) {
1025 : DCHECK(Start() < other->Start());
1026 : DCHECK(other->splintered_from() == this);
1027 :
1028 62504917 : LiveRange* first = this;
1029 17287249 : LiveRange* second = other;
1030 : DCHECK(first->Start() < second->Start());
1031 55359667 : while (first != nullptr && second != nullptr) {
1032 : DCHECK(first != second);
1033 : // Make sure the ranges are in order each time we iterate.
1034 44620902 : if (second->Start() < first->Start()) {
1035 : LiveRange* tmp = second;
1036 : second = first;
1037 : first = tmp;
1038 : continue;
1039 : }
1040 :
1041 27309898 : if (first->End() <= second->Start()) {
1042 14675888 : if (first->next() == nullptr ||
1043 : first->next()->Start() > second->Start()) {
1044 : // First is in order before second.
1045 : LiveRange* temp = first->next();
1046 5393285 : first->next_ = second;
1047 : first = temp;
1048 : } else {
1049 : // First is in order before its successor (or second), so advance first.
1050 : first = first->next();
1051 : }
1052 : continue;
1053 : }
1054 :
1055 : DCHECK(first->Start() < second->Start());
1056 : // If first and second intersect, split first.
1057 17287249 : if (first->Start() < second->End() && second->Start() < first->End()) {
1058 17287200 : LiveRange* temp = first->SplitAt(second->Start(), zone);
1059 17287257 : CHECK(temp != first);
1060 : temp->set_spilled(first->spilled());
1061 17287257 : if (!temp->spilled())
1062 : temp->set_assigned_register(first->assigned_register());
1063 :
1064 17287257 : first->next_ = second;
1065 : first = temp;
1066 17287257 : continue;
1067 : }
1068 : DCHECK(first->End() <= second->Start());
1069 : }
1070 :
1071 16108232 : TopLevel()->UpdateParentForAllChildren(TopLevel());
1072 5369411 : TopLevel()->UpdateSpillRangePostMerge(other);
1073 15242805 : TopLevel()->set_has_slot_use(TopLevel()->has_slot_use() ||
1074 : other->has_slot_use());
1075 :
1076 : #if DEBUG
1077 : Verify();
1078 : #endif
1079 5369410 : }
1080 :
1081 0 : void TopLevelLiveRange::VerifyChildrenInOrder() const {
1082 0 : LifetimePosition last_end = End();
1083 0 : for (const LiveRange* child = this->next(); child != nullptr;
1084 : child = child->next()) {
1085 : DCHECK(last_end <= child->Start());
1086 : last_end = child->End();
1087 : }
1088 0 : }
1089 :
1090 0 : LiveRange* TopLevelLiveRange::GetChildCovers(LifetimePosition pos) {
1091 0 : LiveRange* child = last_child_covers_;
1092 0 : while (child != nullptr && child->End() <= pos) {
1093 : child = child->next();
1094 : }
1095 0 : last_child_covers_ = child;
1096 0 : return !child || !child->Covers(pos) ? nullptr : child;
1097 : }
1098 :
1099 0 : void TopLevelLiveRange::Verify() const {
1100 : VerifyChildrenInOrder();
1101 0 : for (const LiveRange* child = this; child != nullptr; child = child->next()) {
1102 : VerifyChildStructure();
1103 : }
1104 0 : }
1105 :
1106 63224635 : void TopLevelLiveRange::ShortenTo(LifetimePosition start) {
1107 63224635 : TRACE("Shorten live range %d to [%d\n", vreg(), start.value());
1108 : DCHECK_NOT_NULL(first_interval_);
1109 : DCHECK(first_interval_->start() <= start);
1110 : DCHECK(start < first_interval_->end());
1111 63224635 : first_interval_->set_start(start);
1112 63224635 : }
1113 :
1114 2274845 : void TopLevelLiveRange::EnsureInterval(LifetimePosition start,
1115 0 : LifetimePosition end, Zone* zone) {
1116 2274845 : TRACE("Ensure live range %d in interval [%d %d[\n", vreg(), start.value(),
1117 : end.value());
1118 : LifetimePosition new_end = end;
1119 8805455 : while (first_interval_ != nullptr && first_interval_->start() <= end) {
1120 4255755 : if (first_interval_->end() > end) {
1121 : new_end = first_interval_->end();
1122 : }
1123 4255755 : first_interval_ = first_interval_->next();
1124 : }
1125 :
1126 4549700 : UseInterval* new_interval = new (zone) UseInterval(start, new_end);
1127 2274850 : new_interval->set_next(first_interval_);
1128 2274850 : first_interval_ = new_interval;
1129 2274850 : if (new_interval->next() == nullptr) {
1130 898765 : last_interval_ = new_interval;
1131 : }
1132 2274850 : }
1133 :
1134 388037619 : void TopLevelLiveRange::AddUseInterval(LifetimePosition start,
1135 0 : LifetimePosition end, Zone* zone) {
1136 388037619 : TRACE("Add to live range %d interval [%d %d[\n", vreg(), start.value(),
1137 : end.value());
1138 388047395 : if (first_interval_ == nullptr) {
1139 76352230 : UseInterval* interval = new (zone) UseInterval(start, end);
1140 76352147 : first_interval_ = interval;
1141 76352147 : last_interval_ = interval;
1142 : } else {
1143 311695165 : if (end == first_interval_->start()) {
1144 : first_interval_->set_start(start);
1145 191851704 : } else if (end < first_interval_->start()) {
1146 129660976 : UseInterval* interval = new (zone) UseInterval(start, end);
1147 129660834 : interval->set_next(first_interval_);
1148 129660834 : first_interval_ = interval;
1149 : } else {
1150 : // Order of instruction's processing (see ProcessInstructions) guarantees
1151 : // that each new use interval either precedes, intersects with or touches
1152 : // the last added interval.
1153 : DCHECK(start <= first_interval_->end());
1154 : first_interval_->set_start(Min(start, first_interval_->start()));
1155 62190728 : first_interval_->set_end(Max(end, first_interval_->end()));
1156 : }
1157 : }
1158 388047170 : }
1159 :
1160 106796282 : void TopLevelLiveRange::AddUsePosition(UsePosition* use_pos) {
1161 : LifetimePosition pos = use_pos->pos();
1162 106796282 : TRACE("Add to live range %d use position %d\n", vreg(), pos.value());
1163 : UsePosition* prev_hint = nullptr;
1164 142369 : UsePosition* prev = nullptr;
1165 106939230 : UsePosition* current = first_pos_;
1166 213736093 : while (current != nullptr && current->pos() < pos) {
1167 142367 : prev_hint = current->HasHint() ? current : prev_hint;
1168 : prev = current;
1169 : current = current->next();
1170 : }
1171 :
1172 106796862 : if (prev == nullptr) {
1173 106654493 : use_pos->set_next(first_pos_);
1174 106654493 : first_pos_ = use_pos;
1175 : } else {
1176 : use_pos->set_next(prev->next());
1177 : prev->set_next(use_pos);
1178 : }
1179 :
1180 213593827 : if (prev_hint == nullptr && use_pos->HasHint()) {
1181 24773992 : current_hint_position_ = use_pos;
1182 : }
1183 106796947 : }
1184 :
1185 312595883 : static bool AreUseIntervalsIntersecting(UseInterval* interval1,
1186 10809541 : UseInterval* interval2) {
1187 580264740 : while (interval1 != nullptr && interval2 != nullptr) {
1188 323197625 : if (interval1->start() < interval2->start()) {
1189 101052475 : if (interval1->end() > interval2->start()) {
1190 : return true;
1191 : }
1192 : interval1 = interval1->next();
1193 : } else {
1194 222145150 : if (interval2->end() > interval1->start()) {
1195 : return true;
1196 : }
1197 : interval2 = interval2->next();
1198 : }
1199 : }
1200 : return false;
1201 : }
1202 :
1203 0 : std::ostream& operator<<(std::ostream& os,
1204 : const PrintableLiveRange& printable_range) {
1205 0 : const LiveRange* range = printable_range.range_;
1206 0 : os << "Range: " << range->TopLevel()->vreg() << ":" << range->relative_id()
1207 0 : << " ";
1208 0 : if (range->TopLevel()->is_phi()) os << "phi ";
1209 0 : if (range->TopLevel()->is_non_loop_phi()) os << "nlphi ";
1210 :
1211 0 : os << "{" << std::endl;
1212 0 : UseInterval* interval = range->first_interval();
1213 0 : UsePosition* use_pos = range->first_pos();
1214 0 : while (use_pos != nullptr) {
1215 0 : if (use_pos->HasOperand()) {
1216 0 : os << *use_pos->operand() << use_pos->pos() << " ";
1217 : }
1218 : use_pos = use_pos->next();
1219 : }
1220 : os << std::endl;
1221 :
1222 0 : while (interval != nullptr) {
1223 0 : os << '[' << interval->start() << ", " << interval->end() << ')'
1224 : << std::endl;
1225 : interval = interval->next();
1226 : }
1227 0 : os << "}";
1228 0 : return os;
1229 : }
1230 :
1231 : namespace {
1232 0 : void PrintBlockRow(std::ostream& os, const InstructionBlocks& blocks) {
1233 0 : os << " ";
1234 0 : for (auto block : blocks) {
1235 : LifetimePosition start_pos = LifetimePosition::GapFromInstructionIndex(
1236 : block->first_instruction_index());
1237 : LifetimePosition end_pos = LifetimePosition::GapFromInstructionIndex(
1238 : block->last_instruction_index())
1239 : .NextFullStart();
1240 0 : int length = end_pos.value() - start_pos.value();
1241 0 : constexpr int kMaxPrefixLength = 32;
1242 : char buffer[kMaxPrefixLength];
1243 : int rpo_number = block->rpo_number().ToInt();
1244 0 : const char* deferred_marker = block->IsDeferred() ? "(deferred)" : "";
1245 0 : int max_prefix_length = std::min(length, kMaxPrefixLength);
1246 : int prefix = snprintf(buffer, max_prefix_length, "[-B%d-%s", rpo_number,
1247 0 : deferred_marker);
1248 0 : os << buffer;
1249 0 : int remaining = length - std::min(prefix, max_prefix_length) - 1;
1250 0 : for (int i = 0; i < remaining; ++i) os << '-';
1251 : os << ']';
1252 : }
1253 : os << '\n';
1254 0 : }
1255 : } // namespace
1256 :
1257 0 : void LinearScanAllocator::PrintRangeRow(std::ostream& os,
1258 0 : const TopLevelLiveRange* toplevel) {
1259 : int position = 0;
1260 0 : os << std::setw(3) << toplevel->vreg()
1261 0 : << (toplevel->IsSplinter() ? "s:" : ": ");
1262 :
1263 0 : for (const LiveRange* range = toplevel; range != nullptr;
1264 : range = range->next()) {
1265 0 : for (UseInterval* interval = range->first_interval(); interval != nullptr;
1266 : interval = interval->next()) {
1267 : LifetimePosition start = interval->start();
1268 : LifetimePosition end = interval->end();
1269 0 : CHECK_GE(start.value(), position);
1270 0 : for (; start.value() > position; position++) {
1271 : os << ' ';
1272 : }
1273 0 : int length = end.value() - start.value();
1274 0 : constexpr int kMaxPrefixLength = 32;
1275 : char buffer[kMaxPrefixLength];
1276 0 : int max_prefix_length = std::min(length + 1, kMaxPrefixLength);
1277 : int prefix;
1278 0 : if (range->spilled()) {
1279 0 : prefix = snprintf(buffer, max_prefix_length, "|ss");
1280 : } else {
1281 : const char* reg_name;
1282 0 : if (range->assigned_register() == kUnassignedRegister) {
1283 : reg_name = "???";
1284 : } else {
1285 : reg_name = RegisterName(range->assigned_register());
1286 : }
1287 0 : prefix = snprintf(buffer, max_prefix_length, "|%s", reg_name);
1288 : }
1289 0 : os << buffer;
1290 0 : position += std::min(prefix, max_prefix_length - 1);
1291 0 : CHECK_GE(end.value(), position);
1292 0 : const char line_style = range->spilled() ? '-' : '=';
1293 0 : for (; end.value() > position; position++) {
1294 : os << line_style;
1295 : }
1296 : }
1297 : }
1298 : os << '\n';
1299 0 : }
1300 :
1301 0 : void LinearScanAllocator::PrintRangeOverview(std::ostream& os) {
1302 0 : PrintBlockRow(os, code()->instruction_blocks());
1303 0 : for (auto toplevel : data()->fixed_live_ranges()) {
1304 0 : if (toplevel == nullptr) continue;
1305 0 : PrintRangeRow(os, toplevel);
1306 : }
1307 : int rowcount = 0;
1308 0 : for (auto toplevel : data()->live_ranges()) {
1309 0 : if (!CanProcessRange(toplevel)) continue;
1310 0 : if (rowcount++ % 10 == 0) PrintBlockRow(os, code()->instruction_blocks());
1311 0 : PrintRangeRow(os, toplevel);
1312 : }
1313 0 : }
1314 :
1315 4951834 : SpillRange::SpillRange(TopLevelLiveRange* parent, Zone* zone)
1316 : : live_ranges_(zone),
1317 : assigned_slot_(kUnassignedSlot),
1318 9903668 : byte_width_(GetByteWidth(parent->representation())) {
1319 : // Spill ranges are created for top level, non-splintered ranges. This is so
1320 : // that, when merging decisions are made, we consider the full extent of the
1321 : // virtual register, and avoid clobbering it.
1322 : DCHECK(!parent->IsSplinter());
1323 : UseInterval* result = nullptr;
1324 : UseInterval* node = nullptr;
1325 : // Copy the intervals for all ranges.
1326 12645230 : for (LiveRange* range = parent; range != nullptr; range = range->next()) {
1327 11505253 : UseInterval* src = range->first_interval();
1328 26891977 : while (src != nullptr) {
1329 : UseInterval* new_node = new (zone) UseInterval(src->start(), src->end());
1330 11505253 : if (result == nullptr) {
1331 : result = new_node;
1332 : } else {
1333 : node->set_next(new_node);
1334 : }
1335 : node = new_node;
1336 : src = src->next();
1337 : }
1338 : }
1339 4951868 : use_interval_ = result;
1340 4951868 : live_ranges().push_back(parent);
1341 4951874 : end_position_ = node->end();
1342 4951874 : parent->SetSpillRange(this);
1343 4951874 : }
1344 :
1345 258474622 : bool SpillRange::IsIntersectingWith(SpillRange* other) const {
1346 775423875 : if (this->use_interval_ == nullptr || other->use_interval_ == nullptr ||
1347 516880481 : this->End() <= other->use_interval_->start() ||
1348 : other->End() <= this->use_interval_->start()) {
1349 : return false;
1350 : }
1351 256859318 : return AreUseIntervalsIntersecting(use_interval_, other->use_interval_);
1352 : }
1353 :
1354 776037168 : bool SpillRange::TryMerge(SpillRange* other) {
1355 517425942 : if (HasSlot() || other->HasSlot()) return false;
1356 258611226 : if (byte_width() != other->byte_width() || IsIntersectingWith(other))
1357 : return false;
1358 :
1359 : LifetimePosition max = LifetimePosition::MaxPosition();
1360 1822859 : if (End() < other->End() && other->End() != max) {
1361 71868 : end_position_ = other->End();
1362 : }
1363 1822859 : other->end_position_ = max;
1364 :
1365 1822859 : MergeDisjointIntervals(other->use_interval_);
1366 1822858 : other->use_interval_ = nullptr;
1367 :
1368 5492009 : for (TopLevelLiveRange* range : other->live_ranges()) {
1369 : DCHECK(range->GetSpillRange() == other);
1370 : range->SetSpillRange(this);
1371 : }
1372 :
1373 : live_ranges().insert(live_ranges().end(), other->live_ranges().begin(),
1374 1822858 : other->live_ranges().end());
1375 : other->live_ranges().clear();
1376 :
1377 1822857 : return true;
1378 : }
1379 :
1380 1822858 : void SpillRange::MergeDisjointIntervals(UseInterval* other) {
1381 : UseInterval* tail = nullptr;
1382 1822858 : UseInterval* current = use_interval_;
1383 10297359 : while (other != nullptr) {
1384 : // Make sure the 'current' list starts first
1385 13303286 : if (current == nullptr || current->start() > other->start()) {
1386 : std::swap(current, other);
1387 : }
1388 : // Check disjointness
1389 : DCHECK(other == nullptr || current->end() <= other->start());
1390 : // Append the 'current' node to the result accumulator and move forward
1391 6651643 : if (tail == nullptr) {
1392 1822857 : use_interval_ = current;
1393 : } else {
1394 : tail->set_next(current);
1395 : }
1396 : tail = current;
1397 : current = current->next();
1398 : }
1399 : // Other list is empty => we are done
1400 1822858 : }
1401 :
1402 0 : void SpillRange::Print() const {
1403 0 : StdoutStream os;
1404 0 : os << "{" << std::endl;
1405 0 : for (TopLevelLiveRange* range : live_ranges()) {
1406 0 : os << range->vreg() << " ";
1407 : }
1408 : os << std::endl;
1409 :
1410 0 : for (UseInterval* i = interval(); i != nullptr; i = i->next()) {
1411 0 : os << '[' << i->start() << ", " << i->end() << ')' << std::endl;
1412 : }
1413 0 : os << "}" << std::endl;
1414 0 : }
1415 :
1416 0 : RegisterAllocationData::PhiMapValue::PhiMapValue(PhiInstruction* phi,
1417 : const InstructionBlock* block,
1418 : Zone* zone)
1419 : : phi_(phi),
1420 : block_(block),
1421 : incoming_operands_(zone),
1422 4197752 : assigned_register_(kUnassignedRegister) {
1423 4197752 : incoming_operands_.reserve(phi->operands().size());
1424 0 : }
1425 :
1426 0 : void RegisterAllocationData::PhiMapValue::AddOperand(
1427 : InstructionOperand* operand) {
1428 5080654 : incoming_operands_.push_back(operand);
1429 0 : }
1430 :
1431 0 : void RegisterAllocationData::PhiMapValue::CommitAssignment(
1432 : const InstructionOperand& assigned) {
1433 12260188 : for (InstructionOperand* operand : incoming_operands_) {
1434 : InstructionOperand::ReplaceWith(operand, &assigned);
1435 : }
1436 0 : }
1437 :
1438 2141488 : RegisterAllocationData::RegisterAllocationData(
1439 : const RegisterConfiguration* config, Zone* zone, Frame* frame,
1440 36405930 : InstructionSequence* code, const char* debug_name)
1441 : : allocation_zone_(zone),
1442 : frame_(frame),
1443 : code_(code),
1444 : debug_name_(debug_name),
1445 : config_(config),
1446 : phi_map_(allocation_zone()),
1447 : live_in_sets_(code->InstructionBlockCount(), nullptr, allocation_zone()),
1448 : live_out_sets_(code->InstructionBlockCount(), nullptr, allocation_zone()),
1449 2141535 : live_ranges_(code->VirtualRegisterCount() * 2, nullptr,
1450 : allocation_zone()),
1451 2141491 : fixed_live_ranges_(this->config()->num_general_registers(), nullptr,
1452 : allocation_zone()),
1453 : fixed_float_live_ranges_(allocation_zone()),
1454 2141502 : fixed_double_live_ranges_(this->config()->num_double_registers(), nullptr,
1455 : allocation_zone()),
1456 : fixed_simd128_live_ranges_(allocation_zone()),
1457 : spill_ranges_(code->VirtualRegisterCount(), nullptr, allocation_zone()),
1458 : delayed_references_(allocation_zone()),
1459 : assigned_registers_(nullptr),
1460 : assigned_double_registers_(nullptr),
1461 : virtual_register_count_(code->VirtualRegisterCount()),
1462 : preassigned_slot_ranges_(zone),
1463 : spill_state_(code->InstructionBlockCount(), ZoneVector<LiveRange*>(zone),
1464 23556626 : zone) {
1465 : if (!kSimpleFPAliasing) {
1466 : fixed_float_live_ranges_.resize(this->config()->num_float_registers(),
1467 : nullptr);
1468 : fixed_simd128_live_ranges_.resize(this->config()->num_simd128_registers(),
1469 : nullptr);
1470 : }
1471 :
1472 : assigned_registers_ = new (code_zone())
1473 4283054 : BitVector(this->config()->num_general_registers(), code_zone());
1474 : assigned_double_registers_ = new (code_zone())
1475 4283044 : BitVector(this->config()->num_double_registers(), code_zone());
1476 : fixed_register_use_ = new (code_zone())
1477 4283073 : BitVector(this->config()->num_general_registers(), code_zone());
1478 : fixed_fp_register_use_ = new (code_zone())
1479 4283108 : BitVector(this->config()->num_double_registers(), code_zone());
1480 :
1481 2141568 : this->frame()->SetAllocatedRegisters(assigned_registers_);
1482 2141568 : this->frame()->SetAllocatedDoubleRegisters(assigned_double_registers_);
1483 2141568 : }
1484 :
1485 39608535 : MoveOperands* RegisterAllocationData::AddGapMove(
1486 : int index, Instruction::GapPosition position,
1487 39608535 : const InstructionOperand& from, const InstructionOperand& to) {
1488 : Instruction* instr = code()->InstructionAt(index);
1489 39608510 : ParallelMove* moves = instr->GetOrCreateParallelMove(position, code_zone());
1490 39608656 : return moves->AddMove(from, to);
1491 : }
1492 :
1493 0 : MachineRepresentation RegisterAllocationData::RepresentationFor(
1494 65554227 : int virtual_register) {
1495 : DCHECK_LT(virtual_register, code()->VirtualRegisterCount());
1496 65554227 : return code()->GetRepresentation(virtual_register);
1497 : }
1498 :
1499 328218633 : TopLevelLiveRange* RegisterAllocationData::GetOrCreateLiveRangeFor(int index) {
1500 656437266 : if (index >= static_cast<int>(live_ranges().size())) {
1501 0 : live_ranges().resize(index + 1, nullptr);
1502 : }
1503 656437266 : TopLevelLiveRange* result = live_ranges()[index];
1504 328218633 : if (result == nullptr) {
1505 37741573 : result = NewLiveRange(index, RepresentationFor(index));
1506 75481964 : live_ranges()[index] = result;
1507 : }
1508 328218102 : return result;
1509 : }
1510 :
1511 83552871 : TopLevelLiveRange* RegisterAllocationData::NewLiveRange(
1512 83552871 : int index, MachineRepresentation rep) {
1513 83552650 : return new (allocation_zone()) TopLevelLiveRange(index, rep);
1514 : }
1515 :
1516 5369391 : int RegisterAllocationData::GetNextLiveRangeId() {
1517 5369391 : int vreg = virtual_register_count_++;
1518 10738782 : if (vreg >= static_cast<int>(live_ranges().size())) {
1519 0 : live_ranges().resize(vreg + 1, nullptr);
1520 : }
1521 5369391 : return vreg;
1522 : }
1523 :
1524 5369388 : TopLevelLiveRange* RegisterAllocationData::NextLiveRange(
1525 : MachineRepresentation rep) {
1526 5369388 : int vreg = GetNextLiveRangeId();
1527 5369401 : TopLevelLiveRange* ret = NewLiveRange(vreg, rep);
1528 5369379 : return ret;
1529 : }
1530 :
1531 2098875 : RegisterAllocationData::PhiMapValue* RegisterAllocationData::InitializePhiMap(
1532 4197743 : const InstructionBlock* block, PhiInstruction* phi) {
1533 : RegisterAllocationData::PhiMapValue* map_value = new (allocation_zone())
1534 : RegisterAllocationData::PhiMapValue(phi, block, allocation_zone());
1535 : auto res =
1536 4197750 : phi_map_.insert(std::make_pair(phi->virtual_register(), map_value));
1537 : DCHECK(res.second);
1538 : USE(res);
1539 2098882 : return map_value;
1540 : }
1541 :
1542 0 : RegisterAllocationData::PhiMapValue* RegisterAllocationData::GetPhiMapValueFor(
1543 : int virtual_register) {
1544 : auto it = phi_map_.find(virtual_register);
1545 : DCHECK(it != phi_map_.end());
1546 6858507 : return it->second;
1547 : }
1548 :
1549 0 : RegisterAllocationData::PhiMapValue* RegisterAllocationData::GetPhiMapValueFor(
1550 6104880 : TopLevelLiveRange* top_range) {
1551 0 : return GetPhiMapValueFor(top_range->vreg());
1552 : }
1553 :
1554 42 : bool RegisterAllocationData::ExistsUseWithoutDefinition() {
1555 : bool found = false;
1556 42 : BitVector::Iterator iterator(live_in_sets()[0]);
1557 126 : while (!iterator.Done()) {
1558 : found = true;
1559 0 : int operand_index = iterator.Current();
1560 : PrintF("Register allocator error: live v%d reached first block.\n",
1561 0 : operand_index);
1562 0 : LiveRange* range = GetOrCreateLiveRangeFor(operand_index);
1563 0 : PrintF(" (first use is at %d)\n", range->first_pos()->pos().value());
1564 0 : if (debug_name() == nullptr) {
1565 0 : PrintF("\n");
1566 : } else {
1567 0 : PrintF(" (function: %s)\n", debug_name());
1568 : }
1569 0 : iterator.Advance();
1570 : }
1571 42 : return found;
1572 : }
1573 :
1574 : // If a range is defined in a deferred block, we can expect all the range
1575 : // to only cover positions in deferred blocks. Otherwise, a block on the
1576 : // hot path would be dominated by a deferred block, meaning it is unreachable
1577 : // without passing through the deferred block, which is contradictory.
1578 : // In particular, when such a range contributes a result back on the hot
1579 : // path, it will be as one of the inputs of a phi. In that case, the value
1580 : // will be transferred via a move in the Gap::END's of the last instruction
1581 : // of a deferred block.
1582 362 : bool RegisterAllocationData::RangesDefinedInDeferredStayInDeferred() {
1583 42 : const size_t live_ranges_size = live_ranges().size();
1584 770 : for (const TopLevelLiveRange* range : live_ranges()) {
1585 1372 : CHECK_EQ(live_ranges_size,
1586 : live_ranges().size()); // TODO(neis): crbug.com/831822
1587 1349 : if (range == nullptr || range->IsEmpty() ||
1588 : !code()
1589 : ->GetInstructionBlock(range->Start().ToInstructionIndex())
1590 320 : ->IsDeferred()) {
1591 : continue;
1592 : }
1593 0 : for (const UseInterval* i = range->first_interval(); i != nullptr;
1594 : i = i->next()) {
1595 : int first = i->FirstGapIndex();
1596 : int last = i->LastGapIndex();
1597 0 : for (int instr = first; instr <= last;) {
1598 0 : const InstructionBlock* block = code()->GetInstructionBlock(instr);
1599 0 : if (!block->IsDeferred()) return false;
1600 : instr = block->last_instruction_index() + 1;
1601 : }
1602 : }
1603 : }
1604 : return true;
1605 : }
1606 :
1607 5809145 : SpillRange* RegisterAllocationData::AssignSpillRangeToLiveRange(
1608 18347444 : TopLevelLiveRange* range) {
1609 : DCHECK(!range->HasSpillOperand());
1610 :
1611 : SpillRange* spill_range = range->GetAllocatedSpillRange();
1612 5809145 : if (spill_range == nullptr) {
1613 : DCHECK(!range->IsSplinter());
1614 2727466 : spill_range = new (allocation_zone()) SpillRange(range, allocation_zone());
1615 : }
1616 : range->set_spill_type(TopLevelLiveRange::SpillType::kSpillRange);
1617 :
1618 : int spill_range_index =
1619 5809168 : range->IsSplinter() ? range->splintered_from()->vreg() : range->vreg();
1620 :
1621 11618336 : spill_ranges()[spill_range_index] = spill_range;
1622 :
1623 5809168 : return spill_range;
1624 : }
1625 :
1626 2224440 : SpillRange* RegisterAllocationData::CreateSpillRangeForLiveRange(
1627 2224440 : TopLevelLiveRange* range) {
1628 : DCHECK(!range->HasSpillOperand());
1629 : DCHECK(!range->IsSplinter());
1630 : SpillRange* spill_range =
1631 2224444 : new (allocation_zone()) SpillRange(range, allocation_zone());
1632 2224442 : return spill_range;
1633 : }
1634 :
1635 18627075 : void RegisterAllocationData::MarkFixedUse(MachineRepresentation rep,
1636 : int index) {
1637 18627075 : switch (rep) {
1638 : case MachineRepresentation::kFloat32:
1639 : case MachineRepresentation::kSimd128:
1640 : if (kSimpleFPAliasing) {
1641 18722 : fixed_fp_register_use_->Add(index);
1642 : } else {
1643 : int alias_base_index = -1;
1644 : int aliases = config()->GetAliases(
1645 : rep, index, MachineRepresentation::kFloat64, &alias_base_index);
1646 : DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
1647 : while (aliases--) {
1648 : int aliased_reg = alias_base_index + aliases;
1649 : fixed_fp_register_use_->Add(aliased_reg);
1650 : }
1651 : }
1652 : break;
1653 : case MachineRepresentation::kFloat64:
1654 58560 : fixed_fp_register_use_->Add(index);
1655 : break;
1656 : default:
1657 : DCHECK(!IsFloatingPoint(rep));
1658 18549793 : fixed_register_use_->Add(index);
1659 : break;
1660 : }
1661 18627075 : }
1662 :
1663 302849062 : bool RegisterAllocationData::HasFixedUse(MachineRepresentation rep, int index) {
1664 302849062 : switch (rep) {
1665 : case MachineRepresentation::kFloat32:
1666 : case MachineRepresentation::kSimd128:
1667 : if (kSimpleFPAliasing) {
1668 1758982 : return fixed_fp_register_use_->Contains(index);
1669 : } else {
1670 : int alias_base_index = -1;
1671 : int aliases = config()->GetAliases(
1672 : rep, index, MachineRepresentation::kFloat64, &alias_base_index);
1673 : DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
1674 : bool result = false;
1675 : while (aliases-- && !result) {
1676 : int aliased_reg = alias_base_index + aliases;
1677 : result |= fixed_fp_register_use_->Contains(aliased_reg);
1678 : }
1679 : return result;
1680 : }
1681 : break;
1682 : case MachineRepresentation::kFloat64:
1683 32606242 : return fixed_fp_register_use_->Contains(index);
1684 : break;
1685 : default:
1686 : DCHECK(!IsFloatingPoint(rep));
1687 571332900 : return fixed_register_use_->Contains(index);
1688 : break;
1689 : }
1690 : }
1691 :
1692 79696513 : void RegisterAllocationData::MarkAllocated(MachineRepresentation rep,
1693 : int index) {
1694 79696513 : switch (rep) {
1695 : case MachineRepresentation::kFloat32:
1696 : case MachineRepresentation::kSimd128:
1697 : if (kSimpleFPAliasing) {
1698 102932 : assigned_double_registers_->Add(index);
1699 : } else {
1700 : int alias_base_index = -1;
1701 : int aliases = config()->GetAliases(
1702 : rep, index, MachineRepresentation::kFloat64, &alias_base_index);
1703 : DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
1704 : while (aliases--) {
1705 : int aliased_reg = alias_base_index + aliases;
1706 : assigned_double_registers_->Add(aliased_reg);
1707 : }
1708 : }
1709 : break;
1710 : case MachineRepresentation::kFloat64:
1711 23724417 : assigned_double_registers_->Add(index);
1712 : break;
1713 : default:
1714 : DCHECK(!IsFloatingPoint(rep));
1715 55869164 : assigned_registers_->Add(index);
1716 : break;
1717 : }
1718 79696513 : }
1719 :
1720 37968317 : bool RegisterAllocationData::IsBlockBoundary(LifetimePosition pos) const {
1721 37968396 : return pos.IsFullStart() &&
1722 14850305 : code()->GetInstructionBlock(pos.ToInstructionIndex())->code_start() ==
1723 23118091 : pos.ToInstructionIndex();
1724 : }
1725 :
1726 4282923 : ConstraintBuilder::ConstraintBuilder(RegisterAllocationData* data)
1727 4282923 : : data_(data) {}
1728 :
1729 27885094 : InstructionOperand* ConstraintBuilder::AllocateFixed(
1730 : UnallocatedOperand* operand, int pos, bool is_tagged, bool is_input) {
1731 27885094 : TRACE("Allocating fixed reg for op %d\n", operand->virtual_register());
1732 : DCHECK(operand->HasFixedPolicy());
1733 : InstructionOperand allocated;
1734 : MachineRepresentation rep = InstructionSequence::DefaultRepresentation();
1735 : int virtual_register = operand->virtual_register();
1736 27884976 : if (virtual_register != InstructionOperand::kInvalidVirtualRegister) {
1737 : rep = data()->RepresentationFor(virtual_register);
1738 : }
1739 27885024 : if (operand->HasFixedSlotPolicy()) {
1740 : allocated = AllocatedOperand(AllocatedOperand::STACK_SLOT, rep,
1741 : operand->fixed_slot_index());
1742 26677621 : } else if (operand->HasFixedRegisterPolicy()) {
1743 : DCHECK(!IsFloatingPoint(rep));
1744 : DCHECK(data()->config()->IsAllocatableGeneralCode(
1745 : operand->fixed_register_index()));
1746 : allocated = AllocatedOperand(AllocatedOperand::REGISTER, rep,
1747 : operand->fixed_register_index());
1748 137970 : } else if (operand->HasFixedFPRegisterPolicy()) {
1749 : DCHECK(IsFloatingPoint(rep));
1750 : DCHECK_NE(InstructionOperand::kInvalidVirtualRegister, virtual_register);
1751 : allocated = AllocatedOperand(AllocatedOperand::REGISTER, rep,
1752 : operand->fixed_register_index());
1753 : } else {
1754 0 : UNREACHABLE();
1755 : }
1756 46601697 : if (is_input && allocated.IsAnyRegister()) {
1757 18627194 : data()->MarkFixedUse(rep, operand->fixed_register_index());
1758 : }
1759 : InstructionOperand::ReplaceWith(operand, &allocated);
1760 27885006 : if (is_tagged) {
1761 18149168 : TRACE("Fixed reg is tagged at %d\n", pos);
1762 18149195 : Instruction* instr = code()->InstructionAt(pos);
1763 18149195 : if (instr->HasReferenceMap()) {
1764 14853892 : instr->reference_map()->RecordReference(*AllocatedOperand::cast(operand));
1765 : }
1766 : }
1767 27885047 : return operand;
1768 : }
1769 :
1770 2141397 : void ConstraintBuilder::MeetRegisterConstraints() {
1771 23724196 : for (InstructionBlock* block : code()->instruction_blocks()) {
1772 19441215 : MeetRegisterConstraints(block);
1773 : }
1774 2141584 : }
1775 :
1776 19441272 : void ConstraintBuilder::MeetRegisterConstraints(const InstructionBlock* block) {
1777 : int start = block->first_instruction_index();
1778 : int end = block->last_instruction_index();
1779 : DCHECK_NE(-1, start);
1780 82614694 : for (int i = start; i <= end; ++i) {
1781 63173189 : MeetConstraintsBefore(i);
1782 63173556 : if (i != end) MeetConstraintsAfter(i);
1783 : }
1784 : // Meet register constraints for the instruction in the end.
1785 19441505 : MeetRegisterConstraintsForLastInstructionInBlock(block);
1786 19441380 : }
1787 :
1788 19441398 : void ConstraintBuilder::MeetRegisterConstraintsForLastInstructionInBlock(
1789 19441398 : const InstructionBlock* block) {
1790 : int end = block->last_instruction_index();
1791 19593102 : Instruction* last_instruction = code()->InstructionAt(end);
1792 39186204 : for (size_t i = 0; i < last_instruction->OutputCount(); i++) {
1793 : InstructionOperand* output_operand = last_instruction->OutputAt(i);
1794 : DCHECK(!output_operand->IsConstant());
1795 151741 : UnallocatedOperand* output = UnallocatedOperand::cast(output_operand);
1796 : int output_vreg = output->virtual_register();
1797 151741 : TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(output_vreg);
1798 : bool assigned = false;
1799 151742 : if (output->HasFixedPolicy()) {
1800 2 : AllocateFixed(output, -1, false, false);
1801 : // This value is produced on the stack, we never need to spill it.
1802 2 : if (output->IsStackSlot()) {
1803 : DCHECK(LocationOperand::cast(output)->index() <
1804 : data()->frame()->GetSpillSlotCount());
1805 : range->SetSpillOperand(LocationOperand::cast(output));
1806 : range->SetSpillStartIndex(end);
1807 : assigned = true;
1808 : }
1809 :
1810 6 : for (const RpoNumber& succ : block->successors()) {
1811 2 : const InstructionBlock* successor = code()->InstructionBlockAt(succ);
1812 : DCHECK_EQ(1, successor->PredecessorCount());
1813 : int gap_index = successor->first_instruction_index();
1814 : // Create an unconstrained operand for the same virtual register
1815 : // and insert a gap move from the fixed output to the operand.
1816 : UnallocatedOperand output_copy(UnallocatedOperand::REGISTER_OR_SLOT,
1817 : output_vreg);
1818 2 : data()->AddGapMove(gap_index, Instruction::START, *output, output_copy);
1819 : }
1820 : }
1821 :
1822 151742 : if (!assigned) {
1823 606961 : for (const RpoNumber& succ : block->successors()) {
1824 303481 : const InstructionBlock* successor = code()->InstructionBlockAt(succ);
1825 : DCHECK_EQ(1, successor->PredecessorCount());
1826 : int gap_index = successor->first_instruction_index();
1827 : range->RecordSpillLocation(allocation_zone(), gap_index, output);
1828 : range->SetSpillStartIndex(gap_index);
1829 : }
1830 : }
1831 : }
1832 19441361 : }
1833 :
1834 43732490 : void ConstraintBuilder::MeetConstraintsAfter(int instr_index) {
1835 123024344 : Instruction* first = code()->InstructionAt(instr_index);
1836 : // Handle fixed temporaries.
1837 88963794 : for (size_t i = 0; i < first->TempCount(); i++) {
1838 749343 : UnallocatedOperand* temp = UnallocatedOperand::cast(first->TempAt(i));
1839 749343 : if (temp->HasFixedPolicy()) AllocateFixed(temp, instr_index, false, false);
1840 : }
1841 : // Handle constant/fixed output operands.
1842 113352340 : for (size_t i = 0; i < first->OutputCount(); i++) {
1843 34810030 : InstructionOperand* output = first->OutputAt(i);
1844 34810030 : if (output->IsConstant()) {
1845 : int output_vreg = ConstantOperand::cast(output)->virtual_register();
1846 12527831 : TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(output_vreg);
1847 12528092 : range->SetSpillStartIndex(instr_index + 1);
1848 : range->SetSpillOperand(output);
1849 : continue;
1850 : }
1851 : UnallocatedOperand* first_output = UnallocatedOperand::cast(output);
1852 : TopLevelLiveRange* range =
1853 22282199 : data()->GetOrCreateLiveRangeFor(first_output->virtual_register());
1854 : bool assigned = false;
1855 22281712 : if (first_output->HasFixedPolicy()) {
1856 : int output_vreg = first_output->virtual_register();
1857 : UnallocatedOperand output_copy(UnallocatedOperand::REGISTER_OR_SLOT,
1858 : output_vreg);
1859 : bool is_tagged = code()->IsReference(output_vreg);
1860 9095958 : if (first_output->HasSecondaryStorage()) {
1861 : range->MarkHasPreassignedSlot();
1862 : data()->preassigned_slot_ranges().push_back(
1863 219133 : std::make_pair(range, first_output->GetSecondaryStorage()));
1864 : }
1865 9095962 : AllocateFixed(first_output, instr_index, is_tagged, false);
1866 :
1867 : // This value is produced on the stack, we never need to spill it.
1868 9096020 : if (first_output->IsStackSlot()) {
1869 : DCHECK(LocationOperand::cast(first_output)->index() <
1870 : data()->frame()->GetTotalFrameSlotCount());
1871 : range->SetSpillOperand(LocationOperand::cast(first_output));
1872 1092423 : range->SetSpillStartIndex(instr_index + 1);
1873 : assigned = true;
1874 : }
1875 : data()->AddGapMove(instr_index + 1, Instruction::START, *first_output,
1876 18192040 : output_copy);
1877 : }
1878 : // Make sure we add a gap move for spilling (if we have not done
1879 : // so already).
1880 22281903 : if (!assigned) {
1881 : range->RecordSpillLocation(allocation_zone(), instr_index + 1,
1882 21189786 : first_output);
1883 : range->SetSpillStartIndex(instr_index + 1);
1884 : }
1885 : }
1886 43732417 : }
1887 :
1888 63173216 : void ConstraintBuilder::MeetConstraintsBefore(int instr_index) {
1889 294622800 : Instruction* second = code()->InstructionAt(instr_index);
1890 : // Handle fixed input operands of second instruction.
1891 391877048 : for (size_t i = 0; i < second->InputCount(); i++) {
1892 132765189 : InstructionOperand* input = second->InputAt(i);
1893 132765189 : if (input->IsImmediate() || input->IsExplicit()) {
1894 : continue; // Ignore immediates and explicitly reserved registers.
1895 : }
1896 : UnallocatedOperand* cur_input = UnallocatedOperand::cast(input);
1897 69958942 : if (cur_input->HasFixedPolicy()) {
1898 : int input_vreg = cur_input->virtual_register();
1899 : UnallocatedOperand input_copy(UnallocatedOperand::REGISTER_OR_SLOT,
1900 : input_vreg);
1901 : bool is_tagged = code()->IsReference(input_vreg);
1902 18716735 : AllocateFixed(cur_input, instr_index, is_tagged, true);
1903 37433214 : data()->AddGapMove(instr_index, Instruction::END, input_copy, *cur_input);
1904 : }
1905 : }
1906 : // Handle "output same as input" for second instruction.
1907 133096963 : for (size_t i = 0; i < second->OutputCount(); i++) {
1908 34961634 : InstructionOperand* output = second->OutputAt(i);
1909 67145911 : if (!output->IsUnallocated()) continue;
1910 : UnallocatedOperand* second_output = UnallocatedOperand::cast(output);
1911 22433837 : if (!second_output->HasSameAsInputPolicy()) continue;
1912 : DCHECK_EQ(0, i); // Only valid for first output.
1913 : UnallocatedOperand* cur_input =
1914 2777357 : UnallocatedOperand::cast(second->InputAt(0));
1915 : int output_vreg = second_output->virtual_register();
1916 : int input_vreg = cur_input->virtual_register();
1917 : UnallocatedOperand input_copy(UnallocatedOperand::REGISTER_OR_SLOT,
1918 : input_vreg);
1919 : *cur_input =
1920 2777357 : UnallocatedOperand(*cur_input, second_output->virtual_register());
1921 : MoveOperands* gap_move = data()->AddGapMove(instr_index, Instruction::END,
1922 5554714 : input_copy, *cur_input);
1923 : DCHECK_NOT_NULL(gap_move);
1924 3326734 : if (code()->IsReference(input_vreg) && !code()->IsReference(output_vreg)) {
1925 549127 : if (second->HasReferenceMap()) {
1926 : RegisterAllocationData::DelayedReference delayed_reference = {
1927 0 : second->reference_map(), &gap_move->source()};
1928 0 : data()->delayed_references().push_back(delayed_reference);
1929 : }
1930 2228476 : } else if (!code()->IsReference(input_vreg) &&
1931 : code()->IsReference(output_vreg)) {
1932 : // The input is assumed to immediately have a tagged representation,
1933 : // before the pointer map can be used. I.e. the pointer map at the
1934 : // instruction will include the output operand (whose value at the
1935 : // beginning of the instruction is equal to the input operand). If
1936 : // this is not desired, then the pointer map at this instruction needs
1937 : // to be adjusted manually.
1938 : }
1939 : }
1940 63173515 : }
1941 :
1942 2141590 : void ConstraintBuilder::ResolvePhis() {
1943 : // Process the blocks in reverse order.
1944 43165676 : for (InstructionBlock* block : base::Reversed(code()->instruction_blocks())) {
1945 19441282 : ResolvePhis(block);
1946 : }
1947 2141522 : }
1948 :
1949 21539976 : void ConstraintBuilder::ResolvePhis(const InstructionBlock* block) {
1950 40981094 : for (PhiInstruction* phi : block->phis()) {
1951 : int phi_vreg = phi->virtual_register();
1952 : RegisterAllocationData::PhiMapValue* map_value =
1953 2098872 : data()->InitializePhiMap(block, phi);
1954 2098868 : InstructionOperand& output = phi->output();
1955 : // Map the destination operands, so the commitment phase can find them.
1956 14359070 : for (size_t i = 0; i < phi->operands().size(); ++i) {
1957 5080662 : InstructionBlock* cur_block =
1958 10161312 : code()->InstructionBlockAt(block->predecessors()[i]);
1959 : UnallocatedOperand input(UnallocatedOperand::REGISTER_OR_SLOT,
1960 5080662 : phi->operands()[i]);
1961 : MoveOperands* move = data()->AddGapMove(
1962 5080662 : cur_block->last_instruction_index(), Instruction::END, input, output);
1963 5080654 : map_value->AddOperand(&move->destination());
1964 : DCHECK(!code()
1965 : ->InstructionAt(cur_block->last_instruction_index())
1966 : ->HasReferenceMap());
1967 : }
1968 2098879 : TopLevelLiveRange* live_range = data()->GetOrCreateLiveRangeFor(phi_vreg);
1969 : int gap_index = block->first_instruction_index();
1970 : live_range->RecordSpillLocation(allocation_zone(), gap_index, &output);
1971 : live_range->SetSpillStartIndex(gap_index);
1972 : // We use the phi-ness of some nodes in some later heuristics.
1973 : live_range->set_is_phi(true);
1974 2098864 : live_range->set_is_non_loop_phi(!block->IsLoopHeader());
1975 : }
1976 19441107 : }
1977 :
1978 2141456 : LiveRangeBuilder::LiveRangeBuilder(RegisterAllocationData* data,
1979 : Zone* local_zone)
1980 4282912 : : data_(data), phi_hints_(local_zone) {}
1981 :
1982 19440575 : BitVector* LiveRangeBuilder::ComputeLiveOut(const InstructionBlock* block,
1983 19440599 : RegisterAllocationData* data) {
1984 : size_t block_index = block->rpo_number().ToSize();
1985 38881150 : BitVector* live_out = data->live_out_sets()[block_index];
1986 19440575 : if (live_out == nullptr) {
1987 : // Compute live out for the given block, except not including backward
1988 : // successor edges.
1989 : Zone* zone = data->allocation_zone();
1990 42476050 : const InstructionSequence* code = data->code();
1991 :
1992 19440559 : live_out = new (zone) BitVector(code->VirtualRegisterCount(), zone);
1993 :
1994 : // Process all successor blocks.
1995 62161377 : for (const RpoNumber& succ : block->successors()) {
1996 : // Add values live on entry to the successor.
1997 23279866 : if (succ <= block->rpo_number()) continue;
1998 46071304 : BitVector* live_in = data->live_in_sets()[succ.ToSize()];
1999 23035652 : if (live_in != nullptr) live_out->Union(*live_in);
2000 :
2001 : // All phi input operands corresponding to this successor edge are live
2002 : // out from this block.
2003 23035451 : const InstructionBlock* successor = code->InstructionBlockAt(succ);
2004 23035486 : size_t index = successor->PredecessorIndexOf(block->rpo_number());
2005 : DCHECK(index < successor->PredecessorCount());
2006 50773992 : for (PhiInstruction* phi : successor->phis()) {
2007 9404988 : live_out->Add(phi->operands()[index]);
2008 : }
2009 : }
2010 38881608 : data->live_out_sets()[block_index] = live_out;
2011 : }
2012 19440780 : return live_out;
2013 : }
2014 :
2015 38881174 : void LiveRangeBuilder::AddInitialIntervals(const InstructionBlock* block,
2016 128021059 : BitVector* live_out) {
2017 : // Add an interval that includes the entire block to the live range for
2018 : // each live_out value.
2019 : LifetimePosition start = LifetimePosition::GapFromInstructionIndex(
2020 19440587 : block->first_instruction_index());
2021 : LifetimePosition end = LifetimePosition::InstructionFromInstructionIndex(
2022 : block->last_instruction_index())
2023 19440587 : .NextStart();
2024 19440587 : BitVector::Iterator iterator(live_out);
2025 314362387 : while (!iterator.Done()) {
2026 128021059 : int operand_index = iterator.Current();
2027 128021059 : TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(operand_index);
2028 128021183 : range->AddUseInterval(start, end, allocation_zone());
2029 128021024 : iterator.Advance();
2030 : }
2031 19440065 : }
2032 :
2033 21910160 : int LiveRangeBuilder::FixedFPLiveRangeID(int index, MachineRepresentation rep) {
2034 21910160 : int result = -index - 1;
2035 21910160 : switch (rep) {
2036 : case MachineRepresentation::kSimd128:
2037 48 : result -= config()->num_float_registers();
2038 : V8_FALLTHROUGH;
2039 : case MachineRepresentation::kFloat32:
2040 9635 : result -= config()->num_double_registers();
2041 : V8_FALLTHROUGH;
2042 : case MachineRepresentation::kFloat64:
2043 21910160 : result -= config()->num_general_registers();
2044 : break;
2045 : default:
2046 0 : UNREACHABLE();
2047 : break;
2048 : }
2049 21910160 : return result;
2050 : }
2051 :
2052 276925684 : TopLevelLiveRange* LiveRangeBuilder::FixedLiveRangeFor(int index) {
2053 : DCHECK(index < config()->num_general_registers());
2054 359791749 : TopLevelLiveRange* result = data()->fixed_live_ranges()[index];
2055 119930583 : if (result == nullptr) {
2056 : MachineRepresentation rep = InstructionSequence::DefaultRepresentation();
2057 18532416 : result = data()->NewLiveRange(FixedLiveRangeID(index), rep);
2058 : DCHECK(result->IsFixed());
2059 : result->set_assigned_register(index);
2060 18532193 : data()->MarkAllocated(rep, index);
2061 37064650 : data()->fixed_live_ranges()[index] = result;
2062 : }
2063 119930492 : return result;
2064 : }
2065 :
2066 86134592 : TopLevelLiveRange* LiveRangeBuilder::FixedFPLiveRangeFor(
2067 108044528 : int index, MachineRepresentation rep) {
2068 : int num_regs = config()->num_double_registers();
2069 : ZoneVector<TopLevelLiveRange*>* live_ranges =
2070 : &data()->fixed_double_live_ranges();
2071 : if (!kSimpleFPAliasing) {
2072 : switch (rep) {
2073 : case MachineRepresentation::kFloat32:
2074 : num_regs = config()->num_float_registers();
2075 : live_ranges = &data()->fixed_float_live_ranges();
2076 : break;
2077 : case MachineRepresentation::kSimd128:
2078 : num_regs = config()->num_simd128_registers();
2079 : live_ranges = &data()->fixed_simd128_live_ranges();
2080 : break;
2081 : default:
2082 : break;
2083 : }
2084 : }
2085 :
2086 : DCHECK(index < num_regs);
2087 : USE(num_regs);
2088 194179225 : TopLevelLiveRange* result = (*live_ranges)[index];
2089 86134592 : if (result == nullptr) {
2090 21910158 : result = data()->NewLiveRange(FixedFPLiveRangeID(index, rep), rep);
2091 : DCHECK(result->IsFixed());
2092 : result->set_assigned_register(index);
2093 21909936 : data()->MarkAllocated(rep, index);
2094 21910041 : (*live_ranges)[index] = result;
2095 : }
2096 86134475 : return result;
2097 : }
2098 :
2099 288307403 : TopLevelLiveRange* LiveRangeBuilder::LiveRangeFor(InstructionOperand* operand) {
2100 171123002 : if (operand->IsUnallocated()) {
2101 : return data()->GetOrCreateLiveRangeFor(
2102 104656445 : UnallocatedOperand::cast(operand)->virtual_register());
2103 66466557 : } else if (operand->IsConstant()) {
2104 : return data()->GetOrCreateLiveRangeFor(
2105 12527956 : ConstantOperand::cast(operand)->virtual_register());
2106 53938601 : } else if (operand->IsRegister()) {
2107 : return FixedLiveRangeFor(
2108 51248382 : LocationOperand::cast(operand)->GetRegister().code());
2109 2690219 : } else if (operand->IsFPRegister()) {
2110 : LocationOperand* op = LocationOperand::cast(operand);
2111 275458 : return FixedFPLiveRangeFor(op->register_code(), op->representation());
2112 : } else {
2113 : return nullptr;
2114 : }
2115 : }
2116 :
2117 106797067 : UsePosition* LiveRangeBuilder::NewUsePosition(LifetimePosition pos,
2118 : InstructionOperand* operand,
2119 : void* hint,
2120 : UsePositionHintType hint_type) {
2121 213594033 : return new (allocation_zone()) UsePosition(pos, operand, hint, hint_type);
2122 : }
2123 :
2124 66572789 : UsePosition* LiveRangeBuilder::Define(LifetimePosition position,
2125 : InstructionOperand* operand, void* hint,
2126 : UsePositionHintType hint_type) {
2127 66572789 : TopLevelLiveRange* range = LiveRangeFor(operand);
2128 66572802 : if (range == nullptr) return nullptr;
2129 :
2130 65365600 : if (range->IsEmpty() || range->Start() > position) {
2131 : // Can happen if there is a definition without use.
2132 2140805 : range->AddUseInterval(position, position.NextStart(), allocation_zone());
2133 2140801 : range->AddUsePosition(NewUsePosition(position.NextStart()));
2134 : } else {
2135 63224795 : range->ShortenTo(position);
2136 : }
2137 65365785 : if (!operand->IsUnallocated()) return nullptr;
2138 : UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand);
2139 : UsePosition* use_pos =
2140 26160162 : NewUsePosition(position, unalloc_operand, hint, hint_type);
2141 26159994 : range->AddUsePosition(use_pos);
2142 26160165 : return use_pos;
2143 : }
2144 :
2145 104550784 : UsePosition* LiveRangeBuilder::Use(LifetimePosition block_start,
2146 : LifetimePosition position,
2147 : InstructionOperand* operand, void* hint,
2148 : UsePositionHintType hint_type) {
2149 104550784 : TopLevelLiveRange* range = LiveRangeFor(operand);
2150 104550874 : if (range == nullptr) return nullptr;
2151 : UsePosition* use_pos = nullptr;
2152 103343608 : if (operand->IsUnallocated()) {
2153 : UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand);
2154 78497766 : use_pos = NewUsePosition(position, unalloc_operand, hint, hint_type);
2155 78497399 : range->AddUsePosition(use_pos);
2156 : }
2157 103343488 : range->AddUseInterval(block_start, position, allocation_zone());
2158 103343293 : return use_pos;
2159 : }
2160 :
2161 73842812 : void LiveRangeBuilder::ProcessInstructions(const InstructionBlock* block,
2162 32424205 : BitVector* live) {
2163 : int block_start = block->first_instruction_index();
2164 : LifetimePosition block_start_position =
2165 : LifetimePosition::GapFromInstructionIndex(block_start);
2166 : bool fixed_float_live_ranges = false;
2167 : bool fixed_simd128_live_ranges = false;
2168 : if (!kSimpleFPAliasing) {
2169 : int mask = data()->code()->representation_mask();
2170 : fixed_float_live_ranges = (mask & kFloat32Bit) != 0;
2171 : fixed_simd128_live_ranges = (mask & kSimd128Bit) != 0;
2172 : }
2173 :
2174 82613538 : for (int index = block->last_instruction_index(); index >= block_start;
2175 : index--) {
2176 : LifetimePosition curr_position =
2177 : LifetimePosition::InstructionFromInstructionIndex(index);
2178 357998464 : Instruction* instr = code()->InstructionAt(index);
2179 : DCHECK_NOT_NULL(instr);
2180 : DCHECK(curr_position.IsInstructionPosition());
2181 : // Process output, inputs, and temps of this instruction.
2182 196269842 : for (size_t i = 0; i < instr->OutputCount(); i++) {
2183 34962085 : InstructionOperand* output = instr->OutputAt(i);
2184 34962085 : if (output->IsUnallocated()) {
2185 : // Unsupported.
2186 : DCHECK(!UnallocatedOperand::cast(output)->HasSlotPolicy());
2187 : int out_vreg = UnallocatedOperand::cast(output)->virtual_register();
2188 13338040 : live->Remove(out_vreg);
2189 21624045 : } else if (output->IsConstant()) {
2190 : int out_vreg = ConstantOperand::cast(output)->virtual_register();
2191 12527971 : live->Remove(out_vreg);
2192 : }
2193 35575701 : if (block->IsHandler() && index == block_start && output->IsAllocated() &&
2194 35159280 : output->IsRegister() &&
2195 : AllocatedOperand::cast(output)->GetRegister() ==
2196 : v8::internal::kReturnRegister0) {
2197 : // The register defined here is blocked from gap start - it is the
2198 : // exception value.
2199 : // TODO(mtrofin): should we explore an explicit opcode for
2200 : // the first instruction in the handler?
2201 : Define(LifetimePosition::GapFromInstructionIndex(index), output);
2202 : } else {
2203 : Define(curr_position, output);
2204 : }
2205 : }
2206 :
2207 63172836 : if (instr->ClobbersRegisters()) {
2208 143089167 : for (int i = 0; i < config()->num_allocatable_general_registers(); ++i) {
2209 : // Create a UseInterval at this instruction for all fixed registers,
2210 : // (including the instruction outputs). Adding another UseInterval here
2211 : // is OK because AddUseInterval will just merge it with the existing
2212 : // one at the end of the range.
2213 68682668 : int code = config()->GetAllocatableGeneralCode(i);
2214 68682668 : TopLevelLiveRange* range = FixedLiveRangeFor(code);
2215 : range->AddUseInterval(curr_position, curr_position.End(),
2216 68682634 : allocation_zone());
2217 : }
2218 : }
2219 :
2220 63172767 : if (instr->ClobbersDoubleRegisters()) {
2221 177442130 : for (int i = 0; i < config()->num_allocatable_double_registers(); ++i) {
2222 : // Add a UseInterval for all DoubleRegisters. See comment above for
2223 : // general registers.
2224 85859137 : int code = config()->GetAllocatableDoubleCode(i);
2225 : TopLevelLiveRange* range =
2226 85859137 : FixedFPLiveRangeFor(code, MachineRepresentation::kFloat64);
2227 : range->AddUseInterval(curr_position, curr_position.End(),
2228 85859044 : allocation_zone());
2229 : }
2230 : // Clobber fixed float registers on archs with non-simple aliasing.
2231 : if (!kSimpleFPAliasing) {
2232 : if (fixed_float_live_ranges) {
2233 : for (int i = 0; i < config()->num_allocatable_float_registers();
2234 : ++i) {
2235 : // Add a UseInterval for all FloatRegisters. See comment above for
2236 : // general registers.
2237 : int code = config()->GetAllocatableFloatCode(i);
2238 : TopLevelLiveRange* range =
2239 : FixedFPLiveRangeFor(code, MachineRepresentation::kFloat32);
2240 : range->AddUseInterval(curr_position, curr_position.End(),
2241 : allocation_zone());
2242 : }
2243 : }
2244 : if (fixed_simd128_live_ranges) {
2245 : for (int i = 0; i < config()->num_allocatable_simd128_registers();
2246 : ++i) {
2247 : int code = config()->GetAllocatableSimd128Code(i);
2248 : TopLevelLiveRange* range =
2249 : FixedFPLiveRangeFor(code, MachineRepresentation::kSimd128);
2250 : range->AddUseInterval(curr_position, curr_position.End(),
2251 : allocation_zone());
2252 : }
2253 : }
2254 : }
2255 : }
2256 :
2257 328701541 : for (size_t i = 0; i < instr->InputCount(); i++) {
2258 132764076 : InstructionOperand* input = instr->InputAt(i);
2259 132764076 : if (input->IsImmediate() || input->IsExplicit()) {
2260 : continue; // Ignore immediates and explicitly reserved registers.
2261 : }
2262 : LifetimePosition use_pos;
2263 121201515 : if (input->IsUnallocated() &&
2264 : UnallocatedOperand::cast(input)->IsUsedAtStart()) {
2265 : use_pos = curr_position;
2266 : } else {
2267 : use_pos = curr_position.End();
2268 : }
2269 :
2270 69959042 : if (input->IsUnallocated()) {
2271 : UnallocatedOperand* unalloc = UnallocatedOperand::cast(input);
2272 : int vreg = unalloc->virtual_register();
2273 : live->Add(vreg);
2274 51242558 : if (unalloc->HasSlotPolicy()) {
2275 14716278 : data()->GetOrCreateLiveRangeFor(vreg)->set_has_slot_use(true);
2276 : }
2277 : }
2278 : Use(block_start_position, use_pos, input);
2279 : }
2280 :
2281 64679787 : for (size_t i = 0; i < instr->TempCount(); i++) {
2282 753368 : InstructionOperand* temp = instr->TempAt(i);
2283 : // Unsupported.
2284 : DCHECK_IMPLIES(temp->IsUnallocated(),
2285 : !UnallocatedOperand::cast(temp)->HasSlotPolicy());
2286 753368 : if (instr->ClobbersTemps()) {
2287 0 : if (temp->IsRegister()) continue;
2288 0 : if (temp->IsUnallocated()) {
2289 : UnallocatedOperand* temp_unalloc = UnallocatedOperand::cast(temp);
2290 0 : if (temp_unalloc->HasFixedPolicy()) {
2291 : continue;
2292 : }
2293 : }
2294 : }
2295 : Use(block_start_position, curr_position.End(), temp);
2296 : Define(curr_position, temp);
2297 : }
2298 :
2299 : // Process the moves of the instruction's gaps, making their sources live.
2300 : const Instruction::GapPosition kPositions[] = {Instruction::END,
2301 63173050 : Instruction::START};
2302 : curr_position = curr_position.PrevStart();
2303 : DCHECK(curr_position.IsGapPosition());
2304 189518354 : for (const Instruction::GapPosition& position : kPositions) {
2305 126345212 : ParallelMove* move = instr->GetParallelMove(position);
2306 126345212 : if (move == nullptr) continue;
2307 24072798 : if (position == Instruction::END) {
2308 : curr_position = curr_position.End();
2309 : } else {
2310 : curr_position = curr_position.Start();
2311 : }
2312 83816477 : for (MoveOperands* cur : *move) {
2313 35670789 : InstructionOperand& from = cur->source();
2314 35670789 : InstructionOperand& to = cur->destination();
2315 : void* hint = &to;
2316 35670789 : UsePositionHintType hint_type = UsePosition::HintTypeForOperand(to);
2317 : UsePosition* to_use = nullptr;
2318 : int phi_vreg = -1;
2319 35670965 : if (to.IsUnallocated()) {
2320 : int to_vreg = UnallocatedOperand::cast(to).virtual_register();
2321 16954337 : TopLevelLiveRange* to_range =
2322 16954283 : data()->GetOrCreateLiveRangeFor(to_vreg);
2323 16954337 : if (to_range->is_phi()) {
2324 : phi_vreg = to_vreg;
2325 5080658 : if (to_range->is_non_loop_phi()) {
2326 4327014 : hint = to_range->current_hint_position();
2327 : hint_type = hint == nullptr ? UsePositionHintType::kNone
2328 4327014 : : UsePositionHintType::kUsePos;
2329 : } else {
2330 : hint_type = UsePositionHintType::kPhi;
2331 : hint = data()->GetPhiMapValueFor(to_vreg);
2332 : }
2333 : } else {
2334 11873679 : if (live->Contains(to_vreg)) {
2335 : to_use = Define(curr_position, &to, &from,
2336 10042371 : UsePosition::HintTypeForOperand(from));
2337 10042403 : live->Remove(to_vreg);
2338 : } else {
2339 : cur->Eliminate();
2340 : continue;
2341 : }
2342 : }
2343 : } else {
2344 : Define(curr_position, &to);
2345 : }
2346 : UsePosition* from_use =
2347 33839655 : Use(block_start_position, curr_position, &from, hint, hint_type);
2348 : // Mark range live.
2349 33839452 : if (from.IsUnallocated()) {
2350 : live->Add(UnallocatedOperand::cast(from).virtual_register());
2351 : }
2352 : // Resolve use position hints just created.
2353 33839452 : if (to_use != nullptr && from_use != nullptr) {
2354 : to_use->ResolveHint(from_use);
2355 : from_use->ResolveHint(to_use);
2356 : }
2357 : DCHECK_IMPLIES(to_use != nullptr, to_use->IsResolved());
2358 : DCHECK_IMPLIES(from_use != nullptr, from_use->IsResolved());
2359 : // Potentially resolve phi hint.
2360 33839452 : if (phi_vreg != -1) ResolvePhiHint(&from, from_use);
2361 : }
2362 : }
2363 : }
2364 19440614 : }
2365 :
2366 21539482 : void LiveRangeBuilder::ProcessPhis(const InstructionBlock* block,
2367 : BitVector* live) {
2368 40980101 : for (PhiInstruction* phi : block->phis()) {
2369 : // The live range interval already ends at the first instruction of the
2370 : // block.
2371 : int phi_vreg = phi->virtual_register();
2372 2098886 : live->Remove(phi_vreg);
2373 : // Select a hint from a predecessor block that precedes this block in the
2374 : // rpo order. In order of priority:
2375 : // - Avoid hints from deferred blocks.
2376 : // - Prefer hints from allocated (or explicit) operands.
2377 : // - Prefer hints from empty blocks (containing just parallel moves and a
2378 : // jump). In these cases, if we can elide the moves, the jump threader
2379 : // is likely to be able to elide the jump.
2380 : // The enforcement of hinting in rpo order is required because hint
2381 : // resolution that happens later in the compiler pipeline visits
2382 : // instructions in reverse rpo order, relying on the fact that phis are
2383 : // encountered before their hints.
2384 : InstructionOperand* hint = nullptr;
2385 : int hint_preference = 0;
2386 :
2387 : // The cost of hinting increases with the number of predecessors. At the
2388 : // same time, the typical benefit decreases, since this hinting only
2389 : // optimises the execution path through one predecessor. A limit of 2 is
2390 : // sufficient to hit the common if/else pattern.
2391 : int predecessor_limit = 2;
2392 :
2393 6674835 : for (RpoNumber predecessor : block->predecessors()) {
2394 11466780 : const InstructionBlock* predecessor_block =
2395 4200367 : code()->InstructionBlockAt(predecessor);
2396 : DCHECK_EQ(predecessor_block->rpo_number(), predecessor);
2397 :
2398 : // Only take hints from earlier rpo numbers.
2399 4200369 : if (predecessor >= block->rpo_number()) continue;
2400 :
2401 : // Look up the predecessor instruction.
2402 : const Instruction* predecessor_instr =
2403 3822272 : GetLastInstruction(code(), predecessor_block);
2404 : InstructionOperand* predecessor_hint = nullptr;
2405 : // Phis are assigned in the END position of the last instruction in each
2406 : // predecessor block.
2407 8666073 : for (MoveOperands* move :
2408 8666046 : *predecessor_instr->GetParallelMove(Instruction::END)) {
2409 : InstructionOperand& to = move->destination();
2410 9687594 : if (to.IsUnallocated() &&
2411 : UnallocatedOperand::cast(to).virtual_register() == phi_vreg) {
2412 3822233 : predecessor_hint = &move->source();
2413 3822233 : break;
2414 : }
2415 : }
2416 : DCHECK_NOT_NULL(predecessor_hint);
2417 :
2418 : // For each predecessor, generate a score according to the priorities
2419 : // described above, and pick the best one. Flags in higher-order bits have
2420 : // a higher priority than those in lower-order bits.
2421 : int predecessor_hint_preference = 0;
2422 : const int kNotDeferredBlockPreference = (1 << 2);
2423 : const int kMoveIsAllocatedPreference = (1 << 1);
2424 : const int kBlockIsEmptyPreference = (1 << 0);
2425 :
2426 : // - Avoid hints from deferred blocks.
2427 3822260 : if (!predecessor_block->IsDeferred()) {
2428 : predecessor_hint_preference |= kNotDeferredBlockPreference;
2429 : }
2430 :
2431 : // - Prefer hints from allocated (or explicit) operands.
2432 : //
2433 : // Already-allocated or explicit operands are typically assigned using
2434 : // the parallel moves on the last instruction. For example:
2435 : //
2436 : // gap (v101 = [x0|R|w32]) (v100 = v101)
2437 : // ArchJmp
2438 : // ...
2439 : // phi: v100 = v101 v102
2440 : //
2441 : // We have already found the END move, so look for a matching START move
2442 : // from an allocated (or explicit) operand.
2443 : //
2444 : // Note that we cannot simply look up data()->live_ranges()[vreg] here
2445 : // because the live ranges are still being built when this function is
2446 : // called.
2447 : // TODO(v8): Find a way to separate hinting from live range analysis in
2448 : // BuildLiveRanges so that we can use the O(1) live-range look-up.
2449 : auto moves = predecessor_instr->GetParallelMove(Instruction::START);
2450 3822260 : if (moves != nullptr) {
2451 807221 : for (MoveOperands* move : *moves) {
2452 : InstructionOperand& to = move->destination();
2453 370389 : if (predecessor_hint->Equals(to)) {
2454 303946 : if (move->source().IsAllocated() || move->source().IsExplicit()) {
2455 303946 : predecessor_hint_preference |= kMoveIsAllocatedPreference;
2456 : }
2457 : break;
2458 : }
2459 : }
2460 : }
2461 :
2462 : // - Prefer hints from empty blocks.
2463 3822260 : if (predecessor_block->last_instruction_index() ==
2464 : predecessor_block->first_instruction_index()) {
2465 1432185 : predecessor_hint_preference |= kBlockIsEmptyPreference;
2466 : }
2467 :
2468 7644520 : if ((hint == nullptr) ||
2469 3822260 : (predecessor_hint_preference > hint_preference)) {
2470 : // Take the hint from this predecessor.
2471 : hint = predecessor_hint;
2472 : hint_preference = predecessor_hint_preference;
2473 : }
2474 :
2475 3822260 : if (--predecessor_limit <= 0) break;
2476 : }
2477 : DCHECK_NOT_NULL(hint);
2478 :
2479 : LifetimePosition block_start = LifetimePosition::GapFromInstructionIndex(
2480 2098873 : block->first_instruction_index());
2481 2098881 : UsePosition* use_pos = Define(block_start, &phi->output(), hint,
2482 4197754 : UsePosition::HintTypeForOperand(*hint));
2483 : MapPhiHint(hint, use_pos);
2484 : }
2485 19440606 : }
2486 :
2487 487074 : void LiveRangeBuilder::ProcessLoopHeader(const InstructionBlock* block,
2488 2274846 : BitVector* live) {
2489 : DCHECK(block->IsLoopHeader());
2490 : // Add a live range stretching from the first loop instruction to the last
2491 : // for each value live on entry to the header.
2492 243537 : BitVector::Iterator iterator(live);
2493 : LifetimePosition start = LifetimePosition::GapFromInstructionIndex(
2494 243537 : block->first_instruction_index());
2495 : LifetimePosition end = LifetimePosition::GapFromInstructionIndex(
2496 243537 : code()->LastLoopInstructionIndex(block))
2497 243537 : .NextFullStart();
2498 5280305 : while (!iterator.Done()) {
2499 2274846 : int operand_index = iterator.Current();
2500 2274846 : TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(operand_index);
2501 2274847 : range->EnsureInterval(start, end, allocation_zone());
2502 2274849 : iterator.Advance();
2503 : }
2504 : // Insert all values into the live in sets of all blocks in the loop.
2505 4047611 : for (int i = block->rpo_number().ToInt() + 1; i < block->loop_end().ToInt();
2506 : ++i) {
2507 11412222 : live_in_sets()[i]->Union(*live);
2508 : }
2509 243537 : }
2510 :
2511 87212231 : void LiveRangeBuilder::BuildLiveRanges() {
2512 : // Process the blocks in reverse order.
2513 23723880 : for (int block_id = code()->InstructionBlockCount() - 1; block_id >= 0;
2514 : --block_id) {
2515 : InstructionBlock* block =
2516 19440597 : code()->InstructionBlockAt(RpoNumber::FromInt(block_id));
2517 19440584 : BitVector* live = ComputeLiveOut(block, data());
2518 : // Initially consider all live_out values live for the entire block. We
2519 : // will shorten these intervals if necessary.
2520 19440773 : AddInitialIntervals(block, live);
2521 : // Process the instructions in reverse order, generating and killing
2522 : // live values.
2523 19440090 : ProcessInstructions(block, live);
2524 : // All phi output operands are killed by this block.
2525 19440645 : ProcessPhis(block, live);
2526 : // Now live is live_in for this block except not including values live
2527 : // out on backward successor edges.
2528 19440625 : if (block->IsLoopHeader()) ProcessLoopHeader(block, live);
2529 58322340 : live_in_sets()[block_id] = live;
2530 : }
2531 : // Postprocess the ranges.
2532 2141733 : const size_t live_ranges_size = data()->live_ranges().size();
2533 136359936 : for (TopLevelLiveRange* range : data()->live_ranges()) {
2534 161428560 : CHECK_EQ(live_ranges_size,
2535 : data()->live_ranges().size()); // TODO(neis): crbug.com/831822
2536 80714039 : if (range == nullptr) continue;
2537 : // Give slots to all ranges with a non fixed slot use.
2538 39879018 : if (range->has_slot_use() && range->HasNoSpillType()) {
2539 1226604 : data()->AssignSpillRangeToLiveRange(range);
2540 : }
2541 : // TODO(bmeurer): This is a horrible hack to make sure that for constant
2542 : // live ranges, every use requires the constant to be in a register.
2543 : // Without this hack, all uses with "any" policy would get the constant
2544 : // operand assigned.
2545 51362431 : if (range->HasSpillOperand() && range->GetSpillOperand()->IsConstant()) {
2546 31713142 : for (UsePosition* pos = range->first_pos(); pos != nullptr;
2547 : pos = pos->next()) {
2548 19185165 : if (pos->type() == UsePositionType::kRequiresSlot ||
2549 : pos->type() == UsePositionType::kRegisterOrSlotOrConstant) {
2550 : continue;
2551 : }
2552 : UsePositionType new_type = UsePositionType::kRegisterOrSlot;
2553 : // Can't mark phis as needing a register.
2554 19185180 : if (!pos->pos().IsGapPosition()) {
2555 : new_type = UsePositionType::kRequiresRegister;
2556 : }
2557 : pos->set_type(new_type, true);
2558 : }
2559 : }
2560 : }
2561 4356168 : for (auto preassigned : data()->preassigned_slot_ranges()) {
2562 0 : TopLevelLiveRange* range = preassigned.first;
2563 : int slot_id = preassigned.second;
2564 : SpillRange* spill = range->HasSpillRange()
2565 : ? range->GetSpillRange()
2566 146228 : : data()->AssignSpillRangeToLiveRange(range);
2567 : spill->set_assigned_slot(slot_id);
2568 : }
2569 : #ifdef DEBUG
2570 : Verify();
2571 : #endif
2572 2141500 : }
2573 :
2574 0 : void LiveRangeBuilder::MapPhiHint(InstructionOperand* operand,
2575 : UsePosition* use_pos) {
2576 : DCHECK(!use_pos->IsResolved());
2577 4197764 : auto res = phi_hints_.insert(std::make_pair(operand, use_pos));
2578 : DCHECK(res.second);
2579 : USE(res);
2580 0 : }
2581 :
2582 5080665 : void LiveRangeBuilder::ResolvePhiHint(InstructionOperand* operand,
2583 : UsePosition* use_pos) {
2584 : auto it = phi_hints_.find(operand);
2585 10161322 : if (it == phi_hints_.end()) return;
2586 : DCHECK(!it->second->IsResolved());
2587 2098888 : it->second->ResolveHint(use_pos);
2588 : }
2589 :
2590 0 : void LiveRangeBuilder::Verify() const {
2591 0 : for (auto& hint : phi_hints_) {
2592 0 : CHECK(hint.second->IsResolved());
2593 : }
2594 0 : for (const TopLevelLiveRange* current : data()->live_ranges()) {
2595 0 : if (current != nullptr && !current->IsEmpty()) {
2596 : // New LiveRanges should not be split.
2597 0 : CHECK_NULL(current->next());
2598 : // General integrity check.
2599 0 : current->Verify();
2600 0 : const UseInterval* first = current->first_interval();
2601 0 : if (first->next() == nullptr) continue;
2602 :
2603 : // Consecutive intervals should not end and start in the same block,
2604 : // otherwise the intervals should have been joined, because the
2605 : // variable is live throughout that block.
2606 0 : CHECK(NextIntervalStartsInDifferentBlocks(first));
2607 :
2608 0 : for (const UseInterval* i = first->next(); i != nullptr; i = i->next()) {
2609 : // Except for the first interval, the other intevals must start at
2610 : // a block boundary, otherwise data wouldn't flow to them.
2611 0 : CHECK(IntervalStartsAtBlockBoundary(i));
2612 : // The last instruction of the predecessors of the block the interval
2613 : // starts must be covered by the range.
2614 0 : CHECK(IntervalPredecessorsCoveredByRange(i, current));
2615 0 : if (i->next() != nullptr) {
2616 : // Check the consecutive intervals property, except for the last
2617 : // interval, where it doesn't apply.
2618 0 : CHECK(NextIntervalStartsInDifferentBlocks(i));
2619 : }
2620 : }
2621 : }
2622 : }
2623 0 : }
2624 :
2625 0 : bool LiveRangeBuilder::IntervalStartsAtBlockBoundary(
2626 0 : const UseInterval* interval) const {
2627 : LifetimePosition start = interval->start();
2628 0 : if (!start.IsFullStart()) return false;
2629 : int instruction_index = start.ToInstructionIndex();
2630 0 : const InstructionBlock* block =
2631 0 : data()->code()->GetInstructionBlock(instruction_index);
2632 0 : return block->first_instruction_index() == instruction_index;
2633 : }
2634 :
2635 0 : bool LiveRangeBuilder::IntervalPredecessorsCoveredByRange(
2636 0 : const UseInterval* interval, const TopLevelLiveRange* range) const {
2637 : LifetimePosition start = interval->start();
2638 : int instruction_index = start.ToInstructionIndex();
2639 : const InstructionBlock* block =
2640 0 : data()->code()->GetInstructionBlock(instruction_index);
2641 0 : for (RpoNumber pred_index : block->predecessors()) {
2642 0 : const InstructionBlock* predecessor =
2643 0 : data()->code()->InstructionBlockAt(pred_index);
2644 : LifetimePosition last_pos = LifetimePosition::GapFromInstructionIndex(
2645 : predecessor->last_instruction_index());
2646 : last_pos = last_pos.NextStart().End();
2647 0 : if (!range->Covers(last_pos)) return false;
2648 : }
2649 0 : return true;
2650 : }
2651 :
2652 0 : bool LiveRangeBuilder::NextIntervalStartsInDifferentBlocks(
2653 0 : const UseInterval* interval) const {
2654 : DCHECK_NOT_NULL(interval->next());
2655 : LifetimePosition end = interval->end();
2656 : LifetimePosition next_start = interval->next()->start();
2657 : // Since end is not covered, but the previous position is, move back a
2658 : // position
2659 0 : end = end.IsStart() ? end.PrevStart().End() : end.Start();
2660 : int last_covered_index = end.ToInstructionIndex();
2661 : const InstructionBlock* block =
2662 0 : data()->code()->GetInstructionBlock(last_covered_index);
2663 : const InstructionBlock* next_block =
2664 0 : data()->code()->GetInstructionBlock(next_start.ToInstructionIndex());
2665 0 : return block->rpo_number() < next_block->rpo_number();
2666 : }
2667 :
2668 32493143 : void BundleBuilder::BuildBundles() {
2669 2141416 : TRACE("Build bundles\n");
2670 : // Process the blocks in reverse order.
2671 23724074 : for (int block_id = code()->InstructionBlockCount() - 1; block_id >= 0;
2672 : --block_id) {
2673 : InstructionBlock* block =
2674 19441198 : code()->InstructionBlockAt(RpoNumber::FromInt(block_id));
2675 19441369 : TRACE("Block B%d\n", block_id);
2676 40981345 : for (auto phi : block->phis()) {
2677 2098871 : LiveRange* out_range =
2678 2098876 : data()->GetOrCreateLiveRangeFor(phi->virtual_register());
2679 0 : LiveRangeBundle* out = out_range->get_bundle();
2680 2098871 : if (out == nullptr) {
2681 1589656 : out = new (data()->allocation_zone())
2682 1589656 : LiveRangeBundle(data()->allocation_zone(), next_bundle_id_++);
2683 1589657 : out->TryAddRange(out_range);
2684 : }
2685 2098861 : TRACE("Processing phi for v%d with %d:%d\n", phi->virtual_register(),
2686 : out_range->TopLevel()->vreg(), out_range->relative_id());
2687 9278387 : for (auto input : phi->operands()) {
2688 10161293 : LiveRange* input_range = data()->GetOrCreateLiveRangeFor(input);
2689 5080643 : TRACE("Input value v%d with range %d:%d\n", input,
2690 : input_range->TopLevel()->vreg(), input_range->relative_id());
2691 : LiveRangeBundle* input_bundle = input_range->get_bundle();
2692 5080646 : if (input_bundle != nullptr) {
2693 644439 : TRACE("Merge\n");
2694 644439 : if (out->TryMerge(input_bundle))
2695 378914 : TRACE("Merged %d and %d to %d\n", phi->virtual_register(), input,
2696 : out->id());
2697 : } else {
2698 4436207 : TRACE("Add\n");
2699 4436207 : if (out->TryAddRange(input_range))
2700 4002892 : TRACE("Added %d and %d to %d\n", phi->virtual_register(), input,
2701 : out->id());
2702 : }
2703 : }
2704 : }
2705 19441236 : TRACE("Done block B%d\n", block_id);
2706 : }
2707 2141526 : }
2708 :
2709 6025847 : bool LiveRangeBundle::TryAddRange(LiveRange* range) {
2710 : DCHECK_NULL(range->get_bundle());
2711 : // We may only add a new live range if its use intervals do not
2712 : // overlap with existing intervals in the bundle.
2713 11618374 : if (UsesOverlap(range->first_interval())) return false;
2714 : ranges_.insert(range);
2715 5592527 : range->set_bundle(this);
2716 5592527 : InsertUses(range->first_interval());
2717 5592543 : return true;
2718 : }
2719 644439 : bool LiveRangeBundle::TryMerge(LiveRangeBundle* other) {
2720 644439 : if (other == this) return true;
2721 :
2722 : auto iter1 = uses_.begin();
2723 : auto iter2 = other->uses_.begin();
2724 :
2725 2610946 : while (iter1 != uses_.end() && iter2 != other->uses_.end()) {
2726 1136757 : if (iter1->start > iter2->end) {
2727 : ++iter2;
2728 455831 : } else if (iter2->start > iter1->end) {
2729 : ++iter1;
2730 : } else {
2731 265523 : TRACE("No merge %d:%d %d:%d\n", iter1->start, iter1->end, iter2->start,
2732 : iter2->end);
2733 : return false;
2734 : }
2735 : }
2736 : // Uses are disjoint, merging is possible.
2737 195798 : for (auto it = other->ranges_.begin(); it != other->ranges_.end(); ++it) {
2738 141952 : (*it)->set_bundle(this);
2739 141952 : InsertUses((*it)->first_interval());
2740 : }
2741 : ranges_.insert(other->ranges_.begin(), other->ranges_.end());
2742 : other->ranges_.clear();
2743 :
2744 26923 : return true;
2745 : }
2746 :
2747 5592536 : void LiveRangeBundle::MergeSpillRanges() {
2748 : SpillRange* target = nullptr;
2749 56360008 : for (auto range : ranges_) {
2750 45174924 : if (range->TopLevel()->HasSpillRange()) {
2751 3652530 : SpillRange* current = range->TopLevel()->GetSpillRange();
2752 3652530 : if (target == nullptr) {
2753 : target = current;
2754 1499591 : } else if (target != current) {
2755 99468 : target->TryMerge(current);
2756 : }
2757 : }
2758 : }
2759 5592548 : }
2760 :
2761 9325216 : RegisterAllocator::RegisterAllocator(RegisterAllocationData* data,
2762 : RegisterKind kind)
2763 : : data_(data),
2764 : mode_(kind),
2765 : num_registers_(GetRegisterCount(data->config(), kind)),
2766 : num_allocatable_registers_(
2767 : GetAllocatableRegisterCount(data->config(), kind)),
2768 : allocatable_register_codes_(
2769 : GetAllocatableRegisterCodes(data->config(), kind)),
2770 9325216 : check_fp_aliasing_(false) {
2771 : if (!kSimpleFPAliasing && kind == FP_REGISTERS) {
2772 : check_fp_aliasing_ = (data->code()->representation_mask() &
2773 : (kFloat32Bit | kSimd128Bit)) != 0;
2774 : }
2775 2331304 : }
2776 :
2777 0 : LifetimePosition RegisterAllocator::GetSplitPositionForInstruction(
2778 12169724 : const LiveRange* range, int instruction_index) {
2779 : LifetimePosition ret = LifetimePosition::Invalid();
2780 :
2781 : ret = LifetimePosition::GapFromInstructionIndex(instruction_index);
2782 24340240 : if (range->Start() >= ret || ret >= range->End()) {
2783 : return LifetimePosition::Invalid();
2784 : }
2785 0 : return ret;
2786 : }
2787 :
2788 120329495 : void RegisterAllocator::SplitAndSpillRangesDefinedByMemoryOperand() {
2789 2331820 : size_t initial_range_count = data()->live_ranges().size();
2790 120329295 : for (size_t i = 0; i < initial_range_count; ++i) {
2791 235995350 : CHECK_EQ(initial_range_count,
2792 : data()->live_ranges().size()); // TODO(neis): crbug.com/831822
2793 135632674 : TopLevelLiveRange* range = data()->live_ranges()[i];
2794 117997397 : if (!CanProcessRange(range)) continue;
2795 82560148 : if (range->HasNoSpillType() ||
2796 2085635 : (range->HasSpillRange() && !range->has_slot_use())) {
2797 : continue;
2798 : }
2799 0 : LifetimePosition start = range->Start();
2800 17635269 : TRACE("Live range %d:%d is defined by a spill operand.\n",
2801 : range->TopLevel()->vreg(), range->relative_id());
2802 : LifetimePosition next_pos = start;
2803 17635277 : if (next_pos.IsGapPosition()) {
2804 : next_pos = next_pos.NextStart();
2805 : }
2806 :
2807 : // With splinters, we can be more strict and skip over positions
2808 : // not strictly needing registers.
2809 : UsePosition* pos =
2810 : range->IsSplinter()
2811 3064720 : ? range->NextRegisterPosition(next_pos)
2812 20699997 : : range->NextUsePositionRegisterIsBeneficial(next_pos);
2813 : // If the range already has a spill operand and it doesn't need a
2814 : // register immediately, split it and spill the first part of the range.
2815 17635281 : if (pos == nullptr) {
2816 5193233 : Spill(range);
2817 12442048 : } else if (pos->pos() > range->Start().NextStart()) {
2818 : // Do not spill live range eagerly if use position that can benefit from
2819 : // the register is too close to the start of live range.
2820 : LifetimePosition split_pos = GetSplitPositionForInstruction(
2821 : range, pos->pos().ToInstructionIndex());
2822 : // There is no place to split, so we can't split and spill.
2823 12170516 : if (!split_pos.IsValid()) continue;
2824 :
2825 : split_pos =
2826 12169722 : FindOptimalSplitPos(range->Start().NextFullStart(), split_pos);
2827 :
2828 12169689 : SplitRangeAt(range, split_pos);
2829 12169664 : Spill(range);
2830 : }
2831 : }
2832 2331620 : }
2833 :
2834 25099630 : LiveRange* RegisterAllocator::SplitRangeAt(LiveRange* range,
2835 : LifetimePosition pos) {
2836 : DCHECK(!range->TopLevel()->IsFixed());
2837 25099630 : TRACE("Splitting live range %d:%d at %d\n", range->TopLevel()->vreg(),
2838 : range->relative_id(), pos.value());
2839 :
2840 25099646 : if (pos <= range->Start()) return range;
2841 :
2842 : // We can't properly connect liveranges if splitting occurred at the end
2843 : // a block.
2844 : DCHECK(pos.IsStart() || pos.IsGapPosition() ||
2845 : (GetInstructionBlock(code(), pos)->last_instruction_index() !=
2846 : pos.ToInstructionIndex()));
2847 :
2848 22643379 : LiveRange* result = range->SplitAt(pos, allocation_zone());
2849 22643326 : return result;
2850 : }
2851 :
2852 3538216 : LiveRange* RegisterAllocator::SplitBetween(LiveRange* range,
2853 : LifetimePosition start,
2854 : LifetimePosition end) {
2855 : DCHECK(!range->TopLevel()->IsFixed());
2856 3538216 : TRACE("Splitting live range %d:%d in position between [%d, %d]\n",
2857 : range->TopLevel()->vreg(), range->relative_id(), start.value(),
2858 : end.value());
2859 :
2860 3538216 : LifetimePosition split_pos = FindOptimalSplitPos(start, end);
2861 : DCHECK(split_pos >= start);
2862 3538219 : return SplitRangeAt(range, split_pos);
2863 : }
2864 :
2865 15707892 : LifetimePosition RegisterAllocator::FindOptimalSplitPos(LifetimePosition start,
2866 : LifetimePosition end) {
2867 : int start_instr = start.ToInstructionIndex();
2868 : int end_instr = end.ToInstructionIndex();
2869 : DCHECK_LE(start_instr, end_instr);
2870 :
2871 : // We have no choice
2872 15707892 : if (start_instr == end_instr) return end;
2873 :
2874 : const InstructionBlock* start_block = GetInstructionBlock(code(), start);
2875 : const InstructionBlock* end_block = GetInstructionBlock(code(), end);
2876 :
2877 9592489 : if (end_block == start_block) {
2878 : // The interval is split in the same basic block. Split at the latest
2879 : // possible position.
2880 6774008 : return end;
2881 : }
2882 :
2883 178976 : const InstructionBlock* block = end_block;
2884 : // Find header of outermost loop.
2885 : do {
2886 : const InstructionBlock* loop = GetContainingLoop(code(), block);
2887 2935058 : if (loop == nullptr ||
2888 : loop->rpo_number().ToInt() <= start_block->rpo_number().ToInt()) {
2889 : // No more loops or loop starts before the lifetime start.
2890 : break;
2891 : }
2892 : block = loop;
2893 : } while (true);
2894 :
2895 : // We did not find any suitable outer loop. Split at the latest possible
2896 : // position unless end_block is a loop header itself.
2897 5523990 : if (block == end_block && !end_block->IsLoopHeader()) return end;
2898 :
2899 : return LifetimePosition::GapFromInstructionIndex(
2900 : block->first_instruction_index());
2901 : }
2902 :
2903 1273664 : LifetimePosition RegisterAllocator::FindOptimalSpillingPos(
2904 : LiveRange* range, LifetimePosition pos) {
2905 : const InstructionBlock* block = GetInstructionBlock(code(), pos.Start());
2906 91160 : const InstructionBlock* loop_header =
2907 1273664 : block->IsLoopHeader() ? block : GetContainingLoop(code(), block);
2908 :
2909 1273664 : if (loop_header == nullptr) return pos;
2910 :
2911 : const UsePosition* prev_use =
2912 : range->PreviousUsePositionRegisterIsBeneficial(pos);
2913 :
2914 153963 : while (loop_header != nullptr) {
2915 : // We are going to spill live range inside the loop.
2916 : // If possible try to move spilling position backwards to loop header.
2917 : // This will reduce number of memory moves on the back edge.
2918 : LifetimePosition loop_start = LifetimePosition::GapFromInstructionIndex(
2919 : loop_header->first_instruction_index());
2920 :
2921 91160 : if (range->Covers(loop_start)) {
2922 69675 : if (prev_use == nullptr || prev_use->pos() < loop_start) {
2923 : // No register beneficial use inside the loop before the pos.
2924 : pos = loop_start;
2925 : }
2926 : }
2927 :
2928 : // Try hoisting out to an outer loop.
2929 : loop_header = GetContainingLoop(code(), loop_header);
2930 : }
2931 :
2932 62803 : return pos;
2933 : }
2934 :
2935 29179110 : void RegisterAllocator::Spill(LiveRange* range) {
2936 : DCHECK(!range->spilled());
2937 0 : TopLevelLiveRange* first = range->TopLevel();
2938 24669605 : TRACE("Spilling live range %d:%d\n", first->vreg(), range->relative_id());
2939 :
2940 24669682 : if (first->HasNoSpillType()) {
2941 4509505 : data()->AssignSpillRangeToLiveRange(first);
2942 : }
2943 : range->Spill();
2944 24669682 : }
2945 :
2946 0 : const char* RegisterAllocator::RegisterName(int register_code) const {
2947 : return mode() == GENERAL_REGISTERS
2948 : ? i::RegisterName(Register::from_code(register_code))
2949 0 : : i::RegisterName(DoubleRegister::from_code(register_code));
2950 : }
2951 :
2952 2331271 : LinearScanAllocator::LinearScanAllocator(RegisterAllocationData* data,
2953 : RegisterKind kind, Zone* local_zone)
2954 : : RegisterAllocator(data, kind),
2955 : unhandled_live_ranges_(local_zone),
2956 : active_live_ranges_(local_zone),
2957 : inactive_live_ranges_(local_zone),
2958 : next_active_ranges_change_(LifetimePosition::Invalid()),
2959 4662560 : next_inactive_ranges_change_(LifetimePosition::Invalid()) {
2960 2331289 : active_live_ranges().reserve(8);
2961 2331662 : inactive_live_ranges().reserve(8);
2962 2331657 : }
2963 :
2964 0 : void LinearScanAllocator::MaybeUndoPreviousSplit(LiveRange* range) {
2965 0 : if (range->next() != nullptr && range->next()->ShouldRecombine()) {
2966 0 : LiveRange* to_remove = range->next();
2967 0 : TRACE("Recombining %d:%d with %d\n", range->TopLevel()->vreg(),
2968 : range->relative_id(), to_remove->relative_id());
2969 :
2970 : // Remove the range from unhandled, as attaching it will change its
2971 : // state and hence ordering in the unhandled set.
2972 : auto removed_cnt = unhandled_live_ranges().erase(to_remove);
2973 : DCHECK_EQ(removed_cnt, 1);
2974 : USE(removed_cnt);
2975 :
2976 0 : range->AttachToNext();
2977 0 : } else if (range->next() != nullptr) {
2978 0 : TRACE("No recombine for %d:%d to %d\n", range->TopLevel()->vreg(),
2979 : range->relative_id(), range->next()->relative_id());
2980 : }
2981 0 : }
2982 :
2983 0 : void LinearScanAllocator::SpillNotLiveRanges(RangeWithRegisterSet& to_be_live,
2984 : LifetimePosition position) {
2985 0 : for (auto it = active_live_ranges().begin();
2986 : it != active_live_ranges().end();) {
2987 0 : LiveRange* active_range = *it;
2988 0 : TopLevelLiveRange* toplevel = (*it)->TopLevel();
2989 0 : auto found = to_be_live.find({toplevel, kUnassignedRegister});
2990 0 : if (found == to_be_live.end()) {
2991 : // Is not contained in {to_be_live}, spill it.
2992 : // Fixed registers are exempt from this. They might have been
2993 : // added from inactive at the block boundary but we know that
2994 : // they cannot conflict as they are built before register
2995 : // allocation starts. It would be algorithmically fine to split
2996 : // them and reschedule but the code does not allow to do this.
2997 0 : if (toplevel->IsFixed()) {
2998 0 : TRACE("Keeping reactivated fixed range for %s\n",
2999 : RegisterName(toplevel->assigned_register()));
3000 : ++it;
3001 : } else {
3002 : // When spilling a previously spilled/reloaded range, we add back the
3003 : // tail that we might have split off when we reloaded/spilled it
3004 : // previously. Otherwise we might keep generating small split-offs.
3005 0 : MaybeUndoPreviousSplit(active_range);
3006 0 : TRACE("Putting back %d:%d\n", toplevel->vreg(),
3007 : active_range->relative_id());
3008 0 : LiveRange* split = SplitRangeAt(active_range, position);
3009 : DCHECK_NE(split, active_range);
3010 :
3011 : // Make sure we revisit this range once it has a use that requires
3012 : // a register.
3013 0 : UsePosition* next_use = split->NextRegisterPosition(position);
3014 0 : if (next_use != nullptr) {
3015 : // Move to the start of the gap before use so that we have a space
3016 : // to perform the potential reload. Otherwise, do not spill but add
3017 : // to unhandled for reallocation.
3018 : LifetimePosition revisit_at = next_use->pos().FullStart();
3019 0 : TRACE("Next use at %d\n", revisit_at.value());
3020 0 : if (!data()->IsBlockBoundary(revisit_at)) {
3021 : // Leave some space so we have enough gap room.
3022 : revisit_at = revisit_at.PrevStart().FullStart();
3023 : }
3024 : // If this range became life right at the block boundary that we are
3025 : // currently processing, we do not need to split it. Instead move it
3026 : // to unhandled right away.
3027 0 : if (position < revisit_at) {
3028 0 : LiveRange* third_part = SplitRangeAt(split, revisit_at);
3029 : DCHECK_NE(split, third_part);
3030 0 : Spill(split);
3031 0 : TRACE("Marking %d:%d to recombine\n", toplevel->vreg(),
3032 : third_part->relative_id());
3033 : third_part->SetRecombine();
3034 0 : AddToUnhandled(third_part);
3035 : } else {
3036 0 : AddToUnhandled(split);
3037 : }
3038 : } else {
3039 0 : Spill(split);
3040 : }
3041 0 : it = ActiveToHandled(it);
3042 : }
3043 : } else {
3044 : // This range is contained in {to_be_live}, so we can keep it.
3045 0 : int expected_register = (*found).expected_register;
3046 : to_be_live.erase(found);
3047 0 : if (expected_register == active_range->assigned_register()) {
3048 : // Was life and in correct register, simply pass through.
3049 0 : TRACE("Keeping %d:%d in %s\n", toplevel->vreg(),
3050 : active_range->relative_id(),
3051 : RegisterName(active_range->assigned_register()));
3052 : ++it;
3053 : } else {
3054 : // Was life but wrong register. Split and schedule for
3055 : // allocation.
3056 0 : TRACE("Scheduling %d:%d\n", toplevel->vreg(),
3057 : active_range->relative_id());
3058 0 : LiveRange* split = SplitRangeAt(active_range, position);
3059 0 : AddToUnhandled(split);
3060 0 : it = ActiveToHandled(it);
3061 : }
3062 : }
3063 : }
3064 0 : }
3065 :
3066 0 : LiveRange* LinearScanAllocator::AssignRegisterOnReload(LiveRange* range,
3067 : int reg) {
3068 : // We know the register is currently free but it might be in
3069 : // use by a currently inactive range. So we might not be able
3070 : // to reload for the full distance. In such case, split here.
3071 : // TODO(herhut):
3072 : // It might be better if we could use the normal unhandled queue and
3073 : // give reloading registers pecedence. That way we would compute the
3074 : // intersection for the entire future.
3075 : LifetimePosition new_end = range->End();
3076 0 : for (const auto inactive : inactive_live_ranges()) {
3077 : if (kSimpleFPAliasing || !check_fp_aliasing()) {
3078 0 : if (inactive->assigned_register() != reg) continue;
3079 : } else {
3080 : bool conflict = inactive->assigned_register() == reg;
3081 : if (!conflict) {
3082 : int alias_base_index = -1;
3083 : int aliases = data()->config()->GetAliases(range->representation(), reg,
3084 : inactive->representation(),
3085 : &alias_base_index);
3086 : DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
3087 : while (aliases-- && !conflict) {
3088 : int aliased_reg = alias_base_index + aliases;
3089 : if (aliased_reg == reg) {
3090 : conflict = true;
3091 : }
3092 : }
3093 : }
3094 : if (!conflict) continue;
3095 : }
3096 0 : for (auto interval = inactive->first_interval(); interval != nullptr;
3097 : interval = interval->next()) {
3098 0 : if (interval->start() > new_end) break;
3099 0 : if (interval->end() <= range->Start()) continue;
3100 0 : if (new_end > interval->start()) new_end = interval->start();
3101 : }
3102 : }
3103 0 : if (new_end != range->End()) {
3104 0 : TRACE("Found new end for %d:%d at %d\n", range->TopLevel()->vreg(),
3105 : range->relative_id(), new_end.value());
3106 0 : LiveRange* tail = SplitRangeAt(range, new_end);
3107 0 : AddToUnhandled(tail);
3108 : }
3109 0 : SetLiveRangeAssignedRegister(range, reg);
3110 0 : return range;
3111 : }
3112 :
3113 0 : void LinearScanAllocator::ReloadLiveRanges(RangeWithRegisterSet& to_be_live,
3114 : LifetimePosition position) {
3115 : // Assumption: All ranges in {to_be_live} are currently spilled and there are
3116 : // no conflicting registers in the active ranges.
3117 : // The former is ensured by SpillNotLiveRanges, the latter is by construction
3118 : // of the to_be_live set.
3119 0 : for (RangeWithRegister range_with_register : to_be_live) {
3120 0 : TopLevelLiveRange* range = range_with_register.range;
3121 : int reg = range_with_register.expected_register;
3122 0 : LiveRange* to_resurrect = range->GetChildCovers(position);
3123 0 : if (to_resurrect == nullptr) {
3124 : // While the range was life until the end of the predecessor block, it is
3125 : // not live in this block. Either there is a lifetime gap or the range
3126 : // died.
3127 0 : TRACE("No candidate for %d at %d\n", range->vreg(), position.value());
3128 : } else {
3129 : // We might be resurrecting a range that we spilled until its next use
3130 : // before. In such cases, we have to unsplit it before processing as
3131 : // otherwise we might get register changes from one range to the other
3132 : // in the middle of blocks.
3133 : // If there is a gap between this range and the next, we can just keep
3134 : // it as a register change won't hurt.
3135 0 : MaybeUndoPreviousSplit(to_resurrect);
3136 0 : if (to_resurrect->Start() == position) {
3137 : // This range already starts at this block. It might have been spilled,
3138 : // so we have to unspill it. Otherwise, it is already in the unhandled
3139 : // queue waiting for processing.
3140 : DCHECK(!to_resurrect->HasRegisterAssigned());
3141 0 : TRACE("Reload %d:%d starting at %d itself\n", range->vreg(),
3142 : to_resurrect->relative_id(), position.value());
3143 0 : if (to_resurrect->spilled()) {
3144 : to_resurrect->Unspill();
3145 0 : AddToUnhandled(to_resurrect);
3146 : } else {
3147 : // Assign the preassigned register if we know. Otherwise, nothing to
3148 : // do as already in unhandeled.
3149 0 : if (reg != kUnassignedRegister) {
3150 : auto erased_cnt = unhandled_live_ranges().erase(to_resurrect);
3151 : DCHECK_EQ(erased_cnt, 1);
3152 : USE(erased_cnt);
3153 : // We know that there is no conflict with active ranges, so just
3154 : // assign the register to the range.
3155 0 : to_resurrect = AssignRegisterOnReload(to_resurrect, reg);
3156 0 : AddToActive(to_resurrect);
3157 : }
3158 : }
3159 : } else {
3160 : // This range was spilled before. We have to split it and schedule the
3161 : // second part for allocation (or assign the register if we know).
3162 : DCHECK(to_resurrect->spilled());
3163 0 : LiveRange* split = SplitRangeAt(to_resurrect, position);
3164 0 : TRACE("Reload %d:%d starting at %d as %d\n", range->vreg(),
3165 : to_resurrect->relative_id(), split->Start().value(),
3166 : split->relative_id());
3167 : DCHECK_NE(split, to_resurrect);
3168 0 : if (reg != kUnassignedRegister) {
3169 : // We know that there is no conflict with active ranges, so just
3170 : // assign the register to the range.
3171 0 : split = AssignRegisterOnReload(split, reg);
3172 0 : AddToActive(split);
3173 : } else {
3174 : // Let normal register assignment find a suitable register.
3175 0 : AddToUnhandled(split);
3176 : }
3177 : }
3178 : }
3179 : }
3180 0 : }
3181 :
3182 0 : RpoNumber LinearScanAllocator::ChooseOneOfTwoPredecessorStates(
3183 : InstructionBlock* current_block, LifetimePosition boundary) {
3184 : using SmallRangeVector =
3185 : base::SmallVector<TopLevelLiveRange*,
3186 : RegisterConfiguration::kMaxRegisters>;
3187 : // Pick the state that would generate the least spill/reloads.
3188 : // Compute vectors of ranges with imminent use for both sides.
3189 : // As GetChildCovers is cached, it is cheaper to repeatedly
3190 : // call is rather than compute a shared set first.
3191 0 : auto& left = data()->GetSpillState(current_block->predecessors()[0]);
3192 : auto& right = data()->GetSpillState(current_block->predecessors()[1]);
3193 : SmallRangeVector left_used;
3194 0 : for (const auto item : left) {
3195 0 : LiveRange* at_next_block = item->TopLevel()->GetChildCovers(boundary);
3196 0 : if (at_next_block != nullptr &&
3197 0 : at_next_block->NextUsePositionRegisterIsBeneficial(boundary) !=
3198 : nullptr) {
3199 0 : left_used.emplace_back(item->TopLevel());
3200 : }
3201 : }
3202 : SmallRangeVector right_used;
3203 0 : for (const auto item : right) {
3204 0 : LiveRange* at_next_block = item->TopLevel()->GetChildCovers(boundary);
3205 0 : if (at_next_block != nullptr &&
3206 0 : at_next_block->NextUsePositionRegisterIsBeneficial(boundary) !=
3207 : nullptr) {
3208 0 : right_used.emplace_back(item->TopLevel());
3209 : }
3210 : }
3211 0 : if (left_used.empty() && right_used.empty()) {
3212 : // There are no beneficial register uses. Look at any use at
3213 : // all. We do not account for all uses, like flowing into a phi.
3214 : // So we just look at ranges still being live.
3215 0 : TRACE("Looking at only uses\n");
3216 0 : for (const auto item : left) {
3217 0 : LiveRange* at_next_block = item->TopLevel()->GetChildCovers(boundary);
3218 0 : if (at_next_block != nullptr &&
3219 0 : at_next_block->NextUsePosition(boundary) != nullptr) {
3220 0 : left_used.emplace_back(item->TopLevel());
3221 : }
3222 : }
3223 0 : for (const auto item : right) {
3224 0 : LiveRange* at_next_block = item->TopLevel()->GetChildCovers(boundary);
3225 0 : if (at_next_block != nullptr &&
3226 0 : at_next_block->NextUsePosition(boundary) != nullptr) {
3227 0 : right_used.emplace_back(item->TopLevel());
3228 : }
3229 : }
3230 : }
3231 : // Now left_used and right_used contains those ranges that matter.
3232 : // Count which side matches this most.
3233 0 : TRACE("Vote went %zu vs %zu\n", left_used.size(), right_used.size());
3234 0 : return left_used.size() > right_used.size()
3235 0 : ? current_block->predecessors()[0]
3236 0 : : current_block->predecessors()[1];
3237 : }
3238 :
3239 0 : void LinearScanAllocator::ComputeStateFromManyPredecessors(
3240 : InstructionBlock* current_block, RangeWithRegisterSet* to_be_live) {
3241 : struct Vote {
3242 : size_t count;
3243 : int used_registers[RegisterConfiguration::kMaxRegisters];
3244 : };
3245 0 : ZoneMap<TopLevelLiveRange*, Vote> counts(data()->allocation_zone());
3246 : int deferred_blocks = 0;
3247 0 : for (RpoNumber pred : current_block->predecessors()) {
3248 0 : if (!ConsiderBlockForControlFlow(current_block, pred)) {
3249 : // Back edges of a loop count as deferred here too.
3250 0 : deferred_blocks++;
3251 0 : continue;
3252 : }
3253 : const auto& pred_state = data()->GetSpillState(pred);
3254 0 : for (LiveRange* range : pred_state) {
3255 : // We might have spilled the register backwards, so the range we
3256 : // stored might have lost its register. Ignore those.
3257 0 : if (!range->HasRegisterAssigned()) continue;
3258 0 : TopLevelLiveRange* toplevel = range->TopLevel();
3259 : auto previous = counts.find(toplevel);
3260 0 : if (previous == counts.end()) {
3261 0 : auto result = counts.emplace(std::make_pair(toplevel, Vote{1, {0}}));
3262 0 : CHECK(result.second);
3263 0 : result.first->second.used_registers[range->assigned_register()]++;
3264 : } else {
3265 0 : previous->second.count++;
3266 0 : previous->second.used_registers[range->assigned_register()]++;
3267 : }
3268 : }
3269 : }
3270 :
3271 : // Choose the live ranges from the majority.
3272 : const size_t majority =
3273 0 : (current_block->PredecessorCount() + 2 - deferred_blocks) / 2;
3274 0 : bool taken_registers[RegisterConfiguration::kMaxRegisters] = {0};
3275 : auto assign_to_live = [this, counts, majority](
3276 : std::function<bool(TopLevelLiveRange*)> filter,
3277 : RangeWithRegisterSet* to_be_live,
3278 0 : bool* taken_registers) {
3279 0 : for (const auto& val : counts) {
3280 0 : if (!filter(val.first)) continue;
3281 0 : if (val.second.count >= majority) {
3282 : int register_max = 0;
3283 0 : int reg = kUnassignedRegister;
3284 0 : for (int idx = 0; idx < RegisterConfiguration::kMaxRegisters; idx++) {
3285 0 : int uses = val.second.used_registers[idx];
3286 0 : if (uses > register_max) {
3287 0 : reg = idx;
3288 : register_max = val.second.used_registers[idx];
3289 0 : } else if (taken_registers[reg] && uses == register_max) {
3290 0 : reg = idx;
3291 : }
3292 : }
3293 0 : if (taken_registers[reg]) {
3294 0 : reg = kUnassignedRegister;
3295 : } else {
3296 0 : taken_registers[reg] = true;
3297 : }
3298 0 : to_be_live->emplace(val.first, reg);
3299 0 : TRACE("Reset %d as live due vote %zu in %s\n",
3300 : val.first->TopLevel()->vreg(), val.second.count,
3301 : reg == kUnassignedRegister ? "unassigned" : RegisterName(reg));
3302 : }
3303 : }
3304 0 : };
3305 : // First round, process fixed registers, as these have precedence.
3306 : // There is only one fixed range per register, so we cannot have
3307 : // conflicts.
3308 0 : assign_to_live([](TopLevelLiveRange* r) { return r->IsFixed(); }, to_be_live,
3309 0 : taken_registers);
3310 : // Second round, process the rest.
3311 0 : assign_to_live([](TopLevelLiveRange* r) { return !r->IsFixed(); }, to_be_live,
3312 0 : taken_registers);
3313 0 : }
3314 :
3315 0 : bool LinearScanAllocator::ConsiderBlockForControlFlow(
3316 0 : InstructionBlock* current_block, RpoNumber predecessor) {
3317 : // We ignore predecessors on back edges when looking for control flow effects,
3318 : // as those lie in the future of allocation and we have no data yet. Also,
3319 : // deferred bocks are ignored on deferred to non-deferred boundaries, as we do
3320 : // not want them to influence allocation of non deferred code.
3321 0 : return (predecessor < current_block->rpo_number()) &&
3322 0 : (current_block->IsDeferred() ||
3323 0 : !code()->InstructionBlockAt(predecessor)->IsDeferred());
3324 : }
3325 :
3326 0 : bool LinearScanAllocator::BlockOrImmediatePredecessorIsDeferred(
3327 0 : const InstructionBlock* block) {
3328 0 : if (!FLAG_turbo_preprocess_ranges) return false;
3329 0 : if (block->IsDeferred()) return true;
3330 0 : if (block->PredecessorCount() == 0) return false;
3331 : bool pred_is_splinter = false;
3332 0 : for (auto pred : block->predecessors()) {
3333 0 : if (pred.IsNext(block->rpo_number())) {
3334 0 : pred_is_splinter = code()->InstructionBlockAt(pred)->IsDeferred();
3335 0 : break;
3336 : }
3337 : }
3338 0 : return pred_is_splinter;
3339 : }
3340 :
3341 2331622 : void LinearScanAllocator::AllocateRegisters() {
3342 : DCHECK(unhandled_live_ranges().empty());
3343 : DCHECK(active_live_ranges().empty());
3344 : DCHECK(inactive_live_ranges().empty());
3345 :
3346 127323257 : SplitAndSpillRangesDefinedByMemoryOperand();
3347 : data()->ResetSpillState();
3348 :
3349 2331619 : if (FLAG_trace_alloc) {
3350 0 : PrintRangeOverview(std::cout);
3351 : }
3352 :
3353 2331677 : const size_t live_ranges_size = data()->live_ranges().size();
3354 122660086 : for (TopLevelLiveRange* range : data()->live_ranges()) {
3355 235993720 : CHECK_EQ(live_ranges_size,
3356 : data()->live_ranges().size()); // TODO(neis): crbug.com/831822
3357 117997162 : if (!CanProcessRange(range)) continue;
3358 106898342 : for (LiveRange* to_add = range; to_add != nullptr;
3359 : to_add = to_add->next()) {
3360 53449386 : if (!to_add->spilled()) {
3361 36086679 : AddToUnhandled(to_add);
3362 : }
3363 : }
3364 : }
3365 :
3366 2331549 : if (mode() == GENERAL_REGISTERS) {
3367 38545121 : for (TopLevelLiveRange* current : data()->fixed_live_ranges()) {
3368 34261695 : if (current != nullptr) AddToInactive(current);
3369 : }
3370 : } else {
3371 3420388 : for (TopLevelLiveRange* current : data()->fixed_double_live_ranges()) {
3372 3040310 : if (current != nullptr) AddToInactive(current);
3373 : }
3374 : if (!kSimpleFPAliasing && check_fp_aliasing()) {
3375 : for (TopLevelLiveRange* current : data()->fixed_float_live_ranges()) {
3376 : if (current != nullptr) AddToInactive(current);
3377 : }
3378 : for (TopLevelLiveRange* current : data()->fixed_simd128_live_ranges()) {
3379 : if (current != nullptr) AddToInactive(current);
3380 : }
3381 : }
3382 : }
3383 :
3384 : RpoNumber last_block = RpoNumber::FromInt(0);
3385 : RpoNumber max_blocks =
3386 4663910 : RpoNumber::FromInt(code()->InstructionBlockCount() - 1);
3387 : LifetimePosition next_block_boundary =
3388 : LifetimePosition::InstructionFromInstructionIndex(
3389 : data()
3390 : ->code()
3391 : ->InstructionBlockAt(last_block)
3392 2331955 : ->last_instruction_index())
3393 : .NextFullStart();
3394 : // Process all ranges. We also need to ensure that we have seen all block
3395 : // boundaries. Linear scan might have assigned and spilled ranges before
3396 : // reaching the last block and hence we would ignore control flow effects for
3397 : // those. Not only does this produce a potentially bad assignment, it also
3398 : // breaks with the invariant that we undo spills that happen in deferred code
3399 : // when crossing a deferred/non-deferred boundary.
3400 49950487 : while (
3401 47618994 : !unhandled_live_ranges().empty() ||
3402 0 : (FLAG_turbo_control_flow_aware_allocation && last_block < max_blocks)) {
3403 0 : LiveRange* current = unhandled_live_ranges().empty()
3404 : ? nullptr
3405 90574873 : : *unhandled_live_ranges().begin();
3406 : LifetimePosition position =
3407 45287426 : current ? current->Start() : next_block_boundary;
3408 : #ifdef DEBUG
3409 : allocation_finger_ = position;
3410 : #endif
3411 45287426 : if (FLAG_turbo_control_flow_aware_allocation) {
3412 : // Check whether we just moved across a block boundary. This will trigger
3413 : // for the first range that is past the current boundary.
3414 0 : if (position >= next_block_boundary) {
3415 0 : TRACE("Processing boundary at %d leaving %d\n",
3416 : next_block_boundary.value(), last_block.ToInt());
3417 :
3418 : // Forward state to before block boundary
3419 0 : LifetimePosition end_of_block = next_block_boundary.PrevStart().End();
3420 0 : ForwardStateTo(end_of_block);
3421 :
3422 : // Remember this state.
3423 0 : InstructionBlock* current_block = data()->code()->GetInstructionBlock(
3424 0 : next_block_boundary.ToInstructionIndex());
3425 :
3426 : // Store current spill state (as the state at end of block). For
3427 : // simplicity, we store the active ranges, e.g., the live ranges that
3428 : // are not spilled.
3429 : data()->RememberSpillState(last_block, active_live_ranges());
3430 :
3431 0 : bool fallthrough = (current_block->PredecessorCount() == 1) &&
3432 : current_block->predecessors()[0].IsNext(
3433 : current_block->rpo_number());
3434 :
3435 : // Only reset the state if this was not a direct fallthrough. Otherwise
3436 : // control flow resolution will get confused (it does not expect changes
3437 : // across fallthrough edges.).
3438 :
3439 : // Also do not process deferred code boundaries. Splintering takes care
3440 : // of their control flow.
3441 : fallthrough =
3442 0 : fallthrough || BlockOrImmediatePredecessorIsDeferred(current_block);
3443 :
3444 0 : if (!fallthrough) {
3445 : #ifdef DEBUG
3446 : // Allow allocation at current position.
3447 : allocation_finger_ = next_block_boundary;
3448 : #endif
3449 :
3450 : // We are currently at next_block_boundary - 1. Move the state to the
3451 : // actual block boundary position. In particular, we have to
3452 : // reactivate inactive ranges so that they get rescheduled for
3453 : // allocation if they were not live at the predecessors.
3454 0 : ForwardStateTo(next_block_boundary);
3455 0 : RangeWithRegisterSet to_be_live(data()->allocation_zone());
3456 :
3457 : // If we end up deciding to use the state of the immediate
3458 : // predecessor, it is better not to perform a change. It would lead to
3459 : // the same outcome anyway.
3460 : bool no_change_required = false;
3461 :
3462 : auto pick_state_from = [this, current_block](
3463 : RpoNumber pred,
3464 0 : RangeWithRegisterSet* to_be_live) -> bool {
3465 0 : TRACE("Using information from B%d\n", pred.ToInt());
3466 0 : bool is_noop = pred.IsNext(current_block->rpo_number());
3467 0 : if (!is_noop) {
3468 0 : auto& spill_state = data()->GetSpillState(pred);
3469 0 : TRACE("Not a fallthrough. Adding %zu elements...\n",
3470 : spill_state.size());
3471 0 : for (const auto range : spill_state) {
3472 : // Filter out ranges that had their register stolen by backwards
3473 : // working spill heuristics. These have been spilled after the
3474 : // fact, so ignore them.
3475 0 : if (!range->HasRegisterAssigned()) continue;
3476 : to_be_live->emplace(range);
3477 : }
3478 : }
3479 0 : return is_noop;
3480 0 : };
3481 :
3482 : // Multiple cases here:
3483 : // 1) We have a single predecessor => this is a control flow split, so
3484 : // just restore the predecessor state.
3485 : // 2) We have two predecessors => this is a conditional, so break ties
3486 : // based on what to do based on forward uses, trying to benefit
3487 : // the same branch if in doubt (make one path fast).
3488 : // 3) We have many predecessors => this is a switch. Compute union
3489 : // based on majority, break ties by looking forward.
3490 0 : if (current_block->PredecessorCount() == 1) {
3491 0 : TRACE("Single predecessor for B%d\n",
3492 : current_block->rpo_number().ToInt());
3493 : no_change_required =
3494 0 : pick_state_from(current_block->predecessors()[0], &to_be_live);
3495 0 : } else if (current_block->PredecessorCount() == 2) {
3496 0 : TRACE("Two predecessors for B%d\n",
3497 : current_block->rpo_number().ToInt());
3498 : // If one of the branches does not contribute any information,
3499 : // e.g. because it is deferred or a back edge, we can short cut
3500 : // here right away.
3501 : RpoNumber chosen_predecessor = RpoNumber::Invalid();
3502 0 : if (!ConsiderBlockForControlFlow(
3503 0 : current_block, current_block->predecessors()[0])) {
3504 0 : chosen_predecessor = current_block->predecessors()[1];
3505 0 : } else if (!ConsiderBlockForControlFlow(
3506 0 : current_block, current_block->predecessors()[1])) {
3507 0 : chosen_predecessor = current_block->predecessors()[0];
3508 : } else {
3509 : chosen_predecessor = ChooseOneOfTwoPredecessorStates(
3510 0 : current_block, next_block_boundary);
3511 : }
3512 : no_change_required =
3513 0 : pick_state_from(chosen_predecessor, &to_be_live);
3514 :
3515 : } else {
3516 : // Merge at the end of, e.g., a switch.
3517 0 : ComputeStateFromManyPredecessors(current_block, &to_be_live);
3518 : }
3519 :
3520 0 : if (!no_change_required) {
3521 0 : SpillNotLiveRanges(to_be_live, next_block_boundary);
3522 0 : ReloadLiveRanges(to_be_live, next_block_boundary);
3523 : }
3524 :
3525 : // TODO(herhut) Check removal.
3526 : // Now forward to current position
3527 0 : ForwardStateTo(next_block_boundary);
3528 : }
3529 : // Update block information
3530 : last_block = current_block->rpo_number();
3531 : next_block_boundary = LifetimePosition::InstructionFromInstructionIndex(
3532 : current_block->last_instruction_index())
3533 : .NextFullStart();
3534 :
3535 : // We might have created new unhandled live ranges, so cycle around the
3536 : // loop to make sure we pick the top most range in unhandled for
3537 : // processing.
3538 : continue;
3539 : }
3540 : }
3541 :
3542 : DCHECK_NOT_NULL(current);
3543 :
3544 45287426 : TRACE("Processing interval %d:%d start=%d\n", current->TopLevel()->vreg(),
3545 : current->relative_id(), position.value());
3546 :
3547 : // Now we can erase current, as we are sure to process it.
3548 : unhandled_live_ranges().erase(unhandled_live_ranges().begin());
3549 :
3550 45287476 : if (current->IsTopLevel() && TryReuseSpillForPhi(current->TopLevel()))
3551 : continue;
3552 :
3553 45274775 : ForwardStateTo(position);
3554 :
3555 : DCHECK(!current->HasRegisterAssigned() && !current->spilled());
3556 :
3557 45274816 : ProcessCurrentRange(current);
3558 : }
3559 :
3560 2331568 : if (FLAG_trace_alloc) {
3561 0 : PrintRangeOverview(std::cout);
3562 : }
3563 2331568 : }
3564 :
3565 2470759 : bool LinearScanAllocator::TrySplitAndSpillSplinter(LiveRange* range) {
3566 : DCHECK(range->TopLevel()->IsSplinter());
3567 : // If we can spill the whole range, great. Otherwise, split above the
3568 : // first use needing a register and spill the top part.
3569 2470759 : const UsePosition* next_reg = range->NextRegisterPosition(range->Start());
3570 2470752 : if (next_reg == nullptr) {
3571 1826138 : Spill(range);
3572 1826142 : return true;
3573 644615 : } else if (range->FirstHintPosition() == nullptr) {
3574 : // If there was no hint, but we have a use position requiring a
3575 : // register, apply the hot path heuristics.
3576 : return false;
3577 496178 : } else if (next_reg->pos().PrevStart() > range->Start()) {
3578 253579 : LiveRange* tail = SplitRangeAt(range, next_reg->pos().PrevStart());
3579 253579 : AddToUnhandled(tail);
3580 253579 : Spill(range);
3581 253579 : return true;
3582 : }
3583 : return false;
3584 : }
3585 :
3586 39254464 : void LinearScanAllocator::SetLiveRangeAssignedRegister(LiveRange* range,
3587 : int reg) {
3588 41245140 : data()->MarkAllocated(range->representation(), reg);
3589 : range->set_assigned_register(reg);
3590 39254567 : range->SetUseHints(reg);
3591 : range->UpdateBundleRegister(reg);
3592 61280785 : if (range->IsTopLevel() && range->TopLevel()->is_phi()) {
3593 : data()->GetPhiMapValueFor(range->TopLevel())->set_assigned_register(reg);
3594 : }
3595 39254675 : }
3596 :
3597 39254602 : void LinearScanAllocator::AddToActive(LiveRange* range) {
3598 39254602 : TRACE("Add live range %d:%d in %s to active\n", range->TopLevel()->vreg(),
3599 : range->relative_id(), RegisterName(range->assigned_register()));
3600 39254602 : active_live_ranges().push_back(range);
3601 : next_active_ranges_change_ =
3602 117763734 : std::min(next_active_ranges_change_, range->NextEndAfter(range->Start()));
3603 39254578 : }
3604 :
3605 21033519 : void LinearScanAllocator::AddToInactive(LiveRange* range) {
3606 21033519 : TRACE("Add live range %d:%d to inactive\n", range->TopLevel()->vreg(),
3607 : range->relative_id());
3608 21033519 : inactive_live_ranges().push_back(range);
3609 : next_inactive_ranges_change_ = std::min(
3610 63100395 : next_inactive_ranges_change_, range->NextStartAfter(range->Start()));
3611 21033465 : }
3612 :
3613 45287694 : void LinearScanAllocator::AddToUnhandled(LiveRange* range) {
3614 135862464 : if (range == nullptr || range->IsEmpty()) return;
3615 : DCHECK(!range->HasRegisterAssigned() && !range->spilled());
3616 : DCHECK(allocation_finger_ <= range->Start());
3617 :
3618 45287888 : TRACE("Add live range %d:%d to unhandled\n", range->TopLevel()->vreg(),
3619 : range->relative_id());
3620 : unhandled_live_ranges().insert(range);
3621 : }
3622 :
3623 45493323 : ZoneVector<LiveRange*>::iterator LinearScanAllocator::ActiveToHandled(
3624 : const ZoneVector<LiveRange*>::iterator it) {
3625 45493323 : TRACE("Moving live range %d:%d from active to handled\n",
3626 : (*it)->TopLevel()->vreg(), (*it)->relative_id());
3627 45493323 : return active_live_ranges().erase(it);
3628 : }
3629 :
3630 24753463 : ZoneVector<LiveRange*>::iterator LinearScanAllocator::ActiveToInactive(
3631 : const ZoneVector<LiveRange*>::iterator it, LifetimePosition position) {
3632 24753463 : LiveRange* range = *it;
3633 24753463 : inactive_live_ranges().push_back(range);
3634 24753465 : TRACE("Moving live range %d:%d from active to inactive\n",
3635 : (range)->TopLevel()->vreg(), range->relative_id());
3636 : next_inactive_ranges_change_ =
3637 74260416 : std::min(next_inactive_ranges_change_, range->NextStartAfter(position));
3638 24753472 : return active_live_ranges().erase(it);
3639 : }
3640 :
3641 6623624 : ZoneVector<LiveRange*>::iterator LinearScanAllocator::InactiveToHandled(
3642 : ZoneVector<LiveRange*>::iterator it) {
3643 6623624 : TRACE("Moving live range %d:%d from inactive to handled\n",
3644 : (*it)->TopLevel()->vreg(), (*it)->relative_id());
3645 6623624 : return inactive_live_ranges().erase(it);
3646 : }
3647 :
3648 34377760 : ZoneVector<LiveRange*>::iterator LinearScanAllocator::InactiveToActive(
3649 : ZoneVector<LiveRange*>::iterator it, LifetimePosition position) {
3650 34377760 : LiveRange* range = *it;
3651 34377760 : active_live_ranges().push_back(range);
3652 34377766 : TRACE("Moving live range %d:%d from inactive to active\n",
3653 : range->TopLevel()->vreg(), range->relative_id());
3654 : next_active_ranges_change_ =
3655 103133304 : std::min(next_active_ranges_change_, range->NextEndAfter(position));
3656 34377768 : return inactive_live_ranges().erase(it);
3657 : }
3658 :
3659 45274713 : void LinearScanAllocator::ForwardStateTo(LifetimePosition position) {
3660 45274713 : if (position >= next_active_ranges_change_) {
3661 27042239 : next_active_ranges_change_ = LifetimePosition::MaxPosition();
3662 161409697 : for (auto it = active_live_ranges().begin();
3663 : it != active_live_ranges().end();) {
3664 107325341 : LiveRange* cur_active = *it;
3665 107325341 : if (cur_active->End() <= position) {
3666 44219152 : it = ActiveToHandled(it);
3667 63106189 : } else if (!cur_active->Covers(position)) {
3668 24753479 : it = ActiveToInactive(it, position);
3669 : } else {
3670 : next_active_ranges_change_ = std::min(
3671 76705970 : next_active_ranges_change_, cur_active->NextEndAfter(position));
3672 : ++it;
3673 : }
3674 : }
3675 : }
3676 :
3677 45274591 : if (position >= next_inactive_ranges_change_) {
3678 11101409 : next_inactive_ranges_change_ = LifetimePosition::MaxPosition();
3679 153642030 : for (auto it = inactive_live_ranges().begin();
3680 : it != inactive_live_ranges().end();) {
3681 131439188 : LiveRange* cur_inactive = *it;
3682 131439188 : if (cur_inactive->End() <= position) {
3683 6623382 : it = InactiveToHandled(it);
3684 124815806 : } else if (cur_inactive->Covers(position)) {
3685 34377755 : it = InactiveToActive(it, position);
3686 : } else {
3687 : next_inactive_ranges_change_ =
3688 : std::min(next_inactive_ranges_change_,
3689 180877154 : cur_inactive->NextStartAfter(position));
3690 : ++it;
3691 : }
3692 : }
3693 : }
3694 45274615 : }
3695 :
3696 0 : void LinearScanAllocator::GetFPRegisterSet(MachineRepresentation rep,
3697 : int* num_regs, int* num_codes,
3698 : const int** codes) const {
3699 : DCHECK(!kSimpleFPAliasing);
3700 0 : if (rep == MachineRepresentation::kFloat32) {
3701 0 : *num_regs = data()->config()->num_float_registers();
3702 0 : *num_codes = data()->config()->num_allocatable_float_registers();
3703 0 : *codes = data()->config()->allocatable_float_codes();
3704 0 : } else if (rep == MachineRepresentation::kSimd128) {
3705 0 : *num_regs = data()->config()->num_simd128_registers();
3706 0 : *num_codes = data()->config()->num_allocatable_simd128_registers();
3707 0 : *codes = data()->config()->allocatable_simd128_codes();
3708 : } else {
3709 0 : UNREACHABLE();
3710 : }
3711 0 : }
3712 :
3713 45275974 : void LinearScanAllocator::FindFreeRegistersForRange(
3714 : LiveRange* range, Vector<LifetimePosition> positions) {
3715 45275974 : int num_regs = num_registers();
3716 : int num_codes = num_allocatable_registers();
3717 : const int* codes = allocatable_register_codes();
3718 : MachineRepresentation rep = range->representation();
3719 : if (!kSimpleFPAliasing && (rep == MachineRepresentation::kFloat32 ||
3720 : rep == MachineRepresentation::kSimd128))
3721 : GetFPRegisterSet(rep, &num_regs, &num_codes, &codes);
3722 : DCHECK_GE(positions.length(), num_regs);
3723 :
3724 769675344 : for (int i = 0; i < num_regs; ++i) {
3725 1448798740 : positions[i] = LifetimePosition::MaxPosition();
3726 : }
3727 :
3728 234761960 : for (LiveRange* cur_active : active_live_ranges()) {
3729 : int cur_reg = cur_active->assigned_register();
3730 : if (kSimpleFPAliasing || !check_fp_aliasing()) {
3731 288420026 : positions[cur_reg] = LifetimePosition::GapFromInstructionIndex(0);
3732 144210013 : TRACE("Register %s is free until pos %d (1) due to %d\n",
3733 : RegisterName(cur_reg),
3734 : LifetimePosition::GapFromInstructionIndex(0).value(),
3735 : cur_active->TopLevel()->vreg());
3736 : } else {
3737 : int alias_base_index = -1;
3738 : int aliases = data()->config()->GetAliases(
3739 : cur_active->representation(), cur_reg, rep, &alias_base_index);
3740 : DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
3741 : while (aliases--) {
3742 : int aliased_reg = alias_base_index + aliases;
3743 : positions[aliased_reg] = LifetimePosition::GapFromInstructionIndex(0);
3744 : }
3745 : }
3746 : }
3747 :
3748 567675831 : for (LiveRange* cur_inactive : inactive_live_ranges()) {
3749 : DCHECK(cur_inactive->End() > range->Start());
3750 : int cur_reg = cur_inactive->assigned_register();
3751 : // No need to carry out intersections, when this register won't be
3752 : // interesting to this range anyway.
3753 : // TODO(mtrofin): extend to aliased ranges, too.
3754 477124641 : if ((kSimpleFPAliasing || !check_fp_aliasing()) &&
3755 477124641 : positions[cur_reg] < range->Start()) {
3756 401846190 : continue;
3757 : }
3758 :
3759 373178573 : LifetimePosition next_intersection = cur_inactive->FirstIntersection(range);
3760 373181164 : if (!next_intersection.IsValid()) continue;
3761 : if (kSimpleFPAliasing || !check_fp_aliasing()) {
3762 75281042 : positions[cur_reg] = Min(positions[cur_reg], next_intersection);
3763 75281042 : TRACE("Register %s is free until pos %d (2)\n", RegisterName(cur_reg),
3764 : Min(positions[cur_reg], next_intersection).value());
3765 : } else {
3766 : int alias_base_index = -1;
3767 : int aliases = data()->config()->GetAliases(
3768 : cur_inactive->representation(), cur_reg, rep, &alias_base_index);
3769 : DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
3770 : while (aliases--) {
3771 : int aliased_reg = alias_base_index + aliases;
3772 : positions[aliased_reg] = Min(positions[aliased_reg], next_intersection);
3773 : }
3774 : }
3775 : }
3776 45275217 : }
3777 :
3778 : // High-level register allocation summary:
3779 : //
3780 : // For regular, or hot (i.e. not splinter) ranges, we attempt to first
3781 : // allocate first the preferred (hint) register. If that is not possible,
3782 : // we find a register that's free, and allocate that. If that's not possible,
3783 : // we search for a register to steal from a range that was allocated. The
3784 : // goal is to optimize for throughput by avoiding register-to-memory
3785 : // moves, which are expensive.
3786 : //
3787 : // For splinters, the goal is to minimize the number of moves. First we try
3788 : // to allocate the preferred register (more discussion follows). Failing that,
3789 : // we bail out and spill as far as we can, unless the first use is at start,
3790 : // case in which we apply the same behavior as we do for regular ranges.
3791 : // If there is no hint, we apply the hot-path behavior.
3792 : //
3793 : // For the splinter, the hint register may come from:
3794 : //
3795 : // - the hot path (we set it at splintering time with SetHint). In this case, if
3796 : // we cannot offer the hint register, spilling is better because it's at most
3797 : // 1 move, while trying to find and offer another register is at least 1 move.
3798 : //
3799 : // - a constraint. If we cannot offer that register, it's because there is some
3800 : // interference. So offering the hint register up to the interference would
3801 : // result
3802 : // in a move at the interference, plus a move to satisfy the constraint. This is
3803 : // also the number of moves if we spill, with the potential of the range being
3804 : // already spilled and thus saving a move (the spill).
3805 : // Note that this can only be an input constraint, if it were an output one,
3806 : // the range wouldn't be a splinter because it means it'd be defined in a
3807 : // deferred
3808 : // block, and we don't mark those as splinters (they live in deferred blocks
3809 : // only).
3810 : //
3811 : // - a phi. The same analysis as in the case of the input constraint applies.
3812 : //
3813 72383611 : void LinearScanAllocator::ProcessCurrentRange(LiveRange* current) {
3814 : EmbeddedVector<LifetimePosition, RegisterConfiguration::kMaxRegisters>
3815 : free_until_pos;
3816 45274892 : FindFreeRegistersForRange(current, free_until_pos);
3817 45275227 : if (!TryAllocatePreferredReg(current, free_until_pos)) {
3818 27108719 : if (current->TopLevel()->IsSplinter()) {
3819 4550482 : if (TrySplitAndSpillSplinter(current)) return;
3820 : }
3821 25028993 : if (!TryAllocateFreeReg(current, free_until_pos)) {
3822 5214247 : AllocateBlockedReg(current);
3823 : }
3824 : }
3825 43195196 : if (current->HasRegisterAssigned()) {
3826 39254601 : AddToActive(current);
3827 : }
3828 : }
3829 :
3830 50684270 : bool LinearScanAllocator::TryAllocatePreferredReg(
3831 58347616 : LiveRange* current, const Vector<LifetimePosition>& free_until_pos) {
3832 : int hint_register;
3833 74223664 : if (current->FirstHintPosition(&hint_register) != nullptr ||
3834 : current->RegisterFromBundle(&hint_register)) {
3835 29173905 : TRACE(
3836 : "Found reg hint %s (free until [%d) for live range %d:%d (end %d[).\n",
3837 : RegisterName(hint_register), free_until_pos[hint_register].value(),
3838 : current->TopLevel()->vreg(), current->relative_id(),
3839 : current->End().value());
3840 :
3841 : // The desired register is free until the end of the current live range.
3842 58347616 : if (free_until_pos[hint_register] >= current->End()) {
3843 20740512 : TRACE("Assigning preferred reg %s to live range %d:%d\n",
3844 : RegisterName(hint_register), current->TopLevel()->vreg(),
3845 : current->relative_id());
3846 20740512 : SetLiveRangeAssignedRegister(current, hint_register);
3847 20740594 : return true;
3848 : }
3849 : }
3850 : return false;
3851 : }
3852 :
3853 28753008 : int LinearScanAllocator::PickRegisterThatIsAvailableLongest(
3854 220913194 : LiveRange* current, int hint_reg,
3855 349316495 : const Vector<LifetimePosition>& free_until_pos) {
3856 : int num_regs = 0; // used only for the call to GetFPRegisterSet.
3857 249666202 : int num_codes = num_allocatable_registers();
3858 : const int* codes = allocatable_register_codes();
3859 : MachineRepresentation rep = current->representation();
3860 : if (!kSimpleFPAliasing && (rep == MachineRepresentation::kFloat32 ||
3861 : rep == MachineRepresentation::kSimd128)) {
3862 : GetFPRegisterSet(rep, &num_regs, &num_codes, &codes);
3863 : }
3864 :
3865 : DCHECK_GE(free_until_pos.length(), num_codes);
3866 :
3867 : // Find the register which stays free for the longest time. Check for
3868 : // the hinted register first, as we might want to use that one. Only
3869 : // count full instructions for free ranges, as an instruction's internal
3870 : // positions do not help but might shadow a hinted register. This is
3871 : // typically the case for function calls, where all registered are
3872 : // cloberred after the call except for the argument registers, which are
3873 : // set before the call. Hence, the argument registers always get ignored,
3874 : // as their available time is shorter.
3875 28753008 : int reg = hint_reg == kUnassignedRegister ? codes[0] : hint_reg;
3876 378068879 : for (int i = 0; i < num_codes; ++i) {
3877 349316495 : int code = codes[i];
3878 : // Prefer registers that have no fixed uses to avoid blocking later hints.
3879 : // We use the first register that has no fixed uses to ensure we use
3880 : // byte addressable registers in ia32 first.
3881 349316495 : int candidate_free = free_until_pos[code].ToInstructionIndex();
3882 349316495 : int current_free = free_until_pos[reg].ToInstructionIndex();
3883 1032954915 : if (candidate_free > current_free ||
3884 555235097 : (candidate_free == current_free && reg != hint_reg &&
3885 302848985 : data()->HasFixedUse(current->representation(), reg) &&
3886 81935769 : !data()->HasFixedUse(current->representation(), code))) {
3887 : reg = code;
3888 : }
3889 : }
3890 28752384 : return reg;
3891 : }
3892 :
3893 25028847 : bool LinearScanAllocator::TryAllocateFreeReg(
3894 44843740 : LiveRange* current, const Vector<LifetimePosition>& free_until_pos) {
3895 : // Compute register hint, if such exists.
3896 25028847 : int hint_reg = kUnassignedRegister;
3897 25028847 : current->FirstHintPosition(&hint_reg) != nullptr ||
3898 25028953 : current->RegisterFromBundle(&hint_reg);
3899 :
3900 : int reg =
3901 25028953 : PickRegisterThatIsAvailableLongest(current, hint_reg, free_until_pos);
3902 :
3903 50057918 : LifetimePosition pos = free_until_pos[reg];
3904 :
3905 25028959 : if (pos <= current->Start()) {
3906 : // All registers are blocked.
3907 : return false;
3908 : }
3909 :
3910 19814781 : if (pos < current->End()) {
3911 : // Register reg is available at the range start but becomes blocked before
3912 : // the range end. Split current at position where it becomes blocked.
3913 5409281 : LiveRange* tail = SplitRangeAt(current, pos);
3914 5409281 : AddToUnhandled(tail);
3915 :
3916 : // Try to allocate preferred register once more.
3917 5409281 : if (TryAllocatePreferredReg(current, free_until_pos)) return true;
3918 : }
3919 :
3920 : // Register reg is available at the range start and is free until the range
3921 : // end.
3922 : DCHECK(pos >= current->End());
3923 17240595 : TRACE("Assigning free reg %s to live range %d:%d\n", RegisterName(reg),
3924 : current->TopLevel()->vreg(), current->relative_id());
3925 17240595 : SetLiveRangeAssignedRegister(current, reg);
3926 :
3927 17240613 : return true;
3928 : }
3929 :
3930 6487903 : void LinearScanAllocator::AllocateBlockedReg(LiveRange* current) {
3931 5214243 : UsePosition* register_use = current->NextRegisterPosition(current->Start());
3932 5214280 : if (register_use == nullptr) {
3933 : // There is no use in the current live range that requires a register.
3934 : // We can just spill it.
3935 1490775 : Spill(current);
3936 1490776 : return;
3937 : }
3938 :
3939 : MachineRepresentation rep = current->representation();
3940 :
3941 : // use_pos keeps track of positions a register/alias is used at.
3942 : // block_pos keeps track of positions where a register/alias is blocked
3943 : // from.
3944 : EmbeddedVector<LifetimePosition, RegisterConfiguration::kMaxRegisters>
3945 : use_pos(LifetimePosition::MaxPosition());
3946 : EmbeddedVector<LifetimePosition, RegisterConfiguration::kMaxRegisters>
3947 : block_pos(LifetimePosition::MaxPosition());
3948 :
3949 97450869 : for (LiveRange* range : active_live_ranges()) {
3950 : int cur_reg = range->assigned_register();
3951 : bool is_fixed_or_cant_spill =
3952 60596257 : range->TopLevel()->IsFixed() || !range->CanBeSpilled(current->Start());
3953 : if (kSimpleFPAliasing || !check_fp_aliasing()) {
3954 45001917 : if (is_fixed_or_cant_spill) {
3955 60926224 : block_pos[cur_reg] = use_pos[cur_reg] =
3956 30463112 : LifetimePosition::GapFromInstructionIndex(0);
3957 : } else {
3958 : DCHECK_NE(LifetimePosition::GapFromInstructionIndex(0),
3959 : block_pos[cur_reg]);
3960 14538805 : use_pos[cur_reg] =
3961 14538805 : range->NextLifetimePositionRegisterIsBeneficial(current->Start());
3962 : }
3963 : } else {
3964 : int alias_base_index = -1;
3965 : int aliases = data()->config()->GetAliases(
3966 : range->representation(), cur_reg, rep, &alias_base_index);
3967 : DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
3968 : while (aliases--) {
3969 : int aliased_reg = alias_base_index + aliases;
3970 : if (is_fixed_or_cant_spill) {
3971 : block_pos[aliased_reg] = use_pos[aliased_reg] =
3972 : LifetimePosition::GapFromInstructionIndex(0);
3973 : } else {
3974 : use_pos[aliased_reg] =
3975 : Min(block_pos[aliased_reg],
3976 : range->NextLifetimePositionRegisterIsBeneficial(
3977 : current->Start()));
3978 : }
3979 : }
3980 : }
3981 : }
3982 :
3983 41166648 : for (LiveRange* range : inactive_live_ranges()) {
3984 : DCHECK(range->End() > current->Start());
3985 : int cur_reg = range->assigned_register();
3986 16859853 : bool is_fixed = range->TopLevel()->IsFixed();
3987 :
3988 : // Don't perform costly intersections if they are guaranteed to not update
3989 : // block_pos or use_pos.
3990 : // TODO(mtrofin): extend to aliased ranges, too.
3991 : if ((kSimpleFPAliasing || !check_fp_aliasing())) {
3992 16859853 : if (is_fixed) {
3993 45976534 : if (block_pos[cur_reg] < range->Start()) continue;
3994 : } else {
3995 4192748 : if (use_pos[cur_reg] < range->Start()) continue;
3996 : }
3997 : }
3998 :
3999 14092621 : LifetimePosition next_intersection = range->FirstIntersection(current);
4000 14092611 : if (!next_intersection.IsValid()) continue;
4001 :
4002 : if (kSimpleFPAliasing || !check_fp_aliasing()) {
4003 410267 : if (is_fixed) {
4004 785886 : block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
4005 1178829 : use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
4006 : } else {
4007 34648 : use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
4008 : }
4009 : } else {
4010 : int alias_base_index = -1;
4011 : int aliases = data()->config()->GetAliases(
4012 : range->representation(), cur_reg, rep, &alias_base_index);
4013 : DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
4014 : while (aliases--) {
4015 : int aliased_reg = alias_base_index + aliases;
4016 : if (is_fixed) {
4017 : block_pos[aliased_reg] =
4018 : Min(block_pos[aliased_reg], next_intersection);
4019 : use_pos[aliased_reg] =
4020 : Min(block_pos[aliased_reg], use_pos[aliased_reg]);
4021 : } else {
4022 : use_pos[aliased_reg] = Min(use_pos[aliased_reg], next_intersection);
4023 : }
4024 : }
4025 : }
4026 : }
4027 :
4028 : // Compute register hint if it exists.
4029 3723466 : int hint_reg = kUnassignedRegister;
4030 3723466 : register_use->HintRegister(&hint_reg) ||
4031 3723463 : current->RegisterFromBundle(&hint_reg);
4032 3723463 : int reg = PickRegisterThatIsAvailableLongest(current, hint_reg, use_pos);
4033 :
4034 7446934 : if (use_pos[reg] < register_use->pos()) {
4035 : // If there is a gap position before the next register use, we can
4036 : // spill until there. The gap position will then fit the fill move.
4037 2449806 : if (LifetimePosition::ExistsGapPositionBetween(current->Start(),
4038 : register_use->pos())) {
4039 : SpillBetween(current, current->Start(), register_use->pos());
4040 : return;
4041 : }
4042 : }
4043 :
4044 : // We couldn't spill until the next register use. Split before the register
4045 : // is blocked, if applicable.
4046 2547320 : if (block_pos[reg] < current->End()) {
4047 : // Register becomes blocked before the current range end. Split before that
4048 : // position.
4049 : LiveRange* tail =
4050 14285 : SplitBetween(current, current->Start(), block_pos[reg].Start());
4051 14285 : AddToUnhandled(tail);
4052 : }
4053 :
4054 : // Register reg is not blocked for the whole range.
4055 : DCHECK(block_pos[reg] >= current->End());
4056 1273663 : TRACE("Assigning blocked reg %s to live range %d:%d\n", RegisterName(reg),
4057 : current->TopLevel()->vreg(), current->relative_id());
4058 1273663 : SetLiveRangeAssignedRegister(current, reg);
4059 :
4060 : // This register was not free. Thus we need to find and spill
4061 : // parts of active and inactive live regions that use the same register
4062 : // at the same lifetime positions as current.
4063 1273664 : SplitAndSpillIntersecting(current);
4064 : }
4065 :
4066 1273674 : void LinearScanAllocator::SplitAndSpillIntersecting(LiveRange* current) {
4067 : DCHECK(current->HasRegisterAssigned());
4068 : int reg = current->assigned_register();
4069 : LifetimePosition split_pos = current->Start();
4070 17861008 : for (auto it = active_live_ranges().begin();
4071 : it != active_live_ranges().end();) {
4072 15313667 : LiveRange* range = *it;
4073 : if (kSimpleFPAliasing || !check_fp_aliasing()) {
4074 15313667 : if (range->assigned_register() != reg) {
4075 : ++it;
4076 14039996 : continue;
4077 : }
4078 : } else {
4079 : if (!data()->config()->AreAliases(current->representation(), reg,
4080 : range->representation(),
4081 : range->assigned_register())) {
4082 : ++it;
4083 : continue;
4084 : }
4085 : }
4086 :
4087 1273671 : UsePosition* next_pos = range->NextRegisterPosition(current->Start());
4088 1273664 : LifetimePosition spill_pos = FindOptimalSpillingPos(range, split_pos);
4089 1273664 : if (next_pos == nullptr) {
4090 204856 : SpillAfter(range, spill_pos);
4091 : } else {
4092 : // When spilling between spill_pos and next_pos ensure that the range
4093 : // remains spilled at least until the start of the current live range.
4094 : // This guarantees that we will not introduce new unhandled ranges that
4095 : // start before the current range as this violates allocation invariants
4096 : // and will lead to an inconsistent state of active and inactive
4097 : // live-ranges: ranges are allocated in order of their start positions,
4098 : // ranges are retired from active/inactive when the start of the
4099 : // current live-range is larger than their end.
4100 : DCHECK(LifetimePosition::ExistsGapPositionBetween(current->Start(),
4101 : next_pos->pos()));
4102 1068808 : SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos());
4103 : }
4104 1273664 : it = ActiveToHandled(it);
4105 : }
4106 :
4107 17339189 : for (auto it = inactive_live_ranges().begin();
4108 : it != inactive_live_ranges().end();) {
4109 15070755 : LiveRange* range = *it;
4110 : DCHECK(range->End() > current->Start());
4111 14791858 : if (range->TopLevel()->IsFixed()) {
4112 : ++it;
4113 : continue;
4114 : }
4115 : if (kSimpleFPAliasing || !check_fp_aliasing()) {
4116 278897 : if (range->assigned_register() != reg) {
4117 : ++it;
4118 : continue;
4119 : }
4120 : } else {
4121 : if (!data()->config()->AreAliases(current->representation(), reg,
4122 : range->representation(),
4123 : range->assigned_register())) {
4124 : ++it;
4125 : continue;
4126 : }
4127 : }
4128 :
4129 20916 : LifetimePosition next_intersection = range->FirstIntersection(current);
4130 20913 : if (next_intersection.IsValid()) {
4131 108 : UsePosition* next_pos = range->NextRegisterPosition(current->Start());
4132 108 : if (next_pos == nullptr) {
4133 63 : SpillAfter(range, split_pos);
4134 : } else {
4135 : next_intersection = Min(next_intersection, next_pos->pos());
4136 : SpillBetween(range, split_pos, next_intersection);
4137 : }
4138 108 : it = InactiveToHandled(it);
4139 : } else {
4140 : ++it;
4141 : }
4142 : }
4143 1273664 : }
4144 :
4145 23916798 : bool LinearScanAllocator::TryReuseSpillForPhi(TopLevelLiveRange* range) {
4146 23916798 : if (!range->is_phi()) return false;
4147 :
4148 : DCHECK(!range->HasSpillOperand());
4149 : // Check how many operands belong to the same bundle as the output.
4150 2015324 : LiveRangeBundle* out_bundle = range->get_bundle();
4151 2015314 : RegisterAllocationData::PhiMapValue* phi_map_value =
4152 6861759 : data()->GetPhiMapValueFor(range);
4153 : const PhiInstruction* phi = phi_map_value->phi();
4154 : const InstructionBlock* block = phi_map_value->block();
4155 : // Count the number of spilled operands.
4156 : size_t spilled_count = 0;
4157 13723520 : for (size_t i = 0; i < phi->operands().size(); i++) {
4158 4846435 : int op = phi->operands()[i];
4159 5718415 : LiveRange* op_range = data()->GetOrCreateLiveRangeFor(op);
4160 4846446 : if (!op_range->TopLevel()->HasSpillRange()) continue;
4161 285134 : const InstructionBlock* pred =
4162 570268 : code()->InstructionBlockAt(block->predecessors()[i]);
4163 : LifetimePosition pred_end =
4164 : LifetimePosition::InstructionFromInstructionIndex(
4165 : pred->last_instruction_index());
4166 1650070 : while (op_range != nullptr && !op_range->CanCover(pred_end)) {
4167 : op_range = op_range->next();
4168 : }
4169 754490 : if (op_range != nullptr && op_range->spilled() &&
4170 : op_range->get_bundle() == out_bundle) {
4171 146779 : spilled_count++;
4172 : }
4173 : }
4174 :
4175 : // Only continue if more than half of the operands are spilled to the same
4176 : // slot (because part of same bundle).
4177 2015325 : if (spilled_count * 2 <= phi->operands().size()) {
4178 : return false;
4179 : }
4180 :
4181 : // If the range does not need register soon, spill it to the merged
4182 : // spill range.
4183 : LifetimePosition next_pos = range->Start();
4184 13801 : if (next_pos.IsGapPosition()) next_pos = next_pos.NextStart();
4185 13801 : UsePosition* pos = range->NextUsePositionRegisterIsBeneficial(next_pos);
4186 13801 : if (pos == nullptr) {
4187 7436 : Spill(range);
4188 7436 : return true;
4189 6365 : } else if (pos->pos() > range->Start().NextStart()) {
4190 : SpillBetween(range, range->Start(), pos->pos());
4191 5318 : return true;
4192 : }
4193 : return false;
4194 : }
4195 :
4196 204919 : void LinearScanAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
4197 204919 : LiveRange* second_part = SplitRangeAt(range, pos);
4198 204919 : Spill(second_part);
4199 204919 : }
4200 :
4201 0 : void LinearScanAllocator::SpillBetween(LiveRange* range, LifetimePosition start,
4202 : LifetimePosition end) {
4203 2455170 : SpillBetweenUntil(range, start, start, end);
4204 0 : }
4205 :
4206 3523975 : void LinearScanAllocator::SpillBetweenUntil(LiveRange* range,
4207 : LifetimePosition start,
4208 : LifetimePosition until,
4209 : LifetimePosition end) {
4210 3523975 : CHECK(start < end);
4211 7047908 : LiveRange* second_part = SplitRangeAt(range, start);
4212 :
4213 3523978 : if (second_part->Start() < end) {
4214 : // The split result intersects with [start, end[.
4215 : // Split it at position between ]start+1, end[, spill the middle part
4216 : // and put the rest to unhandled.
4217 :
4218 : // Make sure that the third part always starts after the start of the
4219 : // second part, as that likely is the current position of the register
4220 : // allocator and we cannot add ranges to unhandled that start before
4221 : // the current position.
4222 : LifetimePosition split_start = Max(second_part->Start().End(), until);
4223 :
4224 : // If end is an actual use (which it typically is) we have to split
4225 : // so that there is a gap before so that we have space for moving the
4226 : // value into its position.
4227 : // However, if we have no choice, split right where asked.
4228 3523933 : LifetimePosition third_part_end = Max(split_start, end.PrevStart().End());
4229 : // Instead of spliting right after or even before the block boundary,
4230 : // split on the boumndary to avoid extra moves.
4231 3523933 : if (data()->IsBlockBoundary(end.Start())) {
4232 0 : third_part_end = Max(split_start, end.Start());
4233 : }
4234 :
4235 : LiveRange* third_part =
4236 3523931 : SplitBetween(second_part, split_start, third_part_end);
4237 :
4238 : DCHECK(third_part != second_part);
4239 :
4240 3523934 : Spill(second_part);
4241 3523934 : AddToUnhandled(third_part);
4242 : } else {
4243 : // The split result does not intersect with [start, end[.
4244 : // Nothing to spill. Just put it to unhandled as whole.
4245 45 : AddToUnhandled(second_part);
4246 : }
4247 3523976 : }
4248 :
4249 2141547 : SpillSlotLocator::SpillSlotLocator(RegisterAllocationData* data)
4250 2141547 : : data_(data) {}
4251 :
4252 2141743 : void SpillSlotLocator::LocateSpillSlots() {
4253 2141743 : const InstructionSequence* code = data()->code();
4254 2141743 : const size_t live_ranges_size = data()->live_ranges().size();
4255 93351188 : for (TopLevelLiveRange* range : data()->live_ranges()) {
4256 161427864 : CHECK_EQ(live_ranges_size,
4257 : data()->live_ranges().size()); // TODO(neis): crbug.com/831822
4258 118455548 : if (range == nullptr || range->IsEmpty()) continue;
4259 : // We care only about ranges which spill in the frame.
4260 40527128 : if (!range->HasSpillRange() || range->IsSpilledOnlyInDeferredBlocks()) {
4261 : continue;
4262 : }
4263 : TopLevelLiveRange::SpillMoveInsertionList* spills =
4264 : range->GetSpillMoveInsertionLocations();
4265 : DCHECK_NOT_NULL(spills);
4266 7477606 : for (; spills != nullptr; spills = spills->next) {
4267 3740216 : code->GetInstructionBlock(spills->gap_index)->mark_needs_frame();
4268 : }
4269 : }
4270 2141576 : }
4271 :
4272 4282980 : OperandAssigner::OperandAssigner(RegisterAllocationData* data) : data_(data) {}
4273 :
4274 2141226 : void OperandAssigner::AssignSpillSlots() {
4275 84996135 : for (auto range : data()->live_ranges()) {
4276 80713377 : if (range != nullptr && range->get_bundle() != nullptr) {
4277 5592557 : range->get_bundle()->MergeSpillRanges();
4278 : }
4279 : }
4280 : ZoneVector<SpillRange*>& spill_ranges = data()->spill_ranges();
4281 : // Merge disjoint spill ranges
4282 84997112 : for (size_t i = 0; i < spill_ranges.size(); ++i) {
4283 1236302576 : SpillRange* range = spill_ranges[i];
4284 40357060 : if (range == nullptr) continue;
4285 4616524 : if (range->IsEmpty()) continue;
4286 2306893920 : for (size_t j = i + 1; j < spill_ranges.size(); ++j) {
4287 1150653331 : SpillRange* other = spill_ranges[j];
4288 1813424893 : if (other != nullptr && !other->IsEmpty()) {
4289 258633202 : range->TryMerge(other);
4290 : }
4291 : }
4292 : }
4293 : // Allocate slots for the merged spill ranges.
4294 88369762 : for (SpillRange* range : spill_ranges) {
4295 44973537 : if (range == nullptr || range->IsEmpty()) continue;
4296 : // Allocate a new operand referring to the spill slot.
4297 2793676 : if (!range->HasSlot()) {
4298 2720538 : int index = data()->frame()->AllocateSpillSlot(range->byte_width());
4299 : range->set_assigned_slot(index);
4300 : }
4301 : }
4302 2141524 : }
4303 :
4304 2141632 : void OperandAssigner::CommitAssignment() {
4305 2141632 : const size_t live_ranges_size = data()->live_ranges().size();
4306 156501913 : for (TopLevelLiveRange* top_range : data()->live_ranges()) {
4307 161427144 : CHECK_EQ(live_ranges_size,
4308 : data()->live_ranges().size()); // TODO(neis): crbug.com/831822
4309 199169433 : if (top_range == nullptr || top_range->IsEmpty()) continue;
4310 : InstructionOperand spill_operand;
4311 35910477 : if (top_range->HasSpillOperand()) {
4312 13620373 : spill_operand = *top_range->TopLevel()->GetSpillOperand();
4313 22290104 : } else if (top_range->TopLevel()->HasSpillRange()) {
4314 4616498 : spill_operand = top_range->TopLevel()->GetSpillRangeOperand();
4315 : }
4316 35910477 : if (top_range->is_phi()) {
4317 : data()->GetPhiMapValueFor(top_range)->CommitAssignment(
4318 4197762 : top_range->GetAssignedOperand());
4319 : }
4320 117120703 : for (LiveRange* range = top_range; range != nullptr;
4321 : range = range->next()) {
4322 81210007 : InstructionOperand assigned = range->GetAssignedOperand();
4323 : DCHECK(!assigned.IsUnallocated());
4324 81210013 : range->ConvertUsesToOperand(assigned, spill_operand);
4325 : }
4326 :
4327 35910696 : if (!spill_operand.IsInvalid()) {
4328 : // If this top level range has a child spilled in a deferred block, we use
4329 : // the range and control flow connection mechanism instead of spilling at
4330 : // definition. Refer to the ConnectLiveRanges and ResolveControlFlow
4331 : // phases. Normally, when we spill at definition, we do not insert a
4332 : // connecting move when a successor child range is spilled - because the
4333 : // spilled range picks up its value from the slot which was assigned at
4334 : // definition. For ranges that are determined to spill only in deferred
4335 : // blocks, we let ConnectLiveRanges and ResolveControlFlow find the blocks
4336 : // where a spill operand is expected, and then finalize by inserting the
4337 : // spills in the deferred blocks dominators.
4338 18236902 : if (!top_range->IsSpilledOnlyInDeferredBlocks()) {
4339 : // Spill at definition if the range isn't spilled only in deferred
4340 : // blocks.
4341 : top_range->CommitSpillMoves(
4342 : data()->code(), spill_operand,
4343 50194786 : top_range->has_slot_use() || top_range->spilled());
4344 : }
4345 : }
4346 : }
4347 2141560 : }
4348 :
4349 2141577 : ReferenceMapPopulator::ReferenceMapPopulator(RegisterAllocationData* data)
4350 2141577 : : data_(data) {}
4351 :
4352 0 : bool ReferenceMapPopulator::SafePointsAreInOrder() const {
4353 : int safe_point = 0;
4354 0 : for (ReferenceMap* map : *data()->code()->reference_maps()) {
4355 0 : if (safe_point > map->instruction_position()) return false;
4356 : safe_point = map->instruction_position();
4357 : }
4358 0 : return true;
4359 : }
4360 :
4361 2141865 : void ReferenceMapPopulator::PopulateReferenceMaps() {
4362 : DCHECK(SafePointsAreInOrder());
4363 : // Map all delayed references.
4364 4283730 : for (RegisterAllocationData::DelayedReference& delayed_reference :
4365 : data()->delayed_references()) {
4366 : delayed_reference.map->RecordReference(
4367 0 : AllocatedOperand::cast(*delayed_reference.operand));
4368 : }
4369 : // Iterate over all safe point positions and record a pointer
4370 : // for all spilled live ranges at this point.
4371 : int last_range_start = 0;
4372 2141865 : const ReferenceMapDeque* reference_maps = data()->code()->reference_maps();
4373 : ReferenceMapDeque::const_iterator first_it = reference_maps->begin();
4374 2141865 : const size_t live_ranges_size = data()->live_ranges().size();
4375 178610016 : for (TopLevelLiveRange* range : data()->live_ranges()) {
4376 161426772 : CHECK_EQ(live_ranges_size,
4377 : data()->live_ranges().size()); // TODO(neis): crbug.com/831822
4378 80712703 : if (range == nullptr) continue;
4379 : // Skip non-reference values.
4380 37741749 : if (!data()->IsReference(range)) continue;
4381 : // Skip empty live ranges.
4382 16308293 : if (range->IsEmpty()) continue;
4383 14509257 : if (range->has_preassigned_slot()) continue;
4384 :
4385 : // Find the extent of the range and its children.
4386 : int start = range->Start().ToInstructionIndex();
4387 : int end = 0;
4388 54968598 : for (LiveRange* cur = range; cur != nullptr; cur = cur->next()) {
4389 : LifetimePosition this_end = cur->End();
4390 40532388 : if (this_end.ToInstructionIndex() > end)
4391 : end = this_end.ToInstructionIndex();
4392 : DCHECK(cur->Start().ToInstructionIndex() >= start);
4393 : }
4394 :
4395 : // Most of the ranges are in order, but not all. Keep an eye on when they
4396 : // step backwards and reset the first_it so we don't miss any safe points.
4397 14436210 : if (start < last_range_start) first_it = reference_maps->begin();
4398 : last_range_start = start;
4399 :
4400 : // Step across all the safe points that are before the start of this range,
4401 : // recording how far we step in order to save doing this for the next range.
4402 890191106 : for (; first_it != reference_maps->end(); ++first_it) {
4403 874774249 : ReferenceMap* map = *first_it;
4404 874774249 : if (map->instruction_position() >= start) break;
4405 : }
4406 :
4407 : InstructionOperand spill_operand;
4408 20316177 : if (((range->HasSpillOperand() &&
4409 27824974 : !range->GetSpillOperand()->IsConstant()) ||
4410 : range->HasSpillRange())) {
4411 3824029 : if (range->HasSpillOperand()) {
4412 1047448 : spill_operand = *range->GetSpillOperand();
4413 : } else {
4414 : spill_operand = range->GetSpillRangeOperand();
4415 : }
4416 : DCHECK(spill_operand.IsStackSlot());
4417 : DCHECK(CanBeTaggedPointer(
4418 : AllocatedOperand::cast(spill_operand).representation()));
4419 : }
4420 :
4421 68700075 : LiveRange* cur = range;
4422 : // Step through the safe points to see whether they are in the range.
4423 75942619 : for (auto it = first_it; it != reference_maps->end(); ++it) {
4424 57222290 : ReferenceMap* map = *it;
4425 : int safe_point = map->instruction_position();
4426 :
4427 : // The safe points are sorted so we can stop searching here.
4428 57222290 : if (safe_point - 1 > end) break;
4429 :
4430 : // Advance to the next active range that covers the current
4431 : // safe point position.
4432 : LifetimePosition safe_point_pos =
4433 : LifetimePosition::InstructionFromInstructionIndex(safe_point);
4434 :
4435 : // Search for the child range (cur) that covers safe_point_pos. If we
4436 : // don't find it before the children pass safe_point_pos, keep cur at
4437 : // the last child, because the next safe_point_pos may be covered by cur.
4438 : // This may happen if cur has more than one interval, and the current
4439 : // safe_point_pos is in between intervals.
4440 : // For that reason, cur may be at most the last child.
4441 : DCHECK_NOT_NULL(cur);
4442 : DCHECK(safe_point_pos >= cur->Start() || range == cur);
4443 : bool found = false;
4444 152159004 : while (!found) {
4445 68699804 : if (cur->Covers(safe_point_pos)) {
4446 : found = true;
4447 : } else {
4448 : LiveRange* next = cur->next();
4449 57152492 : if (next == nullptr || next->Start() > safe_point_pos) {
4450 : break;
4451 : }
4452 : cur = next;
4453 : }
4454 : }
4455 :
4456 47070200 : if (!found) {
4457 : continue;
4458 : }
4459 :
4460 : // Check if the live range is spilled and the safe point is after
4461 : // the spill position.
4462 : int spill_index = range->IsSpilledOnlyInDeferredBlocks()
4463 : ? cur->Start().ToInstructionIndex()
4464 36389272 : : range->spill_start_index();
4465 :
4466 36389272 : if (!spill_operand.IsInvalid() && safe_point >= spill_index) {
4467 23772149 : TRACE("Pointer for range %d (spilled at %d) at safe point %d\n",
4468 : range->vreg(), spill_index, safe_point);
4469 23772149 : map->RecordReference(AllocatedOperand::cast(spill_operand));
4470 : }
4471 :
4472 36389271 : if (!cur->spilled()) {
4473 0 : TRACE(
4474 : "Pointer in register for range %d:%d (start at %d) "
4475 : "at safe point %d\n",
4476 : range->vreg(), cur->relative_id(), cur->Start().value(),
4477 : safe_point);
4478 0 : InstructionOperand operand = cur->GetAssignedOperand();
4479 : DCHECK(!operand.IsStackSlot());
4480 : DCHECK(CanBeTaggedPointer(
4481 : AllocatedOperand::cast(operand).representation()));
4482 0 : map->RecordReference(AllocatedOperand::cast(operand));
4483 : }
4484 : }
4485 : }
4486 2141422 : }
4487 :
4488 4283145 : LiveRangeConnector::LiveRangeConnector(RegisterAllocationData* data)
4489 4283145 : : data_(data) {}
4490 :
4491 0 : bool LiveRangeConnector::CanEagerlyResolveControlFlow(
4492 : const InstructionBlock* block) const {
4493 20864175 : if (block->PredecessorCount() != 1) return false;
4494 0 : return block->predecessors()[0].IsNext(block->rpo_number());
4495 : }
4496 :
4497 2141267 : void LiveRangeConnector::ResolveControlFlow(Zone* local_zone) {
4498 : // Lazily linearize live ranges in memory for fast lookup.
4499 2141267 : LiveRangeFinder finder(data(), local_zone);
4500 : ZoneVector<BitVector*>& live_in_sets = data()->live_in_sets();
4501 28694471 : for (const InstructionBlock* block : code()->instruction_blocks()) {
4502 27018505 : if (CanEagerlyResolveControlFlow(block)) continue;
4503 23729218 : BitVector* live = live_in_sets[block->rpo_number().ToInt()];
4504 11864609 : BitVector::Iterator iterator(live);
4505 203534760 : while (!iterator.Done()) {
4506 83970418 : int vreg = iterator.Current();
4507 83970418 : LiveRangeBoundArray* array = finder.ArrayFor(vreg);
4508 296723458 : for (const RpoNumber& pred : block->predecessors()) {
4509 : FindResult result;
4510 129072191 : const InstructionBlock* pred_block = code()->InstructionBlockAt(pred);
4511 128782874 : if (!array->FindConnectableSubranges(block, pred_block, &result)) {
4512 126179260 : continue;
4513 : }
4514 7393427 : InstructionOperand pred_op = result.pred_cover_->GetAssignedOperand();
4515 7393429 : InstructionOperand cur_op = result.cur_cover_->GetAssignedOperand();
4516 7393430 : if (pred_op.Equals(cur_op)) continue;
4517 5188061 : if (!pred_op.IsAnyRegister() && cur_op.IsAnyRegister()) {
4518 : // We're doing a reload.
4519 : // We don't need to, if:
4520 : // 1) there's no register use in this block, and
4521 : // 2) the range ends before the block does, and
4522 : // 3) we don't have a successor, or the successor is spilled.
4523 : LifetimePosition block_start =
4524 2484911 : LifetimePosition::GapFromInstructionIndex(block->code_start());
4525 : LifetimePosition block_end =
4526 : LifetimePosition::GapFromInstructionIndex(block->code_end());
4527 4871068 : const LiveRange* current = result.cur_cover_;
4528 : // TODO(herhut): This is not the successor if we have control flow!
4529 256744 : const LiveRange* successor = current->next();
4530 4969822 : if (current->End() < block_end &&
4531 256744 : (successor == nullptr || successor->spilled())) {
4532 : // verify point 1: no register use. We can go to the end of the
4533 : // range, since it's all within the block.
4534 :
4535 : bool uses_reg = false;
4536 542091 : for (const UsePosition* use = current->NextUsePosition(block_start);
4537 : use != nullptr; use = use->next()) {
4538 443339 : if (use->operand()->IsAnyRegister()) {
4539 : uses_reg = true;
4540 : break;
4541 : }
4542 : }
4543 640716 : if (!uses_reg) continue;
4544 : }
4545 2675311 : if (current->TopLevel()->IsSpilledOnlyInDeferredBlocks() &&
4546 : pred_block->IsDeferred()) {
4547 : // The spill location should be defined in pred_block, so add
4548 : // pred_block to the list of blocks requiring a spill operand.
4549 : current->TopLevel()->GetListOfBlocksRequiringSpillOperands()->Add(
4550 289154 : pred_block->rpo_number().ToInt());
4551 : }
4552 : }
4553 2604398 : int move_loc = ResolveControlFlow(block, cur_op, pred_block, pred_op);
4554 : USE(move_loc);
4555 : DCHECK_IMPLIES(
4556 : result.cur_cover_->TopLevel()->IsSpilledOnlyInDeferredBlocks() &&
4557 : !(pred_op.IsAnyRegister() && cur_op.IsAnyRegister()),
4558 : code()->GetInstructionBlock(move_loc)->IsDeferred());
4559 : }
4560 83970459 : iterator.Advance();
4561 : }
4562 : }
4563 :
4564 : // At this stage, we collected blocks needing a spill operand from
4565 : // ConnectRanges and from ResolveControlFlow. Time to commit the spills for
4566 : // deferred blocks.
4567 2141553 : const size_t live_ranges_size = data()->live_ranges().size();
4568 121787558 : for (TopLevelLiveRange* top : data()->live_ranges()) {
4569 161428906 : CHECK_EQ(live_ranges_size,
4570 : data()->live_ranges().size()); // TODO(neis): crbug.com/831822
4571 154367390 : if (top == nullptr || top->IsEmpty() ||
4572 : !top->IsSpilledOnlyInDeferredBlocks())
4573 : continue;
4574 879134 : CommitSpillsInDeferredBlocks(top, finder.ArrayFor(top->vreg()), local_zone);
4575 : }
4576 2141595 : }
4577 :
4578 3701895 : int LiveRangeConnector::ResolveControlFlow(const InstructionBlock* block,
4579 : const InstructionOperand& cur_op,
4580 1506897 : const InstructionBlock* pred,
4581 : const InstructionOperand& pred_op) {
4582 : DCHECK(!pred_op.Equals(cur_op));
4583 : int gap_index;
4584 : Instruction::GapPosition position;
4585 2604396 : if (block->PredecessorCount() == 1) {
4586 : gap_index = block->first_instruction_index();
4587 : position = Instruction::START;
4588 : } else {
4589 : DCHECK_EQ(1, pred->SuccessorCount());
4590 : DCHECK(!code()
4591 : ->InstructionAt(pred->last_instruction_index())
4592 : ->HasReferenceMap());
4593 : gap_index = pred->last_instruction_index();
4594 : position = Instruction::END;
4595 : }
4596 2604396 : data()->AddGapMove(gap_index, position, pred_op, cur_op);
4597 2604395 : return gap_index;
4598 : }
4599 :
4600 2141393 : void LiveRangeConnector::ConnectRanges(Zone* local_zone) {
4601 : DelayedInsertionMap delayed_insertion_map(local_zone);
4602 2141393 : const size_t live_ranges_size = data()->live_ranges().size();
4603 123695693 : for (TopLevelLiveRange* top_range : data()->live_ranges()) {
4604 161425806 : CHECK_EQ(live_ranges_size,
4605 : data()->live_ranges().size()); // TODO(neis): crbug.com/831822
4606 80712634 : if (top_range == nullptr) continue;
4607 : bool connect_spilled = top_range->IsSpilledOnlyInDeferredBlocks();
4608 57940534 : LiveRange* first_range = top_range;
4609 128340643 : for (LiveRange *second_range = first_range->next(); second_range != nullptr;
4610 : first_range = second_range, second_range = second_range->next()) {
4611 : LifetimePosition pos = second_range->Start();
4612 : // Add gap move if the two live ranges touch and there is no block
4613 : // boundary.
4614 72122288 : if (second_range->spilled()) continue;
4615 20199353 : if (first_range->End() != pos) continue;
4616 21016560 : if (data()->IsBlockBoundary(pos) &&
4617 : !CanEagerlyResolveControlFlow(GetInstructionBlock(code(), pos))) {
4618 : continue;
4619 : }
4620 18762993 : InstructionOperand prev_operand = first_range->GetAssignedOperand();
4621 18762962 : InstructionOperand cur_operand = second_range->GetAssignedOperand();
4622 18762982 : if (prev_operand.Equals(cur_operand)) continue;
4623 : bool delay_insertion = false;
4624 : Instruction::GapPosition gap_pos;
4625 : int gap_index = pos.ToInstructionIndex();
4626 20396262 : if (connect_spilled && !prev_operand.IsAnyRegister() &&
4627 : cur_operand.IsAnyRegister()) {
4628 958945 : const InstructionBlock* block = code()->GetInstructionBlock(gap_index);
4629 : DCHECK(block->IsDeferred());
4630 : // Performing a reload in this block, meaning the spill operand must
4631 : // be defined here.
4632 : top_range->GetListOfBlocksRequiringSpillOperands()->Add(
4633 : block->rpo_number().ToInt());
4634 : }
4635 :
4636 18477228 : if (pos.IsGapPosition()) {
4637 18477045 : gap_pos = pos.IsStart() ? Instruction::START : Instruction::END;
4638 : } else {
4639 183 : if (pos.IsStart()) {
4640 : delay_insertion = true;
4641 : } else {
4642 0 : gap_index++;
4643 : }
4644 183 : gap_pos = delay_insertion ? Instruction::END : Instruction::START;
4645 : }
4646 : // Reloads or spills for spilled in deferred blocks ranges must happen
4647 : // only in deferred blocks.
4648 : DCHECK_IMPLIES(connect_spilled && !(prev_operand.IsAnyRegister() &&
4649 : cur_operand.IsAnyRegister()),
4650 : code()->GetInstructionBlock(gap_index)->IsDeferred());
4651 :
4652 : ParallelMove* move =
4653 : code()->InstructionAt(gap_index)->GetOrCreateParallelMove(
4654 18477264 : gap_pos, code_zone());
4655 18477148 : if (!delay_insertion) {
4656 : move->AddMove(prev_operand, cur_operand);
4657 : } else {
4658 : delayed_insertion_map.insert(
4659 183 : std::make_pair(std::make_pair(move, prev_operand), cur_operand));
4660 : }
4661 : }
4662 : }
4663 4282509 : if (delayed_insertion_map.empty()) return;
4664 : // Insert all the moves which should occur after the stored move.
4665 : ZoneVector<MoveOperands*> to_insert(local_zone);
4666 : ZoneVector<MoveOperands*> to_eliminate(local_zone);
4667 66 : to_insert.reserve(4);
4668 66 : to_eliminate.reserve(4);
4669 66 : ParallelMove* moves = delayed_insertion_map.begin()->first.first;
4670 : for (auto it = delayed_insertion_map.begin();; ++it) {
4671 : bool done = it == delayed_insertion_map.end();
4672 249 : if (done || it->first.first != moves) {
4673 : // Commit the MoveOperands for current ParallelMove.
4674 366 : for (MoveOperands* move : to_eliminate) {
4675 : move->Eliminate();
4676 : }
4677 549 : for (MoveOperands* move : to_insert) {
4678 183 : moves->push_back(move);
4679 : }
4680 183 : if (done) break;
4681 : // Reset state.
4682 : to_eliminate.clear();
4683 : to_insert.clear();
4684 117 : moves = it->first.first;
4685 : }
4686 : // Gather all MoveOperands for a single ParallelMove.
4687 : MoveOperands* move =
4688 183 : new (code_zone()) MoveOperands(it->first.second, it->second);
4689 183 : moves->PrepareInsertAfter(move, &to_eliminate);
4690 183 : to_insert.push_back(move);
4691 : }
4692 : }
4693 :
4694 879124 : void LiveRangeConnector::CommitSpillsInDeferredBlocks(
4695 4425436 : TopLevelLiveRange* range, LiveRangeBoundArray* array, Zone* temp_zone) {
4696 : DCHECK(range->IsSpilledOnlyInDeferredBlocks());
4697 : DCHECK(!range->spilled());
4698 :
4699 11587702 : InstructionSequence* code = data()->code();
4700 879124 : InstructionOperand spill_operand = range->GetSpillRangeOperand();
4701 :
4702 879124 : TRACE("Live Range %d will be spilled only in deferred blocks.\n",
4703 : range->vreg());
4704 : // If we have ranges that aren't spilled but require the operand on the stack,
4705 : // make sure we insert the spill.
4706 7987618 : for (const LiveRange* child = range; child != nullptr;
4707 : child = child->next()) {
4708 7378558 : for (const UsePosition* pos = child->first_pos(); pos != nullptr;
4709 : pos = pos->next()) {
4710 7846611 : if (pos->type() != UsePositionType::kRequiresSlot && !child->spilled())
4711 : continue;
4712 : range->AddBlockRequiringSpillOperand(
4713 : code->GetInstructionBlock(pos->pos().ToInstructionIndex())
4714 532288 : ->rpo_number());
4715 : }
4716 : }
4717 :
4718 879130 : ZoneQueue<int> worklist(temp_zone);
4719 :
4720 3290288 : for (BitVector::Iterator iterator(
4721 879124 : range->GetListOfBlocksRequiringSpillOperands());
4722 1532033 : !iterator.Done(); iterator.Advance()) {
4723 3064067 : worklist.push(iterator.Current());
4724 : }
4725 :
4726 : ZoneSet<std::pair<RpoNumber, int>> done_moves(temp_zone);
4727 : // Seek the deferred blocks that dominate locations requiring spill operands,
4728 : // and spill there. We only need to spill at the start of such blocks.
4729 : BitVector done_blocks(
4730 879129 : range->GetListOfBlocksRequiringSpillOperands()->length(), temp_zone);
4731 7893431 : while (!worklist.empty()) {
4732 6135195 : int block_id = worklist.front();
4733 : worklist.pop();
4734 6135202 : if (done_blocks.Contains(block_id)) continue;
4735 : done_blocks.Add(block_id);
4736 1333577 : InstructionBlock* spill_block =
4737 4771837 : code->InstructionBlockAt(RpoNumber::FromInt(block_id));
4738 :
4739 15480434 : for (const RpoNumber& pred : spill_block->predecessors()) {
4740 7270322 : const InstructionBlock* pred_block = code->InstructionBlockAt(pred);
4741 :
4742 5936739 : if (pred_block->IsDeferred()) {
4743 9206316 : worklist.push(pred_block->rpo_number().ToInt());
4744 : } else {
4745 : LifetimePosition pred_end =
4746 : LifetimePosition::InstructionFromInstructionIndex(
4747 : pred_block->last_instruction_index());
4748 :
4749 : LiveRangeBound* bound = array->Find(pred_end);
4750 :
4751 1333581 : InstructionOperand pred_op = bound->range_->GetAssignedOperand();
4752 :
4753 : RpoNumber spill_block_number = spill_block->rpo_number();
4754 1333577 : if (done_moves.find(std::make_pair(
4755 2667168 : spill_block_number, range->vreg())) == done_moves.end()) {
4756 : data()->AddGapMove(spill_block->first_instruction_index(),
4757 : Instruction::GapPosition::START, pred_op,
4758 1333577 : spill_operand);
4759 2667178 : done_moves.insert(std::make_pair(spill_block_number, range->vreg()));
4760 : spill_block->mark_needs_frame();
4761 : }
4762 : }
4763 : }
4764 : }
4765 879133 : }
4766 :
4767 : #undef TRACE
4768 :
4769 : } // namespace compiler
4770 : } // namespace internal
4771 178779 : } // namespace v8
|