Line data Source code
1 : // Copyright 2014 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/compiler/backend/register-allocator.h"
6 :
7 : #include <iomanip>
8 :
9 : #include "src/assembler-inl.h"
10 : #include "src/base/adapters.h"
11 : #include "src/compiler/linkage.h"
12 : #include "src/string-stream.h"
13 :
14 : namespace v8 {
15 : namespace internal {
16 : namespace compiler {
17 :
18 : #define TRACE(...) \
19 : do { \
20 : if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \
21 : } while (false)
22 :
23 : namespace {
24 :
25 : static constexpr int kFloat32Bit =
26 : RepresentationBit(MachineRepresentation::kFloat32);
27 : static constexpr int kSimd128Bit =
28 : RepresentationBit(MachineRepresentation::kSimd128);
29 :
30 208157 : int GetRegisterCount(const RegisterConfiguration* cfg, RegisterKind kind) {
31 : return kind == FP_REGISTERS ? cfg->num_double_registers()
32 3157416 : : cfg->num_general_registers();
33 : }
34 :
35 208116 : int GetAllocatableRegisterCount(const RegisterConfiguration* cfg,
36 : RegisterKind kind) {
37 : return kind == FP_REGISTERS ? cfg->num_allocatable_double_registers()
38 3157416 : : cfg->num_allocatable_general_registers();
39 : }
40 :
41 208085 : const int* GetAllocatableRegisterCodes(const RegisterConfiguration* cfg,
42 : RegisterKind kind) {
43 : return kind == FP_REGISTERS ? cfg->allocatable_double_codes()
44 3157416 : : cfg->allocatable_general_codes();
45 : }
46 :
47 441976 : const InstructionBlock* GetContainingLoop(const InstructionSequence* sequence,
48 : const InstructionBlock* block) {
49 : RpoNumber index = block->loop_header();
50 2941238 : if (!index.IsValid()) return nullptr;
51 441976 : return sequence->InstructionBlockAt(index);
52 : }
53 :
54 : const InstructionBlock* GetInstructionBlock(const InstructionSequence* code,
55 : LifetimePosition pos) {
56 20560843 : return code->GetInstructionBlock(pos.ToInstructionIndex());
57 : }
58 :
59 3965422 : Instruction* GetLastInstruction(InstructionSequence* code,
60 3965422 : const InstructionBlock* block) {
61 3965434 : return code->InstructionAt(block->last_instruction_index());
62 : }
63 :
64 : // TODO(dcarney): fix frame to allow frame accesses to half size location.
65 4367109 : int GetByteWidth(MachineRepresentation rep) {
66 4367109 : switch (rep) {
67 : case MachineRepresentation::kBit:
68 : case MachineRepresentation::kWord8:
69 : case MachineRepresentation::kWord16:
70 : case MachineRepresentation::kWord32:
71 : case MachineRepresentation::kFloat32:
72 : return kSystemPointerSize;
73 : case MachineRepresentation::kTaggedSigned:
74 : case MachineRepresentation::kTaggedPointer:
75 : case MachineRepresentation::kTagged:
76 : // TODO(ishell): kTaggedSize once half size locations are supported.
77 : return kSystemPointerSize;
78 : case MachineRepresentation::kWord64:
79 : case MachineRepresentation::kFloat64:
80 : return kDoubleSize;
81 : case MachineRepresentation::kSimd128:
82 1338 : return kSimd128Size;
83 : case MachineRepresentation::kNone:
84 : break;
85 : }
86 0 : UNREACHABLE();
87 : }
88 :
89 : } // namespace
90 :
91 : class LiveRangeBound {
92 : public:
93 32698873 : explicit LiveRangeBound(LiveRange* range, bool skip)
94 98096619 : : range_(range), start_(range->Start()), end_(range->End()), skip_(skip) {
95 : DCHECK(!range->IsEmpty());
96 : }
97 :
98 : bool CanCover(LifetimePosition position) {
99 232569254 : return start_ <= position && position < end_;
100 : }
101 :
102 : LiveRange* const range_;
103 : const LifetimePosition start_;
104 : const LifetimePosition end_;
105 : const bool skip_;
106 :
107 : private:
108 : DISALLOW_COPY_AND_ASSIGN(LiveRangeBound);
109 : };
110 :
111 : struct FindResult {
112 : LiveRange* cur_cover_;
113 : LiveRange* pred_cover_;
114 : };
115 :
116 : class LiveRangeBoundArray {
117 : public:
118 82969590 : LiveRangeBoundArray() : length_(0), start_(nullptr) {}
119 :
120 : bool ShouldInitialize() { return start_ == nullptr; }
121 :
122 5792561 : void Initialize(Zone* zone, TopLevelLiveRange* range) {
123 5792561 : length_ = range->GetChildCount();
124 :
125 5792593 : start_ = zone->NewArray<LiveRangeBound>(length_);
126 : LiveRangeBound* curr = start_;
127 : // Normally, spilled ranges do not need connecting moves, because the spill
128 : // location has been assigned at definition. For ranges spilled in deferred
129 : // blocks, that is not the case, so we need to connect the spilled children.
130 38491563 : for (LiveRange *i = range; i != nullptr; i = i->next(), ++curr) {
131 32698970 : new (curr) LiveRangeBound(i, i->spilled());
132 : }
133 5792593 : }
134 :
135 : LiveRangeBound* Find(const LifetimePosition position) const {
136 : size_t left_index = 0;
137 157009592 : size_t right_index = length_;
138 : while (true) {
139 551300283 : size_t current_index = left_index + (right_index - left_index) / 2;
140 : DCHECK(right_index > current_index);
141 551300283 : LiveRangeBound* bound = &start_[current_index];
142 551300283 : if (bound->start_ <= position) {
143 374672234 : if (position < bound->end_) return bound;
144 : DCHECK(left_index < current_index);
145 : left_index = current_index;
146 : } else {
147 : right_index = current_index;
148 : }
149 : }
150 : }
151 :
152 : LiveRangeBound* FindPred(const InstructionBlock* pred) {
153 : LifetimePosition pred_end =
154 : LifetimePosition::InstructionFromInstructionIndex(
155 : pred->last_instruction_index());
156 : return Find(pred_end);
157 : }
158 :
159 : LiveRangeBound* FindSucc(const InstructionBlock* succ) {
160 : LifetimePosition succ_start = LifetimePosition::GapFromInstructionIndex(
161 : succ->first_instruction_index());
162 : return Find(succ_start);
163 : }
164 :
165 233594798 : bool FindConnectableSubranges(const InstructionBlock* block,
166 116797399 : const InstructionBlock* pred,
167 : FindResult* result) const {
168 : LifetimePosition pred_end =
169 : LifetimePosition::InstructionFromInstructionIndex(
170 : pred->last_instruction_index());
171 : LiveRangeBound* bound = Find(pred_end);
172 116797399 : result->pred_cover_ = bound->range_;
173 : LifetimePosition cur_start = LifetimePosition::GapFromInstructionIndex(
174 : block->first_instruction_index());
175 :
176 116797399 : if (bound->CanCover(cur_start)) {
177 : // Both blocks are covered by the same range, so there is nothing to
178 : // connect.
179 : return false;
180 : }
181 : bound = Find(cur_start);
182 38871869 : if (bound->skip_) {
183 : return false;
184 : }
185 7552485 : result->cur_cover_ = bound->range_;
186 : DCHECK(result->pred_cover_ != nullptr && result->cur_cover_ != nullptr);
187 7552485 : return (result->cur_cover_ != result->pred_cover_);
188 : }
189 :
190 : private:
191 : size_t length_;
192 : LiveRangeBound* start_;
193 :
194 : DISALLOW_COPY_AND_ASSIGN(LiveRangeBoundArray);
195 : };
196 :
197 : class LiveRangeFinder {
198 : public:
199 2949260 : explicit LiveRangeFinder(const RegisterAllocationData* data, Zone* zone)
200 : : data_(data),
201 2949260 : bounds_length_(static_cast<int>(data_->live_ranges().size())),
202 2949260 : bounds_(zone->NewArray<LiveRangeBoundArray>(bounds_length_)),
203 8848250 : zone_(zone) {
204 85919381 : for (int i = 0; i < bounds_length_; ++i) {
205 82969651 : new (&bounds_[i]) LiveRangeBoundArray();
206 : }
207 2949730 : }
208 :
209 76963002 : LiveRangeBoundArray* ArrayFor(int operand_index) {
210 : DCHECK(operand_index < bounds_length_);
211 153926004 : TopLevelLiveRange* range = data_->live_ranges()[operand_index];
212 : DCHECK(range != nullptr && !range->IsEmpty());
213 76963002 : LiveRangeBoundArray* array = &bounds_[operand_index];
214 76963002 : if (array->ShouldInitialize()) {
215 5792571 : array->Initialize(zone_, range);
216 : }
217 76963021 : return array;
218 : }
219 :
220 : private:
221 : const RegisterAllocationData* const data_;
222 : const int bounds_length_;
223 : LiveRangeBoundArray* const bounds_;
224 : Zone* const zone_;
225 :
226 : DISALLOW_COPY_AND_ASSIGN(LiveRangeFinder);
227 : };
228 :
229 : typedef std::pair<ParallelMove*, InstructionOperand> DelayedInsertionMapKey;
230 :
231 : struct DelayedInsertionMapCompare {
232 : bool operator()(const DelayedInsertionMapKey& a,
233 : const DelayedInsertionMapKey& b) const {
234 697 : if (a.first == b.first) {
235 : return a.second.Compare(b.second);
236 : }
237 697 : return a.first < b.first;
238 : }
239 : };
240 :
241 : typedef ZoneMap<DelayedInsertionMapKey, InstructionOperand,
242 : DelayedInsertionMapCompare>
243 : DelayedInsertionMap;
244 :
245 104176241 : UsePosition::UsePosition(LifetimePosition pos, InstructionOperand* operand,
246 : void* hint, UsePositionHintType hint_type)
247 104176241 : : operand_(operand), hint_(hint), next_(nullptr), pos_(pos), flags_(0) {
248 : DCHECK_IMPLIES(hint == nullptr, hint_type == UsePositionHintType::kNone);
249 : bool register_beneficial = true;
250 : UsePositionType type = UsePositionType::kRegisterOrSlot;
251 206158059 : if (operand_ != nullptr && operand_->IsUnallocated()) {
252 : const UnallocatedOperand* unalloc = UnallocatedOperand::cast(operand_);
253 101981937 : if (unalloc->HasRegisterPolicy()) {
254 : type = UsePositionType::kRequiresRegister;
255 61707074 : } else if (unalloc->HasSlotPolicy()) {
256 : type = UsePositionType::kRequiresSlot;
257 : register_beneficial = false;
258 49237419 : } else if (unalloc->HasRegisterOrSlotOrConstantPolicy()) {
259 : type = UsePositionType::kRegisterOrSlotOrConstant;
260 : register_beneficial = false;
261 : } else {
262 49237497 : register_beneficial = !unalloc->HasRegisterOrSlotPolicy();
263 : }
264 : }
265 208352482 : flags_ = TypeField::encode(type) | HintTypeField::encode(hint_type) |
266 104176241 : RegisterBeneficialField::encode(register_beneficial) |
267 104176241 : AssignedRegisterField::encode(kUnassignedRegister);
268 : DCHECK(pos_.IsValid());
269 104176241 : }
270 :
271 0 : bool UsePosition::HasHint() const {
272 : int hint_register;
273 104316806 : return HintRegister(&hint_register);
274 : }
275 :
276 378376176 : bool UsePosition::HintRegister(int* register_code) const {
277 378376176 : if (hint_ == nullptr) return false;
278 168816182 : switch (HintTypeField::decode(flags_)) {
279 : case UsePositionHintType::kNone:
280 : case UsePositionHintType::kUnresolved:
281 : return false;
282 : case UsePositionHintType::kUsePos: {
283 : UsePosition* use_pos = reinterpret_cast<UsePosition*>(hint_);
284 24636542 : int assigned_register = AssignedRegisterField::decode(use_pos->flags_);
285 24636542 : if (assigned_register == kUnassignedRegister) return false;
286 11004225 : *register_code = assigned_register;
287 11004225 : return true;
288 : }
289 : case UsePositionHintType::kOperand: {
290 : InstructionOperand* operand =
291 : reinterpret_cast<InstructionOperand*>(hint_);
292 48946715 : *register_code = LocationOperand::cast(operand)->register_code();
293 48946715 : return true;
294 : }
295 : case UsePositionHintType::kPhi: {
296 1554847 : RegisterAllocationData::PhiMapValue* phi =
297 : reinterpret_cast<RegisterAllocationData::PhiMapValue*>(hint_);
298 : int assigned_register = phi->assigned_register();
299 1554847 : if (assigned_register == kUnassignedRegister) return false;
300 261680 : *register_code = assigned_register;
301 261680 : return true;
302 : }
303 : }
304 0 : UNREACHABLE();
305 : }
306 :
307 47789129 : UsePositionHintType UsePosition::HintTypeForOperand(
308 : const InstructionOperand& op) {
309 47789129 : switch (op.kind()) {
310 : case InstructionOperand::CONSTANT:
311 : case InstructionOperand::IMMEDIATE:
312 : case InstructionOperand::EXPLICIT:
313 : return UsePositionHintType::kNone;
314 : case InstructionOperand::UNALLOCATED:
315 22148392 : return UsePositionHintType::kUnresolved;
316 : case InstructionOperand::ALLOCATED:
317 27043670 : if (op.IsRegister() || op.IsFPRegister()) {
318 : return UsePositionHintType::kOperand;
319 : } else {
320 : DCHECK(op.IsStackSlot() || op.IsFPStackSlot());
321 1232455 : return UsePositionHintType::kNone;
322 : }
323 : case InstructionOperand::INVALID:
324 : break;
325 : }
326 0 : UNREACHABLE();
327 : }
328 :
329 0 : void UsePosition::SetHint(UsePosition* use_pos) {
330 : DCHECK_NOT_NULL(use_pos);
331 9637921 : hint_ = use_pos;
332 19275842 : flags_ = HintTypeField::update(flags_, UsePositionHintType::kUsePos);
333 0 : }
334 :
335 0 : void UsePosition::ResolveHint(UsePosition* use_pos) {
336 : DCHECK_NOT_NULL(use_pos);
337 15727300 : if (HintTypeField::decode(flags_) != UsePositionHintType::kUnresolved) return;
338 7863653 : hint_ = use_pos;
339 7863653 : flags_ = HintTypeField::update(flags_, UsePositionHintType::kUsePos);
340 : }
341 :
342 0 : void UsePosition::set_type(UsePositionType type, bool register_beneficial) {
343 : DCHECK_IMPLIES(type == UsePositionType::kRequiresSlot, !register_beneficial);
344 : DCHECK_EQ(kUnassignedRegister, AssignedRegisterField::decode(flags_));
345 19547343 : flags_ = TypeField::encode(type) |
346 19547343 : RegisterBeneficialField::encode(register_beneficial) |
347 19547343 : HintTypeField::encode(HintTypeField::decode(flags_)) |
348 19547343 : AssignedRegisterField::encode(kUnassignedRegister);
349 0 : }
350 :
351 42557584 : UseInterval* UseInterval::SplitAt(LifetimePosition pos, Zone* zone) {
352 : DCHECK(Contains(pos) && pos != start());
353 42557584 : UseInterval* after = new (zone) UseInterval(pos, end_);
354 42557511 : after->next_ = next_;
355 42557511 : next_ = nullptr;
356 42557511 : end_ = pos;
357 42557511 : return after;
358 : }
359 :
360 0 : void LifetimePosition::Print() const { StdoutStream{} << *this << std::endl; }
361 :
362 0 : std::ostream& operator<<(std::ostream& os, const LifetimePosition pos) {
363 0 : os << '@' << pos.ToInstructionIndex();
364 0 : if (pos.IsGapPosition()) {
365 : os << 'g';
366 : } else {
367 : os << 'i';
368 : }
369 0 : if (pos.IsStart()) {
370 : os << 's';
371 : } else {
372 : os << 'e';
373 : }
374 0 : return os;
375 : }
376 :
377 0 : LiveRange::LiveRange(int relative_id, MachineRepresentation rep,
378 : TopLevelLiveRange* top_level)
379 : : relative_id_(relative_id),
380 : bits_(0),
381 : last_interval_(nullptr),
382 : first_interval_(nullptr),
383 : first_pos_(nullptr),
384 : top_level_(top_level),
385 : next_(nullptr),
386 : current_interval_(nullptr),
387 : last_processed_use_(nullptr),
388 : current_hint_position_(nullptr),
389 150852516 : splitting_pointer_(nullptr) {
390 : DCHECK(AllocatedOperand::IsSupportedRepresentation(rep));
391 150852516 : bits_ = AssignedRegisterField::encode(kUnassignedRegister) |
392 150852516 : RepresentationField::encode(rep);
393 0 : }
394 :
395 0 : void LiveRange::VerifyPositions() const {
396 : // Walk the positions, verifying that each is in an interval.
397 0 : UseInterval* interval = first_interval_;
398 0 : for (UsePosition* pos = first_pos_; pos != nullptr; pos = pos->next()) {
399 0 : CHECK(Start() <= pos->pos());
400 0 : CHECK(pos->pos() <= End());
401 0 : CHECK_NOT_NULL(interval);
402 0 : while (!interval->Contains(pos->pos()) && interval->end() != pos->pos()) {
403 : interval = interval->next();
404 0 : CHECK_NOT_NULL(interval);
405 : }
406 : }
407 0 : }
408 :
409 0 : void LiveRange::VerifyIntervals() const {
410 : DCHECK(first_interval()->start() == Start());
411 : LifetimePosition last_end = first_interval()->end();
412 0 : for (UseInterval* interval = first_interval()->next(); interval != nullptr;
413 : interval = interval->next()) {
414 : DCHECK(last_end <= interval->start());
415 : last_end = interval->end();
416 : }
417 : DCHECK(last_end == End());
418 0 : }
419 :
420 0 : void LiveRange::set_assigned_register(int reg) {
421 : DCHECK(!HasRegisterAssigned() && !spilled());
422 176690953 : bits_ = AssignedRegisterField::update(bits_, reg);
423 0 : }
424 :
425 0 : void LiveRange::UnsetAssignedRegister() {
426 : DCHECK(HasRegisterAssigned() && !spilled());
427 0 : bits_ = AssignedRegisterField::update(bits_, kUnassignedRegister);
428 0 : }
429 :
430 0 : void LiveRange::Spill() {
431 : DCHECK(!spilled());
432 : DCHECK(!TopLevel()->HasNoSpillType());
433 : set_spilled(true);
434 24126537 : bits_ = AssignedRegisterField::update(bits_, kUnassignedRegister);
435 0 : }
436 :
437 118184088 : RegisterKind LiveRange::kind() const {
438 118184088 : return IsFloatingPoint(representation()) ? FP_REGISTERS : GENERAL_REGISTERS;
439 : }
440 :
441 78137403 : UsePosition* LiveRange::FirstHintPosition(int* register_index) const {
442 313506073 : for (UsePosition* pos = first_pos_; pos != nullptr; pos = pos->next()) {
443 270740364 : if (pos->HintRegister(register_index)) return pos;
444 : }
445 : return nullptr;
446 : }
447 :
448 80493088 : UsePosition* LiveRange::NextUsePosition(LifetimePosition start) const {
449 230717948 : UsePosition* use_pos = last_processed_use_;
450 40818249 : if (use_pos == nullptr || use_pos->pos() > start) {
451 : use_pos = first_pos();
452 : }
453 230717948 : while (use_pos != nullptr && use_pos->pos() < start) {
454 : use_pos = use_pos->next();
455 : }
456 40818249 : last_processed_use_ = use_pos;
457 40818249 : return use_pos;
458 : }
459 :
460 21841885 : UsePosition* LiveRange::NextUsePositionRegisterIsBeneficial(
461 : LifetimePosition start) const {
462 63188412 : UsePosition* pos = NextUsePosition(start);
463 85030405 : while (pos != nullptr && !pos->RegisterIsBeneficial()) {
464 : pos = pos->next();
465 : }
466 21841939 : return pos;
467 : }
468 :
469 6460599 : LifetimePosition LiveRange::NextLifetimePositionRegisterIsBeneficial(
470 1883240 : const LifetimePosition& start) const {
471 6460599 : UsePosition* next_use = NextUsePositionRegisterIsBeneficial(start);
472 6460615 : if (next_use == nullptr) return End();
473 : return next_use->pos();
474 : }
475 :
476 0 : UsePosition* LiveRange::PreviousUsePositionRegisterIsBeneficial(
477 60392 : LifetimePosition start) const {
478 505094 : UsePosition* pos = first_pos();
479 : UsePosition* prev = nullptr;
480 312939 : while (pos != nullptr && pos->pos() < start) {
481 252547 : if (pos->RegisterIsBeneficial()) prev = pos;
482 : pos = pos->next();
483 : }
484 0 : return prev;
485 : }
486 :
487 16869023 : UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) const {
488 55203337 : UsePosition* pos = NextUsePosition(start);
489 72072434 : while (pos != nullptr && pos->type() != UsePositionType::kRequiresRegister) {
490 : pos = pos->next();
491 : }
492 16869060 : return pos;
493 : }
494 :
495 1583582 : UsePosition* LiveRange::NextSlotPosition(LifetimePosition start) const {
496 6572538 : for (UsePosition* pos = NextUsePosition(start); pos != nullptr;
497 : pos = pos->next()) {
498 4989642 : if (pos->type() != UsePositionType::kRequiresSlot) continue;
499 : return pos;
500 : }
501 : return nullptr;
502 : }
503 :
504 6860220 : bool LiveRange::CanBeSpilled(LifetimePosition pos) const {
505 : // We cannot spill a live range that has a use requiring a register
506 : // at the current or the immediate next position.
507 6860220 : UsePosition* use_pos = NextRegisterPosition(pos);
508 6860222 : if (use_pos == nullptr) return true;
509 : return use_pos->pos() > pos.NextStart().End();
510 : }
511 :
512 86181525 : bool LiveRange::IsTopLevel() const { return top_level_ == this; }
513 :
514 196986366 : InstructionOperand LiveRange::GetAssignedOperand() const {
515 138450356 : if (HasRegisterAssigned()) {
516 : DCHECK(!spilled());
517 : return AllocatedOperand(LocationOperand::REGISTER, representation(),
518 79914346 : assigned_register());
519 : }
520 : DCHECK(spilled());
521 : DCHECK(!HasRegisterAssigned());
522 58536010 : if (TopLevel()->HasSpillOperand()) {
523 41215147 : InstructionOperand* op = TopLevel()->GetSpillOperand();
524 : DCHECK(!op->IsUnallocated());
525 41215147 : return *op;
526 : }
527 17320863 : return TopLevel()->GetSpillRangeOperand();
528 : }
529 :
530 0 : UseInterval* LiveRange::FirstSearchIntervalForPosition(
531 : LifetimePosition position) const {
532 867724820 : if (current_interval_ == nullptr) return first_interval_;
533 611635515 : if (current_interval_->start() > position) {
534 2644542 : current_interval_ = nullptr;
535 2172284 : return first_interval_;
536 : }
537 : return current_interval_;
538 : }
539 :
540 0 : void LiveRange::AdvanceLastProcessedMarker(
541 : UseInterval* to_start_of, LifetimePosition but_not_past) const {
542 488715941 : if (to_start_of == nullptr) return;
543 488721912 : if (to_start_of->start() > but_not_past) return;
544 255762019 : LifetimePosition start = current_interval_ == nullptr
545 : ? LifetimePosition::Invalid()
546 255762019 : : current_interval_->start();
547 255762019 : if (to_start_of->start() > start) {
548 99051526 : current_interval_ = to_start_of;
549 : }
550 : }
551 :
552 157368236 : LiveRange* LiveRange::SplitAt(LifetimePosition position, Zone* zone) {
553 : int new_id = TopLevel()->GetNextChildId();
554 : LiveRange* child = new (zone) LiveRange(new_id, representation(), TopLevel());
555 39342165 : child->set_bundle(bundle_);
556 : // If we split, we do so because we're about to switch registers or move
557 : // to/from a slot, so there's no value in connecting hints.
558 39342165 : DetachAt(position, child, zone, DoNotConnectHints);
559 :
560 39342017 : child->top_level_ = TopLevel();
561 39342017 : child->next_ = next_;
562 39342017 : next_ = child;
563 39342017 : return child;
564 : }
565 :
566 60605652 : UsePosition* LiveRange::DetachAt(LifetimePosition position, LiveRange* result,
567 : Zone* zone,
568 52106833 : HintConnectionOption connect_hints) {
569 : DCHECK(Start() < position);
570 : DCHECK(End() > position);
571 : DCHECK(result->IsEmpty());
572 : // Find the last interval that ends before the position. If the
573 : // position is contained in one of the intervals in the chain, we
574 : // split that interval and use the first part.
575 34601636 : UseInterval* current = FirstSearchIntervalForPosition(position);
576 :
577 : // If the split position coincides with the beginning of a use interval
578 : // we need to split use positons in a special way.
579 : bool split_at_start = false;
580 :
581 60605652 : if (current->start() == position) {
582 : // When splitting at start we need to locate the previous use interval.
583 1228 : current = first_interval_;
584 : }
585 :
586 : UseInterval* after = nullptr;
587 77155853 : while (current != nullptr) {
588 77156137 : if (current->Contains(position)) {
589 42554501 : after = current->SplitAt(position, zone);
590 42557468 : break;
591 : }
592 : UseInterval* next = current->next();
593 34601636 : if (next->start() >= position) {
594 : split_at_start = (next->start() == position);
595 : after = next;
596 : current->set_next(nullptr);
597 : break;
598 : }
599 : current = next;
600 : }
601 : DCHECK_NOT_NULL(after);
602 :
603 : // Partition original use intervals to the two live ranges.
604 : UseInterval* before = current;
605 : result->last_interval_ =
606 60608619 : (last_interval_ == before)
607 : ? after // Only interval in the range after split.
608 60608619 : : last_interval_; // Last interval of the original range.
609 60608619 : result->first_interval_ = after;
610 60608619 : last_interval_ = before;
611 :
612 : // Find the last use position before the split and the first use
613 : // position after it.
614 46097829 : UsePosition* use_after =
615 70310072 : splitting_pointer_ == nullptr || splitting_pointer_->pos() > position
616 : ? first_pos()
617 112715452 : : splitting_pointer_;
618 : UsePosition* use_before = nullptr;
619 60608619 : if (split_at_start) {
620 : // The split position coincides with the beginning of a use interval (the
621 : // end of a lifetime hole). Use at this position should be attributed to
622 : // the split child because split child owns use interval covering it.
623 3753601 : while (use_after != nullptr && use_after->pos() < position) {
624 : use_before = use_after;
625 : use_after = use_after->next();
626 : }
627 : } else {
628 102952847 : while (use_after != nullptr && use_after->pos() <= position) {
629 : use_before = use_after;
630 : use_after = use_after->next();
631 : }
632 : }
633 :
634 : // Partition original use positions to the two live ranges.
635 60608619 : if (use_before != nullptr) {
636 : use_before->set_next(nullptr);
637 : } else {
638 37502093 : first_pos_ = nullptr;
639 : }
640 60608619 : result->first_pos_ = use_after;
641 :
642 : // Discard cached iteration state. It might be pointing
643 : // to the use that no longer belongs to this live range.
644 60608619 : last_processed_use_ = nullptr;
645 60608619 : current_interval_ = nullptr;
646 :
647 71999150 : if (connect_hints == ConnectHints && use_before != nullptr &&
648 11390531 : use_after != nullptr) {
649 : use_after->SetHint(use_before);
650 : }
651 : #ifdef DEBUG
652 : VerifyChildStructure();
653 : result->VerifyChildStructure();
654 : #endif
655 60608619 : return use_before;
656 : }
657 :
658 0 : void LiveRange::UpdateParentForAllChildren(TopLevelLiveRange* new_top_level) {
659 31346591 : LiveRange* child = this;
660 35935017 : for (; child != nullptr; child = child->next()) {
661 31346591 : child->top_level_ = new_top_level;
662 : }
663 0 : }
664 :
665 81054899 : void LiveRange::ConvertUsesToOperand(const InstructionOperand& op,
666 81054899 : const InstructionOperand& spill_op) {
667 285334924 : for (UsePosition* pos = first_pos(); pos != nullptr; pos = pos->next()) {
668 : DCHECK(Start() <= pos->pos() && pos->pos() <= End());
669 102299423 : if (!pos->HasOperand()) continue;
670 101980602 : switch (pos->type()) {
671 : case UsePositionType::kRequiresSlot:
672 : DCHECK(spill_op.IsStackSlot() || spill_op.IsFPStackSlot());
673 : InstructionOperand::ReplaceWith(pos->operand(), &spill_op);
674 : break;
675 : case UsePositionType::kRequiresRegister:
676 : DCHECK(op.IsRegister() || op.IsFPRegister());
677 : V8_FALLTHROUGH;
678 : case UsePositionType::kRegisterOrSlot:
679 : case UsePositionType::kRegisterOrSlotOrConstant:
680 : InstructionOperand::ReplaceWith(pos->operand(), &op);
681 : break;
682 : }
683 : }
684 81054899 : }
685 :
686 : // This implements an ordering on live ranges so that they are ordered by their
687 : // start positions. This is needed for the correctness of the register
688 : // allocation algorithm. If two live ranges start at the same offset then there
689 : // is a tie breaker based on where the value is first used. This part of the
690 : // ordering is merely a heuristic.
691 29475641 : bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
692 : LifetimePosition start = Start();
693 : LifetimePosition other_start = other->Start();
694 308000751 : if (start == other_start) {
695 : UsePosition* pos = first_pos();
696 15939023 : if (pos == nullptr) return false;
697 : UsePosition* other_pos = other->first_pos();
698 13536618 : if (other_pos == nullptr) return true;
699 : return pos->pos() < other_pos->pos();
700 : }
701 0 : return start < other_start;
702 : }
703 :
704 40298358 : void LiveRange::SetUseHints(int register_index) {
705 201018379 : for (UsePosition* pos = first_pos(); pos != nullptr; pos = pos->next()) {
706 80519495 : if (!pos->HasOperand()) continue;
707 80200526 : switch (pos->type()) {
708 : case UsePositionType::kRequiresSlot:
709 : break;
710 : case UsePositionType::kRequiresRegister:
711 : case UsePositionType::kRegisterOrSlot:
712 : case UsePositionType::kRegisterOrSlotOrConstant:
713 : pos->set_assigned_register(register_index);
714 : break;
715 : }
716 : }
717 40298358 : }
718 :
719 222221890 : bool LiveRange::CanCover(LifetimePosition position) const {
720 248165768 : if (IsEmpty()) return false;
721 470388178 : return Start() <= position && position < End();
722 : }
723 :
724 247487644 : bool LiveRange::Covers(LifetimePosition position) const {
725 247487644 : if (!CanCover(position)) return false;
726 : UseInterval* start_search = FirstSearchIntervalForPosition(position);
727 347436117 : for (UseInterval* interval = start_search; interval != nullptr;
728 : interval = interval->next()) {
729 : DCHECK(interval->next() == nullptr ||
730 : interval->next()->start() >= interval->start());
731 : AdvanceLastProcessedMarker(interval, position);
732 347434008 : if (interval->Contains(position)) return true;
733 244148195 : if (interval->start() > position) return false;
734 : }
735 : return false;
736 : }
737 :
738 0 : LifetimePosition LiveRange::NextEndAfter(LifetimePosition position) const {
739 0 : UseInterval* start_search = FirstSearchIntervalForPosition(position);
740 112044194 : while (start_search->end() < position) {
741 : start_search = start_search->next();
742 : }
743 0 : return start_search->end();
744 : }
745 :
746 0 : LifetimePosition LiveRange::NextStartAfter(LifetimePosition position) const {
747 89013447 : UseInterval* start_search = FirstSearchIntervalForPosition(position);
748 228278265 : while (start_search->start() < position) {
749 : start_search = start_search->next();
750 : }
751 0 : return start_search->start();
752 : }
753 :
754 1824700202 : LifetimePosition LiveRange::FirstIntersection(LiveRange* other) const {
755 111845717 : UseInterval* b = other->first_interval();
756 360849677 : if (b == nullptr) return LifetimePosition::Invalid();
757 : LifetimePosition advance_last_processed_up_to = b->start();
758 349750506 : UseInterval* a = FirstSearchIntervalForPosition(b->start());
759 974827004 : while (a != nullptr && b != nullptr) {
760 577768225 : if (a->start() > other->End()) break;
761 536533116 : if (b->start() > End()) break;
762 531369159 : LifetimePosition cur_intersection = a->Intersect(b);
763 531390695 : if (cur_intersection.IsValid()) {
764 69794472 : return cur_intersection;
765 : }
766 461596223 : if (a->start() < b->start()) {
767 : a = a->next();
768 699299690 : if (a == nullptr || a->start() > other->End()) break;
769 : AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
770 : } else {
771 : b = b->next();
772 : }
773 : }
774 : return LifetimePosition::Invalid();
775 : }
776 :
777 0 : void LiveRange::Print(const RegisterConfiguration* config,
778 : bool with_children) const {
779 0 : StdoutStream os;
780 : PrintableLiveRange wrapper;
781 0 : wrapper.register_configuration_ = config;
782 0 : for (const LiveRange* i = this; i != nullptr; i = i->next()) {
783 0 : wrapper.range_ = i;
784 0 : os << wrapper << std::endl;
785 0 : if (!with_children) break;
786 0 : }
787 0 : }
788 :
789 0 : void LiveRange::Print(bool with_children) const {
790 0 : Print(RegisterConfiguration::Default(), with_children);
791 0 : }
792 :
793 0 : bool LiveRange::RegisterFromBundle(int* hint) const {
794 45466363 : if (bundle_ == nullptr || bundle_->reg() == kUnassignedRegister) return false;
795 2764050 : *hint = bundle_->reg();
796 0 : return true;
797 : }
798 :
799 0 : void LiveRange::UpdateBundleRegister(int reg) const {
800 40298390 : if (bundle_ == nullptr || bundle_->reg() != kUnassignedRegister) return;
801 : bundle_->set_reg(reg);
802 : }
803 :
804 : struct TopLevelLiveRange::SpillMoveInsertionList : ZoneObject {
805 : SpillMoveInsertionList(int gap_index, InstructionOperand* operand,
806 : SpillMoveInsertionList* next)
807 23812448 : : gap_index(gap_index), operand(operand), next(next) {}
808 : const int gap_index;
809 : InstructionOperand* const operand;
810 : SpillMoveInsertionList* const next;
811 : };
812 :
813 71 : TopLevelLiveRange::TopLevelLiveRange(int vreg, MachineRepresentation rep)
814 : : LiveRange(0, rep, this),
815 : vreg_(vreg),
816 : last_child_id_(0),
817 : splintered_from_(nullptr),
818 : spill_operand_(nullptr),
819 : spill_move_insertion_locations_(nullptr),
820 : spilled_in_deferred_blocks_(false),
821 : spill_start_index_(kMaxInt),
822 : last_pos_(nullptr),
823 : splinter_(nullptr),
824 101633314 : has_preassigned_slot_(false) {
825 : bits_ |= SpillTypeField::encode(SpillType::kNoSpillType);
826 71 : }
827 :
828 : #if DEBUG
829 : int TopLevelLiveRange::debug_virt_reg() const {
830 : return IsSplinter() ? splintered_from()->vreg() : vreg();
831 : }
832 : #endif
833 :
834 0 : void TopLevelLiveRange::RecordSpillLocation(Zone* zone, int gap_index,
835 : InstructionOperand* operand) {
836 : DCHECK(HasNoSpillType());
837 : spill_move_insertion_locations_ = new (zone) SpillMoveInsertionList(
838 47624896 : gap_index, operand, spill_move_insertion_locations_);
839 0 : }
840 :
841 17765703 : void TopLevelLiveRange::CommitSpillMoves(InstructionSequence* sequence,
842 : const InstructionOperand& op,
843 23281914 : bool might_be_duplicated) {
844 : DCHECK_IMPLIES(op.IsConstant(), GetSpillMoveInsertionLocations() == nullptr);
845 : Zone* zone = sequence->zone();
846 :
847 20924680 : for (SpillMoveInsertionList* to_spill = GetSpillMoveInsertionLocations();
848 : to_spill != nullptr; to_spill = to_spill->next) {
849 3158903 : Instruction* instr = sequence->InstructionAt(to_spill->gap_index);
850 : ParallelMove* move =
851 3158944 : instr->GetOrCreateParallelMove(Instruction::START, zone);
852 : // Skip insertion if it's possible that the move exists already as a
853 : // constraint move from a fixed output register to a slot.
854 5516233 : if (might_be_duplicated || has_preassigned_slot()) {
855 : bool found = false;
856 1919010 : for (MoveOperands* move_op : *move) {
857 672445 : if (move_op->IsEliminated()) continue;
858 2008186 : if (move_op->source().Equals(*to_spill->operand) &&
859 : move_op->destination().Equals(op)) {
860 : found = true;
861 550473 : if (has_preassigned_slot()) move_op->Eliminate();
862 : break;
863 : }
864 : }
865 898519 : if (found) continue;
866 : }
867 2608455 : if (!has_preassigned_slot()) {
868 2511581 : move->AddMove(*to_spill->operand, op);
869 : }
870 : }
871 17765777 : }
872 :
873 0 : void TopLevelLiveRange::SetSpillOperand(InstructionOperand* operand) {
874 : DCHECK(HasNoSpillType());
875 : DCHECK(!operand->IsUnallocated() && !operand->IsImmediate());
876 : set_spill_type(SpillType::kSpillOperand);
877 14609721 : spill_operand_ = operand;
878 0 : }
879 :
880 0 : void TopLevelLiveRange::SetSpillRange(SpillRange* spill_range) {
881 : DCHECK(!HasSpillOperand());
882 : DCHECK(spill_range);
883 7722256 : spill_range_ = spill_range;
884 0 : }
885 :
886 24147554 : AllocatedOperand TopLevelLiveRange::GetSpillRangeOperand() const {
887 24147554 : SpillRange* spill_range = GetSpillRange();
888 : int index = spill_range->assigned_slot();
889 881020 : return AllocatedOperand(LocationOperand::STACK_SLOT, representation(), index);
890 : }
891 :
892 11390692 : void TopLevelLiveRange::Splinter(LifetimePosition start, LifetimePosition end,
893 48504936 : Zone* zone) {
894 : DCHECK(start != Start() || end != End());
895 : DCHECK(start < end);
896 :
897 32658421 : TopLevelLiveRange splinter_temp(-1, representation());
898 : UsePosition* last_in_splinter = nullptr;
899 : // Live ranges defined in deferred blocks stay in deferred blocks, so we
900 : // don't need to splinter them. That means that start should always be
901 : // after the beginning of the range.
902 : DCHECK(start > Start());
903 :
904 11390692 : if (end >= End()) {
905 : DCHECK(start > Start());
906 1513508 : DetachAt(start, &splinter_temp, zone, ConnectHints);
907 1513647 : next_ = nullptr;
908 : } else {
909 : DCHECK(start < End() && Start() < end);
910 :
911 : const int kInvalidId = std::numeric_limits<int>::max();
912 :
913 9877184 : UsePosition* last = DetachAt(start, &splinter_temp, zone, ConnectHints);
914 :
915 : LiveRange end_part(kInvalidId, this->representation(), nullptr);
916 : // The last chunk exits the deferred region, and we don't want to connect
917 : // hints here, because the non-deferred region shouldn't be affected
918 : // by allocation decisions on the deferred path.
919 : last_in_splinter =
920 9877037 : splinter_temp.DetachAt(end, &end_part, zone, DoNotConnectHints);
921 :
922 9877056 : next_ = end_part.next_;
923 9877056 : last_interval_->set_next(end_part.first_interval_);
924 : // The next splinter will happen either at or after the current interval.
925 : // We can optimize DetachAt by setting current_interval_ accordingly,
926 : // which will then be picked up by FirstSearchIntervalForPosition.
927 9877056 : current_interval_ = last_interval_;
928 9877056 : last_interval_ = end_part.last_interval_;
929 :
930 9877056 : if (first_pos_ == nullptr) {
931 946916 : first_pos_ = end_part.first_pos_;
932 : } else {
933 8930140 : splitting_pointer_ = last;
934 8930140 : if (last != nullptr) last->set_next(end_part.first_pos_);
935 : }
936 : }
937 :
938 11390703 : if (splinter()->IsEmpty()) {
939 4588432 : splinter()->first_interval_ = splinter_temp.first_interval_;
940 4588432 : splinter()->last_interval_ = splinter_temp.last_interval_;
941 : } else {
942 6802271 : splinter()->last_interval_->set_next(splinter_temp.first_interval_);
943 6802271 : splinter()->last_interval_ = splinter_temp.last_interval_;
944 : }
945 11390703 : if (splinter()->first_pos() == nullptr) {
946 8988349 : splinter()->first_pos_ = splinter_temp.first_pos_;
947 : } else {
948 2402354 : splinter()->last_pos_->set_next(splinter_temp.first_pos_);
949 : }
950 11390703 : if (last_in_splinter != nullptr) {
951 2458101 : splinter()->last_pos_ = last_in_splinter;
952 : } else {
953 11939852 : if (splinter()->first_pos() != nullptr &&
954 3007250 : splinter()->last_pos_ == nullptr) {
955 1350352 : splinter()->last_pos_ = splinter()->first_pos();
956 2942124 : for (UsePosition* pos = splinter()->first_pos(); pos != nullptr;
957 : pos = pos->next()) {
958 1591772 : splinter()->last_pos_ = pos;
959 : }
960 : }
961 : }
962 : #if DEBUG
963 : Verify();
964 : splinter()->Verify();
965 : #endif
966 11390703 : }
967 :
968 4588343 : void TopLevelLiveRange::SetSplinteredFrom(TopLevelLiveRange* splinter_parent) {
969 4588343 : splintered_from_ = splinter_parent;
970 4588343 : if (!HasSpillOperand() && splinter_parent->spill_range_ != nullptr) {
971 2305022 : SetSpillRange(splinter_parent->spill_range_);
972 : }
973 4588343 : }
974 :
975 5218462 : void TopLevelLiveRange::UpdateSpillRangePostMerge(TopLevelLiveRange* merged) {
976 : DCHECK(merged->TopLevel() == this);
977 :
978 5548516 : if (HasNoSpillType() && merged->HasSpillRange()) {
979 : set_spill_type(merged->spill_type());
980 : DCHECK_LT(0, GetSpillRange()->live_ranges().size());
981 630036 : merged->spill_range_ = nullptr;
982 : merged->bits_ =
983 1260072 : SpillTypeField::update(merged->bits_, SpillType::kNoSpillType);
984 : }
985 4588426 : }
986 :
987 8376043 : void TopLevelLiveRange::Merge(TopLevelLiveRange* other, Zone* zone) {
988 : DCHECK(Start() < other->Start());
989 : DCHECK(other->splintered_from() == this);
990 :
991 57231297 : LiveRange* first = this;
992 16634341 : LiveRange* second = other;
993 : DCHECK(first->Start() < second->Start());
994 50870043 : while (first != nullptr && second != nullptr) {
995 : DCHECK(first != second);
996 : // Make sure the ranges are in order each time we iterate.
997 41693287 : if (second->Start() < first->Start()) {
998 : LiveRange* tmp = second;
999 : second = first;
1000 : first = tmp;
1001 : continue;
1002 : }
1003 :
1004 25013890 : if (first->End() <= second->Start()) {
1005 12170682 : if (first->next() == nullptr ||
1006 : first->next()->Start() > second->Start()) {
1007 : // First is in order before second.
1008 : LiveRange* temp = first->next();
1009 4633801 : first->next_ = second;
1010 : first = temp;
1011 : } else {
1012 : // First is in order before its successor (or second), so advance first.
1013 : first = first->next();
1014 : }
1015 : continue;
1016 : }
1017 :
1018 : DCHECK(first->Start() < second->Start());
1019 : // If first and second intersect, split first.
1020 16634341 : if (first->Start() < second->End() && second->Start() < first->End()) {
1021 16634219 : LiveRange* temp = first->SplitAt(second->Start(), zone);
1022 16634315 : CHECK(temp != first);
1023 : temp->set_spilled(first->spilled());
1024 16634315 : if (!temp->spilled())
1025 : temp->set_assigned_register(first->assigned_register());
1026 :
1027 16634315 : first->next_ = second;
1028 : first = temp;
1029 16634315 : continue;
1030 : }
1031 : DCHECK(first->End() <= second->Start());
1032 : }
1033 :
1034 13765269 : TopLevel()->UpdateParentForAllChildren(TopLevel());
1035 4588426 : TopLevel()->UpdateSpillRangePostMerge(other);
1036 12964547 : TopLevel()->set_has_slot_use(TopLevel()->has_slot_use() ||
1037 : other->has_slot_use());
1038 :
1039 : #if DEBUG
1040 : Verify();
1041 : #endif
1042 4588417 : }
1043 :
1044 0 : void TopLevelLiveRange::VerifyChildrenInOrder() const {
1045 0 : LifetimePosition last_end = End();
1046 0 : for (const LiveRange* child = this->next(); child != nullptr;
1047 : child = child->next()) {
1048 : DCHECK(last_end <= child->Start());
1049 : last_end = child->End();
1050 : }
1051 0 : }
1052 :
1053 0 : void TopLevelLiveRange::Verify() const {
1054 : VerifyChildrenInOrder();
1055 0 : for (const LiveRange* child = this; child != nullptr; child = child->next()) {
1056 : VerifyChildStructure();
1057 : }
1058 0 : }
1059 :
1060 64140813 : void TopLevelLiveRange::ShortenTo(LifetimePosition start) {
1061 64140813 : TRACE("Shorten live range %d to [%d\n", vreg(), start.value());
1062 : DCHECK_NOT_NULL(first_interval_);
1063 : DCHECK(first_interval_->start() <= start);
1064 : DCHECK(start < first_interval_->end());
1065 64140813 : first_interval_->set_start(start);
1066 64140813 : }
1067 :
1068 2273247 : void TopLevelLiveRange::EnsureInterval(LifetimePosition start,
1069 0 : LifetimePosition end, Zone* zone) {
1070 2273247 : TRACE("Ensure live range %d in interval [%d %d[\n", vreg(), start.value(),
1071 : end.value());
1072 : LifetimePosition new_end = end;
1073 8818567 : while (first_interval_ != nullptr && first_interval_->start() <= end) {
1074 4272069 : if (first_interval_->end() > end) {
1075 : new_end = first_interval_->end();
1076 : }
1077 4272069 : first_interval_ = first_interval_->next();
1078 : }
1079 :
1080 4546501 : UseInterval* new_interval = new (zone) UseInterval(start, new_end);
1081 2273252 : new_interval->set_next(first_interval_);
1082 2273252 : first_interval_ = new_interval;
1083 2273252 : if (new_interval->next() == nullptr) {
1084 903514 : last_interval_ = new_interval;
1085 : }
1086 2273252 : }
1087 :
1088 363293909 : void TopLevelLiveRange::AddUseInterval(LifetimePosition start,
1089 0 : LifetimePosition end, Zone* zone) {
1090 363293909 : TRACE("Add to live range %d interval [%d %d[\n", vreg(), start.value(),
1091 : end.value());
1092 363304828 : if (first_interval_ == nullptr) {
1093 83865402 : UseInterval* interval = new (zone) UseInterval(start, end);
1094 83865551 : first_interval_ = interval;
1095 83865551 : last_interval_ = interval;
1096 : } else {
1097 279439426 : if (end == first_interval_->start()) {
1098 : first_interval_->set_start(start);
1099 171025870 : } else if (end < first_interval_->start()) {
1100 114440494 : UseInterval* interval = new (zone) UseInterval(start, end);
1101 114439892 : interval->set_next(first_interval_);
1102 114439892 : first_interval_ = interval;
1103 : } else {
1104 : // Order of instruction's processing (see ProcessInstructions) guarantees
1105 : // that each new use interval either precedes, intersects with or touches
1106 : // the last added interval.
1107 : DCHECK(start <= first_interval_->end());
1108 : first_interval_->set_start(Min(start, first_interval_->start()));
1109 56585376 : first_interval_->set_end(Max(end, first_interval_->end()));
1110 : }
1111 : }
1112 363304375 : }
1113 :
1114 104176038 : void TopLevelLiveRange::AddUsePosition(UsePosition* use_pos) {
1115 : LifetimePosition pos = use_pos->pos();
1116 104176038 : TRACE("Add to live range %d use position %d\n", vreg(), pos.value());
1117 : UsePosition* prev_hint = nullptr;
1118 140402 : UsePosition* prev = nullptr;
1119 104316689 : UsePosition* current = first_pos_;
1120 208492975 : while (current != nullptr && current->pos() < pos) {
1121 140403 : prev_hint = current->HasHint() ? current : prev_hint;
1122 : prev = current;
1123 : current = current->next();
1124 : }
1125 :
1126 104176286 : if (prev == nullptr) {
1127 104035884 : use_pos->set_next(first_pos_);
1128 104035884 : first_pos_ = use_pos;
1129 : } else {
1130 : use_pos->set_next(prev->next());
1131 : prev->set_next(use_pos);
1132 : }
1133 :
1134 208352689 : if (prev_hint == nullptr && use_pos->HasHint()) {
1135 24408707 : current_hint_position_ = use_pos;
1136 : }
1137 104176286 : }
1138 :
1139 116731333 : static bool AreUseIntervalsIntersecting(UseInterval* interval1,
1140 12222748 : UseInterval* interval2) {
1141 191319367 : while (interval1 != nullptr && interval2 != nullptr) {
1142 128766401 : if (interval1->start() < interval2->start()) {
1143 96106898 : if (interval1->end() > interval2->start()) {
1144 : return true;
1145 : }
1146 : interval1 = interval1->next();
1147 : } else {
1148 32659503 : if (interval2->end() > interval1->start()) {
1149 : return true;
1150 : }
1151 : interval2 = interval2->next();
1152 : }
1153 : }
1154 : return false;
1155 : }
1156 :
1157 0 : std::ostream& operator<<(std::ostream& os,
1158 : const PrintableLiveRange& printable_range) {
1159 0 : const LiveRange* range = printable_range.range_;
1160 0 : os << "Range: " << range->TopLevel()->vreg() << ":" << range->relative_id()
1161 0 : << " ";
1162 0 : if (range->TopLevel()->is_phi()) os << "phi ";
1163 0 : if (range->TopLevel()->is_non_loop_phi()) os << "nlphi ";
1164 :
1165 0 : os << "{" << std::endl;
1166 0 : UseInterval* interval = range->first_interval();
1167 0 : UsePosition* use_pos = range->first_pos();
1168 0 : while (use_pos != nullptr) {
1169 0 : if (use_pos->HasOperand()) {
1170 0 : os << *use_pos->operand() << use_pos->pos() << " ";
1171 : }
1172 : use_pos = use_pos->next();
1173 : }
1174 : os << std::endl;
1175 :
1176 0 : while (interval != nullptr) {
1177 0 : os << '[' << interval->start() << ", " << interval->end() << ')'
1178 : << std::endl;
1179 : interval = interval->next();
1180 : }
1181 0 : os << "}";
1182 0 : return os;
1183 : }
1184 :
1185 : namespace {
1186 0 : void PrintBlockRow(std::ostream& os, const InstructionBlocks& blocks) {
1187 0 : os << " ";
1188 0 : for (auto block : blocks) {
1189 : LifetimePosition start_pos = LifetimePosition::GapFromInstructionIndex(
1190 : block->first_instruction_index());
1191 : LifetimePosition end_pos = LifetimePosition::GapFromInstructionIndex(
1192 : block->last_instruction_index())
1193 : .NextFullStart();
1194 0 : int length = end_pos.value() - start_pos.value();
1195 0 : constexpr int kMaxPrefixLength = 32;
1196 : char buffer[kMaxPrefixLength];
1197 : int rpo_number = block->rpo_number().ToInt();
1198 0 : const char* deferred_marker = block->IsDeferred() ? "(deferred)" : "";
1199 0 : int max_prefix_length = std::min(length, kMaxPrefixLength);
1200 : int prefix = snprintf(buffer, max_prefix_length, "[-B%d-%s", rpo_number,
1201 0 : deferred_marker);
1202 0 : os << buffer;
1203 0 : int remaining = length - std::min(prefix, max_prefix_length) - 1;
1204 0 : for (int i = 0; i < remaining; ++i) os << '-';
1205 : os << ']';
1206 : }
1207 : os << '\n';
1208 0 : }
1209 : } // namespace
1210 :
1211 0 : void LinearScanAllocator::PrintRangeRow(std::ostream& os,
1212 0 : const TopLevelLiveRange* toplevel) {
1213 : int position = 0;
1214 0 : os << std::setw(3) << toplevel->vreg()
1215 0 : << (toplevel->IsSplinter() ? "s:" : ": ");
1216 :
1217 0 : for (const LiveRange* range = toplevel; range != nullptr;
1218 : range = range->next()) {
1219 0 : for (UseInterval* interval = range->first_interval(); interval != nullptr;
1220 : interval = interval->next()) {
1221 : LifetimePosition start = interval->start();
1222 : LifetimePosition end = interval->end();
1223 0 : CHECK_GE(start.value(), position);
1224 0 : for (; start.value() > position; position++) {
1225 : os << ' ';
1226 : }
1227 0 : int length = end.value() - start.value();
1228 0 : constexpr int kMaxPrefixLength = 32;
1229 : char buffer[kMaxPrefixLength];
1230 0 : int max_prefix_length = std::min(length + 1, kMaxPrefixLength);
1231 : int prefix;
1232 0 : if (range->spilled()) {
1233 0 : prefix = snprintf(buffer, max_prefix_length, "|ss");
1234 : } else {
1235 : const char* reg_name;
1236 0 : if (range->assigned_register() == kUnassignedRegister) {
1237 : reg_name = "???";
1238 : } else {
1239 : reg_name = RegisterName(range->assigned_register());
1240 : }
1241 0 : prefix = snprintf(buffer, max_prefix_length, "|%s", reg_name);
1242 : }
1243 0 : os << buffer;
1244 0 : position += std::min(prefix, max_prefix_length - 1);
1245 0 : CHECK_GE(end.value(), position);
1246 0 : const char line_style = range->spilled() ? '-' : '=';
1247 0 : for (; end.value() > position; position++) {
1248 : os << line_style;
1249 : }
1250 : }
1251 : }
1252 : os << '\n';
1253 0 : }
1254 :
1255 0 : void LinearScanAllocator::PrintRangeOverview(std::ostream& os) {
1256 0 : PrintBlockRow(os, code()->instruction_blocks());
1257 0 : for (auto toplevel : data()->fixed_live_ranges()) {
1258 0 : if (toplevel == nullptr) continue;
1259 0 : PrintRangeRow(os, toplevel);
1260 : }
1261 : int rowcount = 0;
1262 0 : for (auto toplevel : data()->live_ranges()) {
1263 0 : if (!CanProcessRange(toplevel)) continue;
1264 0 : if (rowcount++ % 10 == 0) PrintBlockRow(os, code()->instruction_blocks());
1265 0 : PrintRangeRow(os, toplevel);
1266 : }
1267 0 : }
1268 :
1269 4367136 : SpillRange::SpillRange(TopLevelLiveRange* parent, Zone* zone)
1270 : : live_ranges_(zone),
1271 : assigned_slot_(kUnassignedSlot),
1272 8734272 : byte_width_(GetByteWidth(parent->representation())) {
1273 : // Spill ranges are created for top level, non-splintered ranges. This is so
1274 : // that, when merging decisions are made, we consider the full extent of the
1275 : // virtual register, and avoid clobbering it.
1276 : DCHECK(!parent->IsSplinter());
1277 : UseInterval* result = nullptr;
1278 : UseInterval* node = nullptr;
1279 : // Copy the intervals for all ranges.
1280 11945572 : for (LiveRange* range = parent; range != nullptr; range = range->next()) {
1281 11253097 : UseInterval* src = range->first_interval();
1282 26409833 : while (src != nullptr) {
1283 : UseInterval* new_node = new (zone) UseInterval(src->start(), src->end());
1284 11253097 : if (result == nullptr) {
1285 : result = new_node;
1286 : } else {
1287 : node->set_next(new_node);
1288 : }
1289 : node = new_node;
1290 : src = src->next();
1291 : }
1292 : }
1293 4367204 : use_interval_ = result;
1294 4367204 : live_ranges().push_back(parent);
1295 4367223 : end_position_ = node->end();
1296 4367223 : parent->SetSpillRange(this);
1297 4367223 : }
1298 :
1299 63201817 : bool SpillRange::IsIntersectingWith(SpillRange* other) const {
1300 189605463 : if (this->use_interval_ == nullptr || other->use_interval_ == nullptr ||
1301 126334413 : this->End() <= other->use_interval_->start() ||
1302 : other->End() <= this->use_interval_->start()) {
1303 : return false;
1304 : }
1305 62365295 : return AreUseIntervalsIntersecting(use_interval_, other->use_interval_);
1306 : }
1307 :
1308 190321548 : bool SpillRange::TryMerge(SpillRange* other) {
1309 126966279 : if (HasSlot() || other->HasSlot()) return false;
1310 63355269 : if (byte_width() != other->byte_width() || IsIntersectingWith(other))
1311 : return false;
1312 :
1313 : LifetimePosition max = LifetimePosition::MaxPosition();
1314 1024210 : if (End() < other->End() && other->End() != max) {
1315 72997 : end_position_ = other->End();
1316 : }
1317 1024210 : other->end_position_ = max;
1318 :
1319 1024210 : MergeDisjointIntervals(other->use_interval_);
1320 1024209 : other->use_interval_ = nullptr;
1321 :
1322 3098429 : for (TopLevelLiveRange* range : other->live_ranges()) {
1323 : DCHECK(range->GetSpillRange() == other);
1324 : range->SetSpillRange(this);
1325 : }
1326 :
1327 : live_ranges().insert(live_ranges().end(), other->live_ranges().begin(),
1328 1024209 : other->live_ranges().end());
1329 : other->live_ranges().clear();
1330 :
1331 1024210 : return true;
1332 : }
1333 :
1334 1024210 : void SpillRange::MergeDisjointIntervals(UseInterval* other) {
1335 : UseInterval* tail = nullptr;
1336 1024210 : UseInterval* current = use_interval_;
1337 6889085 : while (other != nullptr) {
1338 : // Make sure the 'current' list starts first
1339 9681330 : if (current == nullptr || current->start() > other->start()) {
1340 : std::swap(current, other);
1341 : }
1342 : // Check disjointness
1343 : DCHECK(other == nullptr || current->end() <= other->start());
1344 : // Append the 'current' node to the result accumulator and move forward
1345 4840665 : if (tail == nullptr) {
1346 1024208 : use_interval_ = current;
1347 : } else {
1348 : tail->set_next(current);
1349 : }
1350 : tail = current;
1351 : current = current->next();
1352 : }
1353 : // Other list is empty => we are done
1354 1024210 : }
1355 :
1356 0 : void SpillRange::Print() const {
1357 0 : StdoutStream os;
1358 0 : os << "{" << std::endl;
1359 0 : for (TopLevelLiveRange* range : live_ranges()) {
1360 0 : os << range->vreg() << " ";
1361 : }
1362 : os << std::endl;
1363 :
1364 0 : for (UseInterval* i = interval(); i != nullptr; i = i->next()) {
1365 0 : os << '[' << i->start() << ", " << i->end() << ')' << std::endl;
1366 : }
1367 0 : os << "}" << std::endl;
1368 0 : }
1369 :
1370 0 : RegisterAllocationData::PhiMapValue::PhiMapValue(PhiInstruction* phi,
1371 : const InstructionBlock* block,
1372 : Zone* zone)
1373 : : phi_(phi),
1374 : block_(block),
1375 : incoming_operands_(zone),
1376 4339592 : assigned_register_(kUnassignedRegister) {
1377 4339592 : incoming_operands_.reserve(phi->operands().size());
1378 0 : }
1379 :
1380 0 : void RegisterAllocationData::PhiMapValue::AddOperand(
1381 : InstructionOperand* operand) {
1382 5185323 : incoming_operands_.push_back(operand);
1383 0 : }
1384 :
1385 0 : void RegisterAllocationData::PhiMapValue::CommitAssignment(
1386 : const InstructionOperand& assigned) {
1387 12540324 : for (InstructionOperand* operand : incoming_operands_) {
1388 : InstructionOperand::ReplaceWith(operand, &assigned);
1389 : }
1390 0 : }
1391 :
1392 2949624 : RegisterAllocationData::RegisterAllocationData(
1393 : const RegisterConfiguration* config, Zone* zone, Frame* frame,
1394 47194214 : InstructionSequence* code, const char* debug_name)
1395 : : allocation_zone_(zone),
1396 : frame_(frame),
1397 : code_(code),
1398 : debug_name_(debug_name),
1399 : config_(config),
1400 : phi_map_(allocation_zone()),
1401 : live_in_sets_(code->InstructionBlockCount(), nullptr, allocation_zone()),
1402 : live_out_sets_(code->InstructionBlockCount(), nullptr, allocation_zone()),
1403 2949666 : live_ranges_(code->VirtualRegisterCount() * 2, nullptr,
1404 : allocation_zone()),
1405 2949649 : fixed_live_ranges_(this->config()->num_general_registers(), nullptr,
1406 : allocation_zone()),
1407 : fixed_float_live_ranges_(allocation_zone()),
1408 2949666 : fixed_double_live_ranges_(this->config()->num_double_registers(), nullptr,
1409 : allocation_zone()),
1410 : fixed_simd128_live_ranges_(allocation_zone()),
1411 : spill_ranges_(code->VirtualRegisterCount(), nullptr, allocation_zone()),
1412 : delayed_references_(allocation_zone()),
1413 : assigned_registers_(nullptr),
1414 : assigned_double_registers_(nullptr),
1415 : virtual_register_count_(code->VirtualRegisterCount()),
1416 26546806 : preassigned_slot_ranges_(zone) {
1417 : if (!kSimpleFPAliasing) {
1418 : fixed_float_live_ranges_.resize(this->config()->num_float_registers(),
1419 : nullptr);
1420 : fixed_simd128_live_ranges_.resize(this->config()->num_simd128_registers(),
1421 : nullptr);
1422 : }
1423 :
1424 : assigned_registers_ = new (code_zone())
1425 5899294 : BitVector(this->config()->num_general_registers(), code_zone());
1426 : assigned_double_registers_ = new (code_zone())
1427 5899012 : BitVector(this->config()->num_double_registers(), code_zone());
1428 : fixed_register_use_ = new (code_zone())
1429 5899205 : BitVector(this->config()->num_general_registers(), code_zone());
1430 : fixed_fp_register_use_ = new (code_zone())
1431 5899298 : BitVector(this->config()->num_double_registers(), code_zone());
1432 :
1433 2949659 : this->frame()->SetAllocatedRegisters(assigned_registers_);
1434 2949659 : this->frame()->SetAllocatedDoubleRegisters(assigned_double_registers_);
1435 2949659 : }
1436 :
1437 39498065 : MoveOperands* RegisterAllocationData::AddGapMove(
1438 : int index, Instruction::GapPosition position,
1439 39498065 : const InstructionOperand& from, const InstructionOperand& to) {
1440 : Instruction* instr = code()->InstructionAt(index);
1441 39498095 : ParallelMove* moves = instr->GetOrCreateParallelMove(position, code_zone());
1442 39498276 : return moves->AddMove(from, to);
1443 : }
1444 :
1445 0 : MachineRepresentation RegisterAllocationData::RepresentationFor(
1446 66343948 : int virtual_register) {
1447 : DCHECK_LT(virtual_register, code()->VirtualRegisterCount());
1448 66343948 : return code()->GetRepresentation(virtual_register);
1449 : }
1450 :
1451 314247228 : TopLevelLiveRange* RegisterAllocationData::GetOrCreateLiveRangeFor(int index) {
1452 628494456 : if (index >= static_cast<int>(live_ranges().size())) {
1453 0 : live_ranges().resize(index + 1, nullptr);
1454 : }
1455 628494456 : TopLevelLiveRange* result = live_ranges()[index];
1456 314247228 : if (result == nullptr) {
1457 38914330 : result = NewLiveRange(index, RepresentationFor(index));
1458 77827354 : live_ranges()[index] = result;
1459 : }
1460 314246635 : return result;
1461 : }
1462 :
1463 90242662 : TopLevelLiveRange* RegisterAllocationData::NewLiveRange(
1464 90242662 : int index, MachineRepresentation rep) {
1465 90242551 : return new (allocation_zone()) TopLevelLiveRange(index, rep);
1466 : }
1467 :
1468 4588397 : int RegisterAllocationData::GetNextLiveRangeId() {
1469 4588397 : int vreg = virtual_register_count_++;
1470 9176794 : if (vreg >= static_cast<int>(live_ranges().size())) {
1471 0 : live_ranges().resize(vreg + 1, nullptr);
1472 : }
1473 4588397 : return vreg;
1474 : }
1475 :
1476 4588384 : TopLevelLiveRange* RegisterAllocationData::NextLiveRange(
1477 : MachineRepresentation rep) {
1478 4588384 : int vreg = GetNextLiveRangeId();
1479 4588418 : TopLevelLiveRange* ret = NewLiveRange(vreg, rep);
1480 4588342 : return ret;
1481 : }
1482 :
1483 2169783 : RegisterAllocationData::PhiMapValue* RegisterAllocationData::InitializePhiMap(
1484 4339579 : const InstructionBlock* block, PhiInstruction* phi) {
1485 : RegisterAllocationData::PhiMapValue* map_value = new (allocation_zone())
1486 : RegisterAllocationData::PhiMapValue(phi, block, allocation_zone());
1487 : auto res =
1488 4339591 : phi_map_.insert(std::make_pair(phi->virtual_register(), map_value));
1489 : DCHECK(res.second);
1490 : USE(res);
1491 2169795 : return map_value;
1492 : }
1493 :
1494 0 : RegisterAllocationData::PhiMapValue* RegisterAllocationData::GetPhiMapValueFor(
1495 : int virtual_register) {
1496 : auto it = phi_map_.find(virtual_register);
1497 : DCHECK(it != phi_map_.end());
1498 7072867 : return it->second;
1499 : }
1500 :
1501 0 : RegisterAllocationData::PhiMapValue* RegisterAllocationData::GetPhiMapValueFor(
1502 6321410 : TopLevelLiveRange* top_range) {
1503 0 : return GetPhiMapValueFor(top_range->vreg());
1504 : }
1505 :
1506 42 : bool RegisterAllocationData::ExistsUseWithoutDefinition() {
1507 : bool found = false;
1508 42 : BitVector::Iterator iterator(live_in_sets()[0]);
1509 126 : while (!iterator.Done()) {
1510 : found = true;
1511 0 : int operand_index = iterator.Current();
1512 : PrintF("Register allocator error: live v%d reached first block.\n",
1513 0 : operand_index);
1514 0 : LiveRange* range = GetOrCreateLiveRangeFor(operand_index);
1515 0 : PrintF(" (first use is at %d)\n", range->first_pos()->pos().value());
1516 0 : if (debug_name() == nullptr) {
1517 0 : PrintF("\n");
1518 : } else {
1519 0 : PrintF(" (function: %s)\n", debug_name());
1520 : }
1521 0 : iterator.Advance();
1522 : }
1523 42 : return found;
1524 : }
1525 :
1526 : // If a range is defined in a deferred block, we can expect all the range
1527 : // to only cover positions in deferred blocks. Otherwise, a block on the
1528 : // hot path would be dominated by a deferred block, meaning it is unreachable
1529 : // without passing through the deferred block, which is contradictory.
1530 : // In particular, when such a range contributes a result back on the hot
1531 : // path, it will be as one of the inputs of a phi. In that case, the value
1532 : // will be transferred via a move in the Gap::END's of the last instruction
1533 : // of a deferred block.
1534 362 : bool RegisterAllocationData::RangesDefinedInDeferredStayInDeferred() {
1535 42 : const size_t live_ranges_size = live_ranges().size();
1536 770 : for (const TopLevelLiveRange* range : live_ranges()) {
1537 1372 : CHECK_EQ(live_ranges_size,
1538 : live_ranges().size()); // TODO(neis): crbug.com/831822
1539 1349 : if (range == nullptr || range->IsEmpty() ||
1540 : !code()
1541 : ->GetInstructionBlock(range->Start().ToInstructionIndex())
1542 320 : ->IsDeferred()) {
1543 : continue;
1544 : }
1545 0 : for (const UseInterval* i = range->first_interval(); i != nullptr;
1546 : i = i->next()) {
1547 : int first = i->FirstGapIndex();
1548 : int last = i->LastGapIndex();
1549 0 : for (int instr = first; instr <= last;) {
1550 0 : const InstructionBlock* block = code()->GetInstructionBlock(instr);
1551 0 : if (!block->IsDeferred()) return false;
1552 : instr = block->last_instruction_index() + 1;
1553 : }
1554 : }
1555 : }
1556 : return true;
1557 : }
1558 :
1559 4574665 : SpillRange* RegisterAllocationData::AssignSpillRangeToLiveRange(
1560 15333187 : TopLevelLiveRange* range) {
1561 : DCHECK(!range->HasSpillOperand());
1562 :
1563 : SpillRange* spill_range = range->GetAllocatedSpillRange();
1564 4574665 : if (spill_range == nullptr) {
1565 : DCHECK(!range->IsSplinter());
1566 2776670 : spill_range = new (allocation_zone()) SpillRange(range, allocation_zone());
1567 : }
1568 : range->set_spill_type(TopLevelLiveRange::SpillType::kSpillRange);
1569 :
1570 : int spill_range_index =
1571 4574770 : range->IsSplinter() ? range->splintered_from()->vreg() : range->vreg();
1572 :
1573 9149540 : spill_ranges()[spill_range_index] = spill_range;
1574 :
1575 4574770 : return spill_range;
1576 : }
1577 :
1578 1590548 : SpillRange* RegisterAllocationData::CreateSpillRangeForLiveRange(
1579 1590548 : TopLevelLiveRange* range) {
1580 : DCHECK(!range->HasSpillOperand());
1581 : DCHECK(!range->IsSplinter());
1582 : SpillRange* spill_range =
1583 1590558 : new (allocation_zone()) SpillRange(range, allocation_zone());
1584 1590559 : return spill_range;
1585 : }
1586 :
1587 18234506 : void RegisterAllocationData::MarkFixedUse(MachineRepresentation rep,
1588 : int index) {
1589 18234506 : switch (rep) {
1590 : case MachineRepresentation::kFloat32:
1591 : case MachineRepresentation::kSimd128:
1592 : if (kSimpleFPAliasing) {
1593 25000 : fixed_fp_register_use_->Add(index);
1594 : } else {
1595 : int alias_base_index = -1;
1596 : int aliases = config()->GetAliases(
1597 : rep, index, MachineRepresentation::kFloat64, &alias_base_index);
1598 : DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
1599 : while (aliases--) {
1600 : int aliased_reg = alias_base_index + aliases;
1601 : fixed_fp_register_use_->Add(aliased_reg);
1602 : }
1603 : }
1604 : break;
1605 : case MachineRepresentation::kFloat64:
1606 73594 : fixed_fp_register_use_->Add(index);
1607 : break;
1608 : default:
1609 : DCHECK(!IsFloatingPoint(rep));
1610 18135912 : fixed_register_use_->Add(index);
1611 : break;
1612 : }
1613 18234506 : }
1614 :
1615 271557395 : bool RegisterAllocationData::HasFixedUse(MachineRepresentation rep, int index) {
1616 271557395 : switch (rep) {
1617 : case MachineRepresentation::kFloat32:
1618 : case MachineRepresentation::kSimd128:
1619 : if (kSimpleFPAliasing) {
1620 2567526 : return fixed_fp_register_use_->Contains(index);
1621 : } else {
1622 : int alias_base_index = -1;
1623 : int aliases = config()->GetAliases(
1624 : rep, index, MachineRepresentation::kFloat64, &alias_base_index);
1625 : DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
1626 : bool result = false;
1627 : while (aliases-- && !result) {
1628 : int aliased_reg = alias_base_index + aliases;
1629 : result |= fixed_fp_register_use_->Contains(aliased_reg);
1630 : }
1631 : return result;
1632 : }
1633 : break;
1634 : case MachineRepresentation::kFloat64:
1635 32824952 : return fixed_fp_register_use_->Contains(index);
1636 : break;
1637 : default:
1638 : DCHECK(!IsFloatingPoint(rep));
1639 507722312 : return fixed_register_use_->Contains(index);
1640 : break;
1641 : }
1642 : }
1643 :
1644 87037829 : void RegisterAllocationData::MarkAllocated(MachineRepresentation rep,
1645 : int index) {
1646 87037829 : switch (rep) {
1647 : case MachineRepresentation::kFloat32:
1648 : case MachineRepresentation::kSimd128:
1649 : if (kSimpleFPAliasing) {
1650 151487 : assigned_double_registers_->Add(index);
1651 : } else {
1652 : int alias_base_index = -1;
1653 : int aliases = config()->GetAliases(
1654 : rep, index, MachineRepresentation::kFloat64, &alias_base_index);
1655 : DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
1656 : while (aliases--) {
1657 : int aliased_reg = alias_base_index + aliases;
1658 : assigned_double_registers_->Add(aliased_reg);
1659 : }
1660 : }
1661 : break;
1662 : case MachineRepresentation::kFloat64:
1663 26923497 : assigned_double_registers_->Add(index);
1664 : break;
1665 : default:
1666 : DCHECK(!IsFloatingPoint(rep));
1667 59962845 : assigned_registers_->Add(index);
1668 : break;
1669 : }
1670 87037829 : }
1671 :
1672 39266816 : bool RegisterAllocationData::IsBlockBoundary(LifetimePosition pos) const {
1673 39266917 : return pos.IsFullStart() &&
1674 15879422 : code()->GetInstructionBlock(pos.ToInstructionIndex())->code_start() ==
1675 23387495 : pos.ToInstructionIndex();
1676 : }
1677 :
1678 5899009 : ConstraintBuilder::ConstraintBuilder(RegisterAllocationData* data)
1679 5899009 : : data_(data) {}
1680 :
1681 27594658 : InstructionOperand* ConstraintBuilder::AllocateFixed(
1682 : UnallocatedOperand* operand, int pos, bool is_tagged, bool is_input) {
1683 27594658 : TRACE("Allocating fixed reg for op %d\n", operand->virtual_register());
1684 : DCHECK(operand->HasFixedPolicy());
1685 : InstructionOperand allocated;
1686 : MachineRepresentation rep = InstructionSequence::DefaultRepresentation();
1687 : int virtual_register = operand->virtual_register();
1688 27594824 : if (virtual_register != InstructionOperand::kInvalidVirtualRegister) {
1689 : rep = data()->RepresentationFor(virtual_register);
1690 : }
1691 27594875 : if (operand->HasFixedSlotPolicy()) {
1692 : allocated = AllocatedOperand(AllocatedOperand::STACK_SLOT, rep,
1693 : operand->fixed_slot_index());
1694 26362419 : } else if (operand->HasFixedRegisterPolicy()) {
1695 : DCHECK(!IsFloatingPoint(rep));
1696 : DCHECK(data()->config()->IsAllocatableGeneralCode(
1697 : operand->fixed_register_index()));
1698 : allocated = AllocatedOperand(AllocatedOperand::REGISTER, rep,
1699 : operand->fixed_register_index());
1700 170670 : } else if (operand->HasFixedFPRegisterPolicy()) {
1701 : DCHECK(IsFloatingPoint(rep));
1702 : DCHECK_NE(InstructionOperand::kInvalidVirtualRegister, virtual_register);
1703 : allocated = AllocatedOperand(AllocatedOperand::REGISTER, rep,
1704 : operand->fixed_register_index());
1705 : } else {
1706 0 : UNREACHABLE();
1707 : }
1708 45925103 : if (is_input && allocated.IsAnyRegister()) {
1709 18234657 : data()->MarkFixedUse(rep, operand->fixed_register_index());
1710 : }
1711 : InstructionOperand::ReplaceWith(operand, &allocated);
1712 27594602 : if (is_tagged) {
1713 16719298 : TRACE("Fixed reg is tagged at %d\n", pos);
1714 16719406 : Instruction* instr = code()->InstructionAt(pos);
1715 16719406 : if (instr->HasReferenceMap()) {
1716 13374639 : instr->reference_map()->RecordReference(*AllocatedOperand::cast(operand));
1717 : }
1718 : }
1719 27594712 : return operand;
1720 : }
1721 :
1722 2949432 : void ConstraintBuilder::MeetRegisterConstraints() {
1723 26484046 : for (InstructionBlock* block : code()->instruction_blocks()) {
1724 20584933 : MeetRegisterConstraints(block);
1725 : }
1726 2949681 : }
1727 :
1728 20585041 : void ConstraintBuilder::MeetRegisterConstraints(const InstructionBlock* block) {
1729 : int start = block->first_instruction_index();
1730 : int end = block->last_instruction_index();
1731 : DCHECK_NE(-1, start);
1732 85442651 : for (int i = start; i <= end; ++i) {
1733 64857440 : MeetConstraintsBefore(i);
1734 64857929 : if (i != end) MeetConstraintsAfter(i);
1735 : }
1736 : // Meet register constraints for the instruction in the end.
1737 20585211 : MeetRegisterConstraintsForLastInstructionInBlock(block);
1738 20585160 : }
1739 :
1740 20585178 : void ConstraintBuilder::MeetRegisterConstraintsForLastInstructionInBlock(
1741 20585178 : const InstructionBlock* block) {
1742 : int end = block->last_instruction_index();
1743 20752942 : Instruction* last_instruction = code()->InstructionAt(end);
1744 41505884 : for (size_t i = 0; i < last_instruction->OutputCount(); i++) {
1745 : InstructionOperand* output_operand = last_instruction->OutputAt(i);
1746 : DCHECK(!output_operand->IsConstant());
1747 167786 : UnallocatedOperand* output = UnallocatedOperand::cast(output_operand);
1748 : int output_vreg = output->virtual_register();
1749 167786 : TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(output_vreg);
1750 : bool assigned = false;
1751 167788 : if (output->HasFixedPolicy()) {
1752 2 : AllocateFixed(output, -1, false, false);
1753 : // This value is produced on the stack, we never need to spill it.
1754 2 : if (output->IsStackSlot()) {
1755 : DCHECK(LocationOperand::cast(output)->index() <
1756 : data()->frame()->GetSpillSlotCount());
1757 : range->SetSpillOperand(LocationOperand::cast(output));
1758 : range->SetSpillStartIndex(end);
1759 : assigned = true;
1760 : }
1761 :
1762 6 : for (const RpoNumber& succ : block->successors()) {
1763 2 : const InstructionBlock* successor = code()->InstructionBlockAt(succ);
1764 : DCHECK_EQ(1, successor->PredecessorCount());
1765 : int gap_index = successor->first_instruction_index();
1766 : // Create an unconstrained operand for the same virtual register
1767 : // and insert a gap move from the fixed output to the operand.
1768 : UnallocatedOperand output_copy(UnallocatedOperand::REGISTER_OR_SLOT,
1769 : output_vreg);
1770 2 : data()->AddGapMove(gap_index, Instruction::START, *output, output_copy);
1771 : }
1772 : }
1773 :
1774 167788 : if (!assigned) {
1775 671139 : for (const RpoNumber& succ : block->successors()) {
1776 335568 : const InstructionBlock* successor = code()->InstructionBlockAt(succ);
1777 : DCHECK_EQ(1, successor->PredecessorCount());
1778 : int gap_index = successor->first_instruction_index();
1779 : range->RecordSpillLocation(allocation_zone(), gap_index, output);
1780 : range->SetSpillStartIndex(gap_index);
1781 : }
1782 : }
1783 : }
1784 20585156 : }
1785 :
1786 44272646 : void ConstraintBuilder::MeetConstraintsAfter(int instr_index) {
1787 125282861 : Instruction* first = code()->InstructionAt(instr_index);
1788 : // Handle fixed temporaries.
1789 90187138 : for (size_t i = 0; i < first->TempCount(); i++) {
1790 820978 : UnallocatedOperand* temp = UnallocatedOperand::cast(first->TempAt(i));
1791 820978 : if (temp->HasFixedPolicy()) AllocateFixed(temp, instr_index, false, false);
1792 : }
1793 : // Handle constant/fixed output operands.
1794 116105993 : for (size_t i = 0; i < first->OutputCount(); i++) {
1795 35916731 : InstructionOperand* output = first->OutputAt(i);
1796 35916731 : if (output->IsConstant()) {
1797 : int output_vreg = ConstantOperand::cast(output)->virtual_register();
1798 13502121 : TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(output_vreg);
1799 13502213 : range->SetSpillStartIndex(instr_index + 1);
1800 : range->SetSpillOperand(output);
1801 : continue;
1802 : }
1803 : UnallocatedOperand* first_output = UnallocatedOperand::cast(output);
1804 : TopLevelLiveRange* range =
1805 22414610 : data()->GetOrCreateLiveRangeFor(first_output->virtual_register());
1806 : bool assigned = false;
1807 22414210 : if (first_output->HasFixedPolicy()) {
1808 : int output_vreg = first_output->virtual_register();
1809 : UnallocatedOperand output_copy(UnallocatedOperand::REGISTER_OR_SLOT,
1810 : output_vreg);
1811 : bool is_tagged = code()->IsReference(output_vreg);
1812 9099376 : if (first_output->HasSecondaryStorage()) {
1813 : range->MarkHasPreassignedSlot();
1814 : data()->preassigned_slot_ranges().push_back(
1815 310634 : std::make_pair(range, first_output->GetSecondaryStorage()));
1816 : }
1817 9099402 : AllocateFixed(first_output, instr_index, is_tagged, false);
1818 :
1819 : // This value is produced on the stack, we never need to spill it.
1820 9099525 : if (first_output->IsStackSlot()) {
1821 : DCHECK(LocationOperand::cast(first_output)->index() <
1822 : data()->frame()->GetTotalFrameSlotCount());
1823 : range->SetSpillOperand(LocationOperand::cast(first_output));
1824 1107506 : range->SetSpillStartIndex(instr_index + 1);
1825 : assigned = true;
1826 : }
1827 : data()->AddGapMove(instr_index + 1, Instruction::START, *first_output,
1828 18199050 : output_copy);
1829 : }
1830 : // Make sure we add a gap move for spilling (if we have not done
1831 : // so already).
1832 22414527 : if (!assigned) {
1833 : range->RecordSpillLocation(allocation_zone(), instr_index + 1,
1834 21307119 : first_output);
1835 : range->SetSpillStartIndex(instr_index + 1);
1836 : }
1837 : }
1838 44272561 : }
1839 :
1840 64857360 : void ConstraintBuilder::MeetConstraintsBefore(int instr_index) {
1841 293647246 : Instruction* second = code()->InstructionAt(instr_index);
1842 : // Handle fixed input operands of second instruction.
1843 384232838 : for (size_t i = 0; i < second->InputCount(); i++) {
1844 127258667 : InstructionOperand* input = second->InputAt(i);
1845 127258667 : if (input->IsImmediate() || input->IsExplicit()) {
1846 : continue; // Ignore immediates and explicitly reserved registers.
1847 : }
1848 : UnallocatedOperand* cur_input = UnallocatedOperand::cast(input);
1849 66821174 : if (cur_input->HasFixedPolicy()) {
1850 : int input_vreg = cur_input->virtual_register();
1851 : UnallocatedOperand input_copy(UnallocatedOperand::REGISTER_OR_SLOT,
1852 : input_vreg);
1853 : bool is_tagged = code()->IsReference(input_vreg);
1854 18330223 : AllocateFixed(cur_input, instr_index, is_tagged, true);
1855 36660192 : data()->AddGapMove(instr_index, Instruction::END, input_copy, *cur_input);
1856 : }
1857 : }
1858 : // Handle "output same as input" for second instruction.
1859 137027168 : for (size_t i = 0; i < second->OutputCount(); i++) {
1860 36084542 : InstructionOperand* output = second->OutputAt(i);
1861 69322318 : if (!output->IsUnallocated()) continue;
1862 : UnallocatedOperand* second_output = UnallocatedOperand::cast(output);
1863 22582428 : if (!second_output->HasSameAsInputPolicy()) continue;
1864 : DCHECK_EQ(0, i); // Only valid for first output.
1865 : UnallocatedOperand* cur_input =
1866 2846766 : UnallocatedOperand::cast(second->InputAt(0));
1867 : int output_vreg = second_output->virtual_register();
1868 : int input_vreg = cur_input->virtual_register();
1869 : UnallocatedOperand input_copy(UnallocatedOperand::REGISTER_OR_SLOT,
1870 : input_vreg);
1871 : *cur_input =
1872 2846766 : UnallocatedOperand(*cur_input, second_output->virtual_register());
1873 : MoveOperands* gap_move = data()->AddGapMove(instr_index, Instruction::END,
1874 5693532 : input_copy, *cur_input);
1875 : DCHECK_NOT_NULL(gap_move);
1876 3435295 : if (code()->IsReference(input_vreg) && !code()->IsReference(output_vreg)) {
1877 588367 : if (second->HasReferenceMap()) {
1878 : RegisterAllocationData::DelayedReference delayed_reference = {
1879 0 : second->reference_map(), &gap_move->source()};
1880 0 : data()->delayed_references().push_back(delayed_reference);
1881 : }
1882 2258567 : } else if (!code()->IsReference(input_vreg) &&
1883 : code()->IsReference(output_vreg)) {
1884 : // The input is assumed to immediately have a tagged representation,
1885 : // before the pointer map can be used. I.e. the pointer map at the
1886 : // instruction will include the output operand (whose value at the
1887 : // beginning of the instruction is equal to the input operand). If
1888 : // this is not desired, then the pointer map at this instruction needs
1889 : // to be adjusted manually.
1890 : }
1891 : }
1892 64857918 : }
1893 :
1894 2949645 : void ConstraintBuilder::ResolvePhis() {
1895 : // Process the blocks in reverse order.
1896 47068692 : for (InstructionBlock* block : base::Reversed(code()->instruction_blocks())) {
1897 20584771 : ResolvePhis(block);
1898 : }
1899 2949505 : }
1900 :
1901 22754312 : void ConstraintBuilder::ResolvePhis(const InstructionBlock* block) {
1902 43338849 : for (PhiInstruction* phi : block->phis()) {
1903 : int phi_vreg = phi->virtual_register();
1904 : RegisterAllocationData::PhiMapValue* map_value =
1905 2169797 : data()->InitializePhiMap(block, phi);
1906 2169781 : InstructionOperand& output = phi->output();
1907 : // Map the destination operands, so the commitment phase can find them.
1908 14710202 : for (size_t i = 0; i < phi->operands().size(); ++i) {
1909 5185312 : InstructionBlock* cur_block =
1910 10370578 : code()->InstructionBlockAt(block->predecessors()[i]);
1911 : UnallocatedOperand input(UnallocatedOperand::REGISTER_OR_SLOT,
1912 5185312 : phi->operands()[i]);
1913 : MoveOperands* move = data()->AddGapMove(
1914 5185312 : cur_block->last_instruction_index(), Instruction::END, input, output);
1915 5185323 : map_value->AddOperand(&move->destination());
1916 : DCHECK(!code()
1917 : ->InstructionAt(cur_block->last_instruction_index())
1918 : ->HasReferenceMap());
1919 : }
1920 2169812 : TopLevelLiveRange* live_range = data()->GetOrCreateLiveRangeFor(phi_vreg);
1921 : int gap_index = block->first_instruction_index();
1922 : live_range->RecordSpillLocation(allocation_zone(), gap_index, &output);
1923 : live_range->SetSpillStartIndex(gap_index);
1924 : // We use the phi-ness of some nodes in some later heuristics.
1925 : live_range->set_is_phi(true);
1926 2169795 : live_range->set_is_non_loop_phi(!block->IsLoopHeader());
1927 : }
1928 20584525 : }
1929 :
1930 2949487 : LiveRangeBuilder::LiveRangeBuilder(RegisterAllocationData* data,
1931 : Zone* local_zone)
1932 5898974 : : data_(data), phi_hints_(local_zone) {}
1933 :
1934 20583436 : BitVector* LiveRangeBuilder::ComputeLiveOut(const InstructionBlock* block,
1935 20583495 : RegisterAllocationData* data) {
1936 : size_t block_index = block->rpo_number().ToSize();
1937 41166872 : BitVector* live_out = data->live_out_sets()[block_index];
1938 20583436 : if (live_out == nullptr) {
1939 : // Compute live out for the given block, except not including backward
1940 : // successor edges.
1941 : Zone* zone = data->allocation_zone();
1942 43798222 : const InstructionSequence* code = data->code();
1943 :
1944 20583472 : live_out = new (zone) BitVector(code->VirtualRegisterCount(), zone);
1945 :
1946 : // Process all successor blocks.
1947 64626319 : for (const RpoNumber& succ : block->successors()) {
1948 : // Add values live on entry to the successor.
1949 23458876 : if (succ <= block->rpo_number()) continue;
1950 46429374 : BitVector* live_in = data->live_in_sets()[succ.ToSize()];
1951 23214687 : if (live_in != nullptr) live_out->Union(*live_in);
1952 :
1953 : // All phi input operands corresponding to this successor edge are live
1954 : // out from this block.
1955 23214727 : const InstructionBlock* successor = code->InstructionBlockAt(succ);
1956 23214823 : size_t index = successor->PredecessorIndexOf(block->rpo_number());
1957 : DCHECK(index < successor->PredecessorCount());
1958 51239631 : for (PhiInstruction* phi : successor->phis()) {
1959 9615830 : live_out->Add(phi->operands()[index]);
1960 : }
1961 : }
1962 41168614 : data->live_out_sets()[block_index] = live_out;
1963 : }
1964 20584248 : return live_out;
1965 : }
1966 :
1967 41168074 : void LiveRangeBuilder::AddInitialIntervals(const InstructionBlock* block,
1968 116330988 : BitVector* live_out) {
1969 : // Add an interval that includes the entire block to the live range for
1970 : // each live_out value.
1971 : LifetimePosition start = LifetimePosition::GapFromInstructionIndex(
1972 20584037 : block->first_instruction_index());
1973 : LifetimePosition end = LifetimePosition::InstructionFromInstructionIndex(
1974 : block->last_instruction_index())
1975 20584037 : .NextStart();
1976 20584037 : BitVector::Iterator iterator(live_out);
1977 294413290 : while (!iterator.Done()) {
1978 116330988 : int operand_index = iterator.Current();
1979 116330988 : TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(operand_index);
1980 116330876 : range->AddUseInterval(start, end, allocation_zone());
1981 116330726 : iterator.Advance();
1982 : }
1983 20583668 : }
1984 :
1985 25000728 : int LiveRangeBuilder::FixedFPLiveRangeID(int index, MachineRepresentation rep) {
1986 25000728 : int result = -index - 1;
1987 25000728 : switch (rep) {
1988 : case MachineRepresentation::kSimd128:
1989 60 : result -= config()->num_float_registers();
1990 : V8_FALLTHROUGH;
1991 : case MachineRepresentation::kFloat32:
1992 13190 : result -= config()->num_double_registers();
1993 : V8_FALLTHROUGH;
1994 : case MachineRepresentation::kFloat64:
1995 25000728 : result -= config()->num_general_registers();
1996 : break;
1997 : default:
1998 0 : UNREACHABLE();
1999 : break;
2000 : }
2001 25000728 : return result;
2002 : }
2003 :
2004 273279615 : TopLevelLiveRange* LiveRangeBuilder::FixedLiveRangeFor(int index) {
2005 : DCHECK(index < config()->num_general_registers());
2006 344700681 : TopLevelLiveRange* result = data()->fixed_live_ranges()[index];
2007 114900227 : if (result == nullptr) {
2008 : MachineRepresentation rep = InstructionSequence::DefaultRepresentation();
2009 21739929 : result = data()->NewLiveRange(FixedLiveRangeID(index), rep);
2010 : DCHECK(result->IsFixed());
2011 : result->set_assigned_register(index);
2012 21739488 : data()->MarkAllocated(rep, index);
2013 43479346 : data()->fixed_live_ranges()[index] = result;
2014 : }
2015 114899971 : return result;
2016 : }
2017 :
2018 80728844 : TopLevelLiveRange* LiveRangeBuilder::FixedFPLiveRangeFor(
2019 105728950 : int index, MachineRepresentation rep) {
2020 : int num_regs = config()->num_double_registers();
2021 : ZoneVector<TopLevelLiveRange*>* live_ranges =
2022 : &data()->fixed_double_live_ranges();
2023 : if (!kSimpleFPAliasing) {
2024 : switch (rep) {
2025 : case MachineRepresentation::kFloat32:
2026 : num_regs = config()->num_float_registers();
2027 : live_ranges = &data()->fixed_float_live_ranges();
2028 : break;
2029 : case MachineRepresentation::kSimd128:
2030 : num_regs = config()->num_simd128_registers();
2031 : live_ranges = &data()->fixed_simd128_live_ranges();
2032 : break;
2033 : default:
2034 : break;
2035 : }
2036 : }
2037 :
2038 : DCHECK(index < num_regs);
2039 : USE(num_regs);
2040 186457976 : TopLevelLiveRange* result = (*live_ranges)[index];
2041 80728844 : if (result == nullptr) {
2042 25000689 : result = data()->NewLiveRange(FixedFPLiveRangeID(index, rep), rep);
2043 : DCHECK(result->IsFixed());
2044 : result->set_assigned_register(index);
2045 25000106 : data()->MarkAllocated(rep, index);
2046 25000288 : (*live_ranges)[index] = result;
2047 : }
2048 80728443 : return result;
2049 : }
2050 :
2051 284370718 : TopLevelLiveRange* LiveRangeBuilder::LiveRangeFor(InstructionOperand* operand) {
2052 168885826 : if (operand->IsUnallocated()) {
2053 : return data()->GetOrCreateLiveRangeFor(
2054 101982565 : UnallocatedOperand::cast(operand)->virtual_register());
2055 66903261 : } else if (operand->IsConstant()) {
2056 : return data()->GetOrCreateLiveRangeFor(
2057 13502327 : ConstantOperand::cast(operand)->virtual_register());
2058 53400934 : } else if (operand->IsRegister()) {
2059 : return FixedLiveRangeFor(
2060 50595241 : LocationOperand::cast(operand)->GetRegister().code());
2061 2805693 : } else if (operand->IsFPRegister()) {
2062 : LocationOperand* op = LocationOperand::cast(operand);
2063 340787 : return FixedFPLiveRangeFor(op->register_code(), op->representation());
2064 : } else {
2065 : return nullptr;
2066 : }
2067 : }
2068 :
2069 104176584 : UsePosition* LiveRangeBuilder::NewUsePosition(LifetimePosition pos,
2070 : InstructionOperand* operand,
2071 : void* hint,
2072 : UsePositionHintType hint_type) {
2073 208352957 : return new (allocation_zone()) UsePosition(pos, operand, hint, hint_type);
2074 : }
2075 :
2076 67567447 : UsePosition* LiveRangeBuilder::Define(LifetimePosition position,
2077 : InstructionOperand* operand, void* hint,
2078 : UsePositionHintType hint_type) {
2079 67567447 : TopLevelLiveRange* range = LiveRangeFor(operand);
2080 67567637 : if (range == nullptr) return nullptr;
2081 :
2082 66335263 : if (range->IsEmpty() || range->Start() > position) {
2083 : // Can happen if there is a definition without use.
2084 2194205 : range->AddUseInterval(position, position.NextStart(), allocation_zone());
2085 2194196 : range->AddUsePosition(NewUsePosition(position.NextStart()));
2086 : } else {
2087 64141058 : range->ShortenTo(position);
2088 : }
2089 66335393 : if (!operand->IsUnallocated()) return nullptr;
2090 : UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand);
2091 : UsePosition* use_pos =
2092 26470778 : NewUsePosition(position, unalloc_operand, hint, hint_type);
2093 26470669 : range->AddUsePosition(use_pos);
2094 26470769 : return use_pos;
2095 : }
2096 :
2097 101318647 : UsePosition* LiveRangeBuilder::Use(LifetimePosition block_start,
2098 : LifetimePosition position,
2099 : InstructionOperand* operand, void* hint,
2100 : UsePositionHintType hint_type) {
2101 101318647 : TopLevelLiveRange* range = LiveRangeFor(operand);
2102 101318556 : if (range == nullptr) return nullptr;
2103 : UsePosition* use_pos = nullptr;
2104 100086204 : if (operand->IsUnallocated()) {
2105 : UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand);
2106 75512745 : use_pos = NewUsePosition(position, unalloc_operand, hint, hint_type);
2107 75512561 : range->AddUsePosition(use_pos);
2108 : }
2109 100086168 : range->AddUseInterval(block_start, position, allocation_zone());
2110 100085919 : return use_pos;
2111 : }
2112 :
2113 77252772 : void LiveRangeBuilder::ProcessInstructions(const InstructionBlock* block,
2114 30353096 : BitVector* live) {
2115 : int block_start = block->first_instruction_index();
2116 : LifetimePosition block_start_position =
2117 : LifetimePosition::GapFromInstructionIndex(block_start);
2118 : bool fixed_float_live_ranges = false;
2119 : bool fixed_simd128_live_ranges = false;
2120 : if (!kSimpleFPAliasing) {
2121 : int mask = data()->code()->representation_mask();
2122 : fixed_float_live_ranges = (mask & kFloat32Bit) != 0;
2123 : fixed_simd128_live_ranges = (mask & kSimd128Bit) != 0;
2124 : }
2125 :
2126 85440948 : for (int index = block->last_instruction_index(); index >= block_start;
2127 : index--) {
2128 : LifetimePosition curr_position =
2129 : LifetimePosition::InstructionFromInstructionIndex(index);
2130 358736424 : Instruction* instr = code()->InstructionAt(index);
2131 : DCHECK_NOT_NULL(instr);
2132 : DCHECK(curr_position.IsInstructionPosition());
2133 : // Process output, inputs, and temps of this instruction.
2134 201883732 : for (size_t i = 0; i < instr->OutputCount(); i++) {
2135 36084944 : InstructionOperand* output = instr->OutputAt(i);
2136 36084944 : if (output->IsUnallocated()) {
2137 : // Unsupported.
2138 : DCHECK(!UnallocatedOperand::cast(output)->HasSlotPolicy());
2139 : int out_vreg = UnallocatedOperand::cast(output)->virtual_register();
2140 13483027 : live->Remove(out_vreg);
2141 22601917 : } else if (output->IsConstant()) {
2142 : int out_vreg = ConstantOperand::cast(output)->virtual_register();
2143 13502325 : live->Remove(out_vreg);
2144 : }
2145 36649071 : if (block->IsHandler() && index == block_start && output->IsAllocated() &&
2146 36263515 : output->IsRegister() &&
2147 : AllocatedOperand::cast(output)->GetRegister() ==
2148 : v8::internal::kReturnRegister0) {
2149 : // The register defined here is blocked from gap start - it is the
2150 : // exception value.
2151 : // TODO(mtrofin): should we explore an explicit opcode for
2152 : // the first instruction in the handler?
2153 : Define(LifetimePosition::GapFromInstructionIndex(index), output);
2154 : } else {
2155 : Define(curr_position, output);
2156 : }
2157 : }
2158 :
2159 64856922 : if (instr->ClobbersRegisters()) {
2160 133970272 : for (int i = 0; i < config()->num_allocatable_general_registers(); ++i) {
2161 : // Create a UseInterval at this instruction for all fixed registers,
2162 : // (including the instruction outputs). Adding another UseInterval here
2163 : // is OK because AddUseInterval will just merge it with the existing
2164 : // one at the end of the range.
2165 64305693 : int code = config()->GetAllocatableGeneralCode(i);
2166 64305693 : TopLevelLiveRange* range = FixedLiveRangeFor(code);
2167 : range->AddUseInterval(curr_position, curr_position.End(),
2168 64305499 : allocation_zone());
2169 : }
2170 : }
2171 :
2172 64856736 : if (instr->ClobbersDoubleRegisters()) {
2173 166134769 : for (int i = 0; i < config()->num_allocatable_double_registers(); ++i) {
2174 : // Add a UseInterval for all DoubleRegisters. See comment above for
2175 : // general registers.
2176 80388056 : int code = config()->GetAllocatableDoubleCode(i);
2177 : TopLevelLiveRange* range =
2178 80388056 : FixedFPLiveRangeFor(code, MachineRepresentation::kFloat64);
2179 : range->AddUseInterval(curr_position, curr_position.End(),
2180 80387787 : allocation_zone());
2181 : }
2182 : // Clobber fixed float registers on archs with non-simple aliasing.
2183 : if (!kSimpleFPAliasing) {
2184 : if (fixed_float_live_ranges) {
2185 : for (int i = 0; i < config()->num_allocatable_float_registers();
2186 : ++i) {
2187 : // Add a UseInterval for all FloatRegisters. See comment above for
2188 : // general registers.
2189 : int code = config()->GetAllocatableFloatCode(i);
2190 : TopLevelLiveRange* range =
2191 : FixedFPLiveRangeFor(code, MachineRepresentation::kFloat32);
2192 : range->AddUseInterval(curr_position, curr_position.End(),
2193 : allocation_zone());
2194 : }
2195 : }
2196 : if (fixed_simd128_live_ranges) {
2197 : for (int i = 0; i < config()->num_allocatable_simd128_registers();
2198 : ++i) {
2199 : int code = config()->GetAllocatableSimd128Code(i);
2200 : TopLevelLiveRange* range =
2201 : FixedFPLiveRangeFor(code, MachineRepresentation::kSimd128);
2202 : range->AddUseInterval(curr_position, curr_position.End(),
2203 : allocation_zone());
2204 : }
2205 : }
2206 : }
2207 : }
2208 :
2209 319369662 : for (size_t i = 0; i < instr->InputCount(); i++) {
2210 127256550 : InstructionOperand* input = instr->InputAt(i);
2211 127256550 : if (input->IsImmediate() || input->IsExplicit()) {
2212 : continue; // Ignore immediates and explicitly reserved registers.
2213 : }
2214 : LifetimePosition use_pos;
2215 115312265 : if (input->IsUnallocated() &&
2216 : UnallocatedOperand::cast(input)->IsUsedAtStart()) {
2217 : use_pos = curr_position;
2218 : } else {
2219 : use_pos = curr_position.End();
2220 : }
2221 :
2222 66821176 : if (input->IsUnallocated()) {
2223 : UnallocatedOperand* unalloc = UnallocatedOperand::cast(input);
2224 : int vreg = unalloc->virtual_register();
2225 : live->Add(vreg);
2226 48491197 : if (unalloc->HasSlotPolicy()) {
2227 12469862 : data()->GetOrCreateLiveRangeFor(vreg)->set_has_slot_use(true);
2228 : }
2229 : }
2230 : Use(block_start_position, use_pos, input);
2231 : }
2232 :
2233 66506525 : for (size_t i = 0; i < instr->TempCount(); i++) {
2234 825015 : InstructionOperand* temp = instr->TempAt(i);
2235 : // Unsupported.
2236 : DCHECK_IMPLIES(temp->IsUnallocated(),
2237 : !UnallocatedOperand::cast(temp)->HasSlotPolicy());
2238 825015 : if (instr->ClobbersTemps()) {
2239 0 : if (temp->IsRegister()) continue;
2240 0 : if (temp->IsUnallocated()) {
2241 : UnallocatedOperand* temp_unalloc = UnallocatedOperand::cast(temp);
2242 0 : if (temp_unalloc->HasFixedPolicy()) {
2243 : continue;
2244 : }
2245 : }
2246 : }
2247 : Use(block_start_position, curr_position.End(), temp);
2248 : Define(curr_position, temp);
2249 : }
2250 :
2251 : // Process the moves of the instruction's gaps, making their sources live.
2252 : const Instruction::GapPosition kPositions[] = {Instruction::END,
2253 64856496 : Instruction::START};
2254 : curr_position = curr_position.PrevStart();
2255 : DCHECK(curr_position.IsGapPosition());
2256 194569753 : for (const Instruction::GapPosition& position : kPositions) {
2257 129712733 : ParallelMove* move = instr->GetParallelMove(position);
2258 129712733 : if (move == nullptr) continue;
2259 24835980 : if (position == Instruction::END) {
2260 : curr_position = curr_position.End();
2261 : } else {
2262 : curr_position = curr_position.Start();
2263 : }
2264 85134167 : for (MoveOperands* cur : *move) {
2265 35461683 : InstructionOperand& from = cur->source();
2266 35461683 : InstructionOperand& to = cur->destination();
2267 : void* hint = &to;
2268 35461683 : UsePositionHintType hint_type = UsePosition::HintTypeForOperand(to);
2269 : UsePosition* to_use = nullptr;
2270 : int phi_vreg = -1;
2271 35461832 : if (to.IsUnallocated()) {
2272 : int to_vreg = UnallocatedOperand::cast(to).virtual_register();
2273 17131716 : TopLevelLiveRange* to_range =
2274 17131764 : data()->GetOrCreateLiveRangeFor(to_vreg);
2275 17131716 : if (to_range->is_phi()) {
2276 : phi_vreg = to_vreg;
2277 5185299 : if (to_range->is_non_loop_phi()) {
2278 4433829 : hint = to_range->current_hint_position();
2279 : hint_type = hint == nullptr ? UsePositionHintType::kNone
2280 4433829 : : UsePositionHintType::kUsePos;
2281 : } else {
2282 : hint_type = UsePositionHintType::kPhi;
2283 : hint = data()->GetPhiMapValueFor(to_vreg);
2284 : }
2285 : } else {
2286 11946417 : if (live->Contains(to_vreg)) {
2287 : to_use = Define(curr_position, &to, &from,
2288 10157992 : UsePosition::HintTypeForOperand(from));
2289 10158103 : live->Remove(to_vreg);
2290 : } else {
2291 : cur->Eliminate();
2292 : continue;
2293 : }
2294 : }
2295 : } else {
2296 : Define(curr_position, &to);
2297 : }
2298 : UsePosition* from_use =
2299 33673464 : Use(block_start_position, curr_position, &from, hint, hint_type);
2300 : // Mark range live.
2301 33673305 : if (from.IsUnallocated()) {
2302 : live->Add(UnallocatedOperand::cast(from).virtual_register());
2303 : }
2304 : // Resolve use position hints just created.
2305 33673305 : if (to_use != nullptr && from_use != nullptr) {
2306 : to_use->ResolveHint(from_use);
2307 : from_use->ResolveHint(to_use);
2308 : }
2309 : DCHECK_IMPLIES(to_use != nullptr, to_use->IsResolved());
2310 : DCHECK_IMPLIES(from_use != nullptr, from_use->IsResolved());
2311 : // Potentially resolve phi hint.
2312 33673305 : if (phi_vreg != -1) ResolvePhiHint(&from, from_use);
2313 : }
2314 : }
2315 : }
2316 20584302 : }
2317 :
2318 22753488 : void LiveRangeBuilder::ProcessPhis(const InstructionBlock* block,
2319 : BitVector* live) {
2320 43337194 : for (PhiInstruction* phi : block->phis()) {
2321 : // The live range interval already ends at the first instruction of the
2322 : // block.
2323 : int phi_vreg = phi->virtual_register();
2324 2169806 : live->Remove(phi_vreg);
2325 : // Select a hint from a predecessor block that precedes this block in the
2326 : // rpo order. In order of priority:
2327 : // - Avoid hints from deferred blocks.
2328 : // - Prefer hints from allocated (or explicit) operands.
2329 : // - Prefer hints from empty blocks (containing just parallel moves and a
2330 : // jump). In these cases, if we can elide the moves, the jump threader
2331 : // is likely to be able to elide the jump.
2332 : // The enforcement of hinting in rpo order is required because hint
2333 : // resolution that happens later in the compiler pipeline visits
2334 : // instructions in reverse rpo order, relying on the fact that phis are
2335 : // encountered before their hints.
2336 : InstructionOperand* hint = nullptr;
2337 : int hint_preference = 0;
2338 :
2339 : // The cost of hinting increases with the number of predecessors. At the
2340 : // same time, the typical benefit decreases, since this hinting only
2341 : // optimises the execution path through one predecessor. A limit of 2 is
2342 : // sufficient to hit the common if/else pattern.
2343 : int predecessor_limit = 2;
2344 :
2345 6886761 : for (RpoNumber predecessor : block->predecessors()) {
2346 11896245 : const InstructionBlock* predecessor_block =
2347 4342674 : code()->InstructionBlockAt(predecessor);
2348 : DCHECK_EQ(predecessor_block->rpo_number(), predecessor);
2349 :
2350 : // Only take hints from earlier rpo numbers.
2351 4342679 : if (predecessor >= block->rpo_number()) continue;
2352 :
2353 : // Look up the predecessor instruction.
2354 : const Instruction* predecessor_instr =
2355 3965429 : GetLastInstruction(code(), predecessor_block);
2356 : InstructionOperand* predecessor_hint = nullptr;
2357 : // Phis are assigned in the END position of the last instruction in each
2358 : // predecessor block.
2359 8948242 : for (MoveOperands* move :
2360 8948240 : *predecessor_instr->GetParallelMove(Instruction::END)) {
2361 : InstructionOperand& to = move->destination();
2362 9965661 : if (to.IsUnallocated() &&
2363 : UnallocatedOperand::cast(to).virtual_register() == phi_vreg) {
2364 3965413 : predecessor_hint = &move->source();
2365 3965413 : break;
2366 : }
2367 : }
2368 : DCHECK_NOT_NULL(predecessor_hint);
2369 :
2370 : // For each predecessor, generate a score according to the priorities
2371 : // described above, and pick the best one. Flags in higher-order bits have
2372 : // a higher priority than those in lower-order bits.
2373 : int predecessor_hint_preference = 0;
2374 : const int kNotDeferredBlockPreference = (1 << 2);
2375 : const int kMoveIsAllocatedPreference = (1 << 1);
2376 : const int kBlockIsEmptyPreference = (1 << 0);
2377 :
2378 : // - Avoid hints from deferred blocks.
2379 3965415 : if (!predecessor_block->IsDeferred()) {
2380 : predecessor_hint_preference |= kNotDeferredBlockPreference;
2381 : }
2382 :
2383 : // - Prefer hints from allocated (or explicit) operands.
2384 : //
2385 : // Already-allocated or explicit operands are typically assigned using
2386 : // the parallel moves on the last instruction. For example:
2387 : //
2388 : // gap (v101 = [x0|R|w32]) (v100 = v101)
2389 : // ArchJmp
2390 : // ...
2391 : // phi: v100 = v101 v102
2392 : //
2393 : // We have already found the END move, so look for a matching START move
2394 : // from an allocated (or explicit) operand.
2395 : //
2396 : // Note that we cannot simply look up data()->live_ranges()[vreg] here
2397 : // because the live ranges are still being built when this function is
2398 : // called.
2399 : // TODO(v8): Find a way to separate hinting from live range analysis in
2400 : // BuildLiveRanges so that we can use the O(1) live-range look-up.
2401 : auto moves = predecessor_instr->GetParallelMove(Instruction::START);
2402 3965415 : if (moves != nullptr) {
2403 771542 : for (MoveOperands* move : *moves) {
2404 : InstructionOperand& to = move->destination();
2405 352857 : if (predecessor_hint->Equals(to)) {
2406 287027 : if (move->source().IsAllocated() || move->source().IsExplicit()) {
2407 287026 : predecessor_hint_preference |= kMoveIsAllocatedPreference;
2408 : }
2409 : break;
2410 : }
2411 : }
2412 : }
2413 :
2414 : // - Prefer hints from empty blocks.
2415 3965415 : if (predecessor_block->last_instruction_index() ==
2416 : predecessor_block->first_instruction_index()) {
2417 1493841 : predecessor_hint_preference |= kBlockIsEmptyPreference;
2418 : }
2419 :
2420 7930830 : if ((hint == nullptr) ||
2421 3965415 : (predecessor_hint_preference > hint_preference)) {
2422 : // Take the hint from this predecessor.
2423 : hint = predecessor_hint;
2424 : hint_preference = predecessor_hint_preference;
2425 : }
2426 :
2427 3965415 : if (--predecessor_limit <= 0) break;
2428 : }
2429 : DCHECK_NOT_NULL(hint);
2430 :
2431 : LifetimePosition block_start = LifetimePosition::GapFromInstructionIndex(
2432 2169793 : block->first_instruction_index());
2433 2169800 : UsePosition* use_pos = Define(block_start, &phi->output(), hint,
2434 4339593 : UsePosition::HintTypeForOperand(*hint));
2435 : MapPhiHint(hint, use_pos);
2436 : }
2437 20583693 : }
2438 :
2439 486807 : void LiveRangeBuilder::ProcessLoopHeader(const InstructionBlock* block,
2440 2273248 : BitVector* live) {
2441 : DCHECK(block->IsLoopHeader());
2442 : // Add a live range stretching from the first loop instruction to the last
2443 : // for each value live on entry to the header.
2444 243404 : BitVector::Iterator iterator(live);
2445 : LifetimePosition start = LifetimePosition::GapFromInstructionIndex(
2446 243403 : block->first_instruction_index());
2447 : LifetimePosition end = LifetimePosition::GapFromInstructionIndex(
2448 243403 : code()->LastLoopInstructionIndex(block))
2449 243403 : .NextFullStart();
2450 5276713 : while (!iterator.Done()) {
2451 2273248 : int operand_index = iterator.Current();
2452 2273248 : TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(operand_index);
2453 2273249 : range->EnsureInterval(start, end, allocation_zone());
2454 2273250 : iterator.Advance();
2455 : }
2456 : // Insert all values into the live in sets of all blocks in the loop.
2457 3629537 : for (int i = block->rpo_number().ToInt() + 1; i < block->loop_end().ToInt();
2458 : ++i) {
2459 10158399 : live_in_sets()[i]->Union(*live);
2460 : }
2461 243404 : }
2462 :
2463 91921644 : void LiveRangeBuilder::BuildLiveRanges() {
2464 : // Process the blocks in reverse order.
2465 26483288 : for (int block_id = code()->InstructionBlockCount() - 1; block_id >= 0;
2466 : --block_id) {
2467 : InstructionBlock* block =
2468 20583681 : code()->InstructionBlockAt(RpoNumber::FromInt(block_id));
2469 20583477 : BitVector* live = ComputeLiveOut(block, data());
2470 : // Initially consider all live_out values live for the entire block. We
2471 : // will shorten these intervals if necessary.
2472 20584262 : AddInitialIntervals(block, live);
2473 : // Process the instructions in reverse order, generating and killing
2474 : // live values.
2475 20583727 : ProcessInstructions(block, live);
2476 : // All phi output operands are killed by this block.
2477 20584168 : ProcessPhis(block, live);
2478 : // Now live is live_in for this block except not including values live
2479 : // out on backward successor edges.
2480 20583754 : if (block->IsLoopHeader()) ProcessLoopHeader(block, live);
2481 61751742 : live_in_sets()[block_id] = live;
2482 : }
2483 : // Postprocess the ranges.
2484 2949920 : const size_t live_ranges_size = data()->live_ranges().size();
2485 142392503 : for (TopLevelLiveRange* range : data()->live_ranges()) {
2486 165937546 : CHECK_EQ(live_ranges_size,
2487 : data()->live_ranges().size()); // TODO(neis): crbug.com/831822
2488 82968684 : if (range == nullptr) continue;
2489 : // Give slots to all ranges with a non fixed slot use.
2490 40830214 : if (range->has_slot_use() && range->HasNoSpillType()) {
2491 1009080 : data()->AssignSpillRangeToLiveRange(range);
2492 : }
2493 : // TODO(bmeurer): This is a horrible hack to make sure that for constant
2494 : // live ranges, every use requires the constant to be in a register.
2495 : // Without this hack, all uses with "any" policy would get the constant
2496 : // operand assigned.
2497 53523979 : if (range->HasSpillOperand() && range->GetSpillOperand()->IsConstant()) {
2498 33049667 : for (UsePosition* pos = range->first_pos(); pos != nullptr;
2499 : pos = pos->next()) {
2500 19547348 : if (pos->type() == UsePositionType::kRequiresSlot ||
2501 : pos->type() == UsePositionType::kRegisterOrSlotOrConstant) {
2502 : continue;
2503 : }
2504 : UsePositionType new_type = UsePositionType::kRegisterOrSlot;
2505 : // Can't mark phis as needing a register.
2506 19547343 : if (!pos->pos().IsGapPosition()) {
2507 : new_type = UsePositionType::kRequiresRegister;
2508 : }
2509 : pos->set_type(new_type, true);
2510 : }
2511 : }
2512 : }
2513 6002922 : for (auto preassigned : data()->preassigned_slot_ranges()) {
2514 0 : TopLevelLiveRange* range = preassigned.first;
2515 : int slot_id = preassigned.second;
2516 : SpillRange* spill = range->HasSpillRange()
2517 : ? range->GetSpillRange()
2518 207180 : : data()->AssignSpillRangeToLiveRange(range);
2519 : spill->set_assigned_slot(slot_id);
2520 : }
2521 : #ifdef DEBUG
2522 : Verify();
2523 : #endif
2524 2949658 : }
2525 :
2526 0 : void LiveRangeBuilder::MapPhiHint(InstructionOperand* operand,
2527 : UsePosition* use_pos) {
2528 : DCHECK(!use_pos->IsResolved());
2529 4339606 : auto res = phi_hints_.insert(std::make_pair(operand, use_pos));
2530 : DCHECK(res.second);
2531 : USE(res);
2532 0 : }
2533 :
2534 5185292 : void LiveRangeBuilder::ResolvePhiHint(InstructionOperand* operand,
2535 : UsePosition* use_pos) {
2536 : auto it = phi_hints_.find(operand);
2537 10370566 : if (it == phi_hints_.end()) return;
2538 : DCHECK(!it->second->IsResolved());
2539 2169804 : it->second->ResolveHint(use_pos);
2540 : }
2541 :
2542 0 : void LiveRangeBuilder::Verify() const {
2543 0 : for (auto& hint : phi_hints_) {
2544 0 : CHECK(hint.second->IsResolved());
2545 : }
2546 0 : for (const TopLevelLiveRange* current : data()->live_ranges()) {
2547 0 : if (current != nullptr && !current->IsEmpty()) {
2548 : // New LiveRanges should not be split.
2549 0 : CHECK_NULL(current->next());
2550 : // General integrity check.
2551 0 : current->Verify();
2552 0 : const UseInterval* first = current->first_interval();
2553 0 : if (first->next() == nullptr) continue;
2554 :
2555 : // Consecutive intervals should not end and start in the same block,
2556 : // otherwise the intervals should have been joined, because the
2557 : // variable is live throughout that block.
2558 0 : CHECK(NextIntervalStartsInDifferentBlocks(first));
2559 :
2560 0 : for (const UseInterval* i = first->next(); i != nullptr; i = i->next()) {
2561 : // Except for the first interval, the other intevals must start at
2562 : // a block boundary, otherwise data wouldn't flow to them.
2563 0 : CHECK(IntervalStartsAtBlockBoundary(i));
2564 : // The last instruction of the predecessors of the block the interval
2565 : // starts must be covered by the range.
2566 0 : CHECK(IntervalPredecessorsCoveredByRange(i, current));
2567 0 : if (i->next() != nullptr) {
2568 : // Check the consecutive intervals property, except for the last
2569 : // interval, where it doesn't apply.
2570 0 : CHECK(NextIntervalStartsInDifferentBlocks(i));
2571 : }
2572 : }
2573 : }
2574 : }
2575 0 : }
2576 :
2577 0 : bool LiveRangeBuilder::IntervalStartsAtBlockBoundary(
2578 0 : const UseInterval* interval) const {
2579 : LifetimePosition start = interval->start();
2580 0 : if (!start.IsFullStart()) return false;
2581 : int instruction_index = start.ToInstructionIndex();
2582 0 : const InstructionBlock* block =
2583 0 : data()->code()->GetInstructionBlock(instruction_index);
2584 0 : return block->first_instruction_index() == instruction_index;
2585 : }
2586 :
2587 0 : bool LiveRangeBuilder::IntervalPredecessorsCoveredByRange(
2588 0 : const UseInterval* interval, const TopLevelLiveRange* range) const {
2589 : LifetimePosition start = interval->start();
2590 : int instruction_index = start.ToInstructionIndex();
2591 : const InstructionBlock* block =
2592 0 : data()->code()->GetInstructionBlock(instruction_index);
2593 0 : for (RpoNumber pred_index : block->predecessors()) {
2594 0 : const InstructionBlock* predecessor =
2595 0 : data()->code()->InstructionBlockAt(pred_index);
2596 : LifetimePosition last_pos = LifetimePosition::GapFromInstructionIndex(
2597 : predecessor->last_instruction_index());
2598 : last_pos = last_pos.NextStart().End();
2599 0 : if (!range->Covers(last_pos)) return false;
2600 : }
2601 0 : return true;
2602 : }
2603 :
2604 0 : bool LiveRangeBuilder::NextIntervalStartsInDifferentBlocks(
2605 0 : const UseInterval* interval) const {
2606 : DCHECK_NOT_NULL(interval->next());
2607 : LifetimePosition end = interval->end();
2608 : LifetimePosition next_start = interval->next()->start();
2609 : // Since end is not covered, but the previous position is, move back a
2610 : // position
2611 0 : end = end.IsStart() ? end.PrevStart().End() : end.Start();
2612 : int last_covered_index = end.ToInstructionIndex();
2613 : const InstructionBlock* block =
2614 0 : data()->code()->GetInstructionBlock(last_covered_index);
2615 : const InstructionBlock* next_block =
2616 0 : data()->code()->GetInstructionBlock(next_start.ToInstructionIndex());
2617 0 : return block->rpo_number() < next_block->rpo_number();
2618 : }
2619 :
2620 35487418 : void BundleBuilder::BuildBundles() {
2621 2949457 : TRACE("Build bundles\n");
2622 : // Process the blocks in reverse order.
2623 26484145 : for (int block_id = code()->InstructionBlockCount() - 1; block_id >= 0;
2624 : --block_id) {
2625 : InstructionBlock* block =
2626 20585011 : code()->InstructionBlockAt(RpoNumber::FromInt(block_id));
2627 20585129 : TRACE("Block B%d\n", block_id);
2628 43339565 : for (auto phi : block->phis()) {
2629 2169781 : LiveRange* out_range =
2630 2169782 : data()->GetOrCreateLiveRangeFor(phi->virtual_register());
2631 0 : LiveRangeBundle* out = out_range->get_bundle();
2632 2169781 : if (out == nullptr) {
2633 1648365 : out = new (data()->allocation_zone())
2634 1648365 : LiveRangeBundle(data()->allocation_zone(), next_bundle_id_++);
2635 1648366 : out->TryAddRange(out_range);
2636 : }
2637 2169787 : TRACE("Processing phi for v%d with %d:%d\n", phi->virtual_register(),
2638 : out_range->TopLevel()->vreg(), out_range->relative_id());
2639 9524838 : for (auto input : phi->operands()) {
2640 10370539 : LiveRange* input_range = data()->GetOrCreateLiveRangeFor(input);
2641 5185267 : TRACE("Input value v%d with range %d:%d\n", input,
2642 : input_range->TopLevel()->vreg(), input_range->relative_id());
2643 : LiveRangeBundle* input_bundle = input_range->get_bundle();
2644 5185283 : if (input_bundle != nullptr) {
2645 648662 : TRACE("Merge\n");
2646 648662 : if (out->TryMerge(input_bundle))
2647 366397 : TRACE("Merged %d and %d to %d\n", phi->virtual_register(), input,
2648 : out->id());
2649 : } else {
2650 4536621 : TRACE("Add\n");
2651 4536621 : if (out->TryAddRange(input_range))
2652 4108072 : TRACE("Added %d and %d to %d\n", phi->virtual_register(), input,
2653 : out->id());
2654 : }
2655 : }
2656 : }
2657 20584898 : TRACE("Done block B%d\n", block_id);
2658 : }
2659 2949587 : }
2660 :
2661 6184955 : bool LiveRangeBundle::TryAddRange(LiveRange* range) {
2662 : DCHECK_NULL(range->get_bundle());
2663 : // We may only add a new live range if its use intervals do not
2664 : // overlap with existing intervals in the bundle.
2665 11941367 : if (UsesOverlap(range->first_interval())) return false;
2666 : ranges_.insert(range);
2667 5756412 : range->set_bundle(this);
2668 5756412 : InsertUses(range->first_interval());
2669 5756426 : return true;
2670 : }
2671 648662 : bool LiveRangeBundle::TryMerge(LiveRangeBundle* other) {
2672 648662 : if (other == this) return true;
2673 :
2674 : auto iter1 = uses_.begin();
2675 : auto iter2 = other->uses_.begin();
2676 :
2677 2633429 : while (iter1 != uses_.end() && iter2 != other->uses_.end()) {
2678 1140013 : if (iter1->start > iter2->end) {
2679 : ++iter2;
2680 471411 : } else if (iter2->start > iter1->end) {
2681 : ++iter1;
2682 : } else {
2683 282255 : TRACE("No merge %d:%d %d:%d\n", iter1->start, iter1->end, iter2->start,
2684 : iter2->end);
2685 : return false;
2686 : }
2687 : }
2688 : // Uses are disjoint, merging is possible.
2689 198631 : for (auto it = other->ranges_.begin(); it != other->ranges_.end(); ++it) {
2690 144941 : (*it)->set_bundle(this);
2691 144941 : InsertUses((*it)->first_interval());
2692 : }
2693 : ranges_.insert(other->ranges_.begin(), other->ranges_.end());
2694 : other->ranges_.clear();
2695 :
2696 26845 : return true;
2697 : }
2698 :
2699 5756352 : void LiveRangeBundle::MergeSpillRanges() {
2700 : SpillRange* target = nullptr;
2701 55195018 : for (auto range : ranges_) {
2702 43682288 : if (range->TopLevel()->HasSpillRange()) {
2703 3746302 : SpillRange* current = range->TopLevel()->GetSpillRange();
2704 3746302 : if (target == nullptr) {
2705 : target = current;
2706 1553410 : } else if (target != current) {
2707 101829 : target->TryMerge(current);
2708 : }
2709 : }
2710 : }
2711 5756378 : }
2712 :
2713 12629664 : RegisterAllocator::RegisterAllocator(RegisterAllocationData* data,
2714 : RegisterKind kind)
2715 : : data_(data),
2716 : mode_(kind),
2717 : num_registers_(GetRegisterCount(data->config(), kind)),
2718 : num_allocatable_registers_(
2719 : GetAllocatableRegisterCount(data->config(), kind)),
2720 : allocatable_register_codes_(
2721 : GetAllocatableRegisterCodes(data->config(), kind)),
2722 12629664 : check_fp_aliasing_(false) {
2723 : if (!kSimpleFPAliasing && kind == FP_REGISTERS) {
2724 : check_fp_aliasing_ = (data->code()->representation_mask() &
2725 : (kFloat32Bit | kSimd128Bit)) != 0;
2726 : }
2727 3157416 : }
2728 :
2729 0 : LifetimePosition RegisterAllocator::GetSplitPositionForInstruction(
2730 13144326 : const LiveRange* range, int instruction_index) {
2731 : LifetimePosition ret = LifetimePosition::Invalid();
2732 :
2733 : ret = LifetimePosition::GapFromInstructionIndex(instruction_index);
2734 26289462 : if (range->Start() >= ret || ret >= range->End()) {
2735 : return LifetimePosition::Invalid();
2736 : }
2737 0 : return ret;
2738 : }
2739 :
2740 121563144 : void RegisterAllocator::SplitAndSpillRangesDefinedByMemoryOperand() {
2741 3158063 : size_t initial_range_count = data()->live_ranges().size();
2742 121562828 : for (size_t i = 0; i < initial_range_count; ++i) {
2743 236810162 : CHECK_EQ(initial_range_count,
2744 : data()->live_ranges().size()); // TODO(neis): crbug.com/831822
2745 136694069 : TopLevelLiveRange* range = data()->live_ranges()[i];
2746 118404786 : if (!CanProcessRange(range)) continue;
2747 83429234 : if (range->HasNoSpillType() ||
2748 1827163 : (range->HasSpillRange() && !range->has_slot_use())) {
2749 : continue;
2750 : }
2751 0 : LifetimePosition start = range->Start();
2752 18289301 : TRACE("Live range %d:%d is defined by a spill operand.\n",
2753 : range->TopLevel()->vreg(), range->relative_id());
2754 : LifetimePosition next_pos = start;
2755 18289283 : if (next_pos.IsGapPosition()) {
2756 : next_pos = next_pos.NextStart();
2757 : }
2758 :
2759 : // With splinters, we can be more strict and skip over positions
2760 : // not strictly needing registers.
2761 : UsePosition* pos =
2762 : range->IsSplinter()
2763 2923170 : ? range->NextRegisterPosition(next_pos)
2764 21212453 : : range->NextUsePositionRegisterIsBeneficial(next_pos);
2765 : // If the range already has a spill operand and it doesn't need a
2766 : // register immediately, split it and spill the first part of the range.
2767 18289272 : if (pos == nullptr) {
2768 4853823 : Spill(range);
2769 13435449 : } else if (pos->pos() > range->Start().NextStart()) {
2770 : // Do not spill live range eagerly if use position that can benefit from
2771 : // the register is too close to the start of live range.
2772 : LifetimePosition split_pos = GetSplitPositionForInstruction(
2773 : range, pos->pos().ToInstructionIndex());
2774 : // There is no place to split, so we can't split and spill.
2775 13145136 : if (!split_pos.IsValid()) continue;
2776 :
2777 : split_pos =
2778 13144319 : FindOptimalSplitPos(range->Start().NextFullStart(), split_pos);
2779 :
2780 13144279 : SplitRangeAt(range, split_pos);
2781 13144263 : Spill(range);
2782 : }
2783 : }
2784 3157747 : }
2785 :
2786 25509350 : LiveRange* RegisterAllocator::SplitRangeAt(LiveRange* range,
2787 : LifetimePosition pos) {
2788 : DCHECK(!range->TopLevel()->IsFixed());
2789 25509350 : TRACE("Splitting live range %d:%d at %d\n", range->TopLevel()->vreg(),
2790 : range->relative_id(), pos.value());
2791 :
2792 25509405 : if (pos <= range->Start()) return range;
2793 :
2794 : // We can't properly connect liveranges if splitting occurred at the end
2795 : // a block.
2796 : DCHECK(pos.IsStart() || pos.IsGapPosition() ||
2797 : (GetInstructionBlock(code(), pos)->last_instruction_index() !=
2798 : pos.ToInstructionIndex()));
2799 :
2800 22707933 : LiveRange* result = range->SplitAt(pos, allocation_zone());
2801 22707892 : return result;
2802 : }
2803 :
2804 3136156 : LiveRange* RegisterAllocator::SplitBetween(LiveRange* range,
2805 : LifetimePosition start,
2806 : LifetimePosition end) {
2807 : DCHECK(!range->TopLevel()->IsFixed());
2808 3136156 : TRACE("Splitting live range %d:%d in position between [%d, %d]\n",
2809 : range->TopLevel()->vreg(), range->relative_id(), start.value(),
2810 : end.value());
2811 :
2812 3136156 : LifetimePosition split_pos = FindOptimalSplitPos(start, end);
2813 : DCHECK(split_pos >= start);
2814 3136160 : return SplitRangeAt(range, split_pos);
2815 : }
2816 :
2817 16280384 : LifetimePosition RegisterAllocator::FindOptimalSplitPos(LifetimePosition start,
2818 : LifetimePosition end) {
2819 : int start_instr = start.ToInstructionIndex();
2820 : int end_instr = end.ToInstructionIndex();
2821 : DCHECK_LE(start_instr, end_instr);
2822 :
2823 : // We have no choice
2824 16280384 : if (start_instr == end_instr) return end;
2825 :
2826 : const InstructionBlock* start_block = GetInstructionBlock(code(), start);
2827 : const InstructionBlock* end_block = GetInstructionBlock(code(), end);
2828 :
2829 9262906 : if (end_block == start_block) {
2830 : // The interval is split in the same basic block. Split at the latest
2831 : // possible position.
2832 7051613 : return end;
2833 : }
2834 :
2835 191726 : const InstructionBlock* block = end_block;
2836 : // Find header of outermost loop.
2837 : do {
2838 : const InstructionBlock* loop = GetContainingLoop(code(), block);
2839 2334983 : if (loop == nullptr ||
2840 : loop->rpo_number().ToInt() <= start_block->rpo_number().ToInt()) {
2841 : // No more loops or loop starts before the lifetime start.
2842 : break;
2843 : }
2844 : block = loop;
2845 : } while (true);
2846 :
2847 : // We did not find any suitable outer loop. Split at the latest possible
2848 : // position unless end_block is a loop header itself.
2849 4302531 : if (block == end_block && !end_block->IsLoopHeader()) return end;
2850 :
2851 : return LifetimePosition::GapFromInstructionIndex(
2852 : block->first_instruction_index());
2853 : }
2854 :
2855 543649 : LifetimePosition RegisterAllocator::FindOptimalSpillingPos(
2856 : LiveRange* range, LifetimePosition pos) {
2857 : const InstructionBlock* block = GetInstructionBlock(code(), pos.Start());
2858 84718 : const InstructionBlock* loop_header =
2859 543650 : block->IsLoopHeader() ? block : GetContainingLoop(code(), block);
2860 :
2861 543650 : if (loop_header == nullptr) return pos;
2862 :
2863 : const UsePosition* prev_use =
2864 : range->PreviousUsePositionRegisterIsBeneficial(pos);
2865 :
2866 145108 : while (loop_header != nullptr) {
2867 : // We are going to spill live range inside the loop.
2868 : // If possible try to move spilling position backwards to loop header.
2869 : // This will reduce number of memory moves on the back edge.
2870 : LifetimePosition loop_start = LifetimePosition::GapFromInstructionIndex(
2871 : loop_header->first_instruction_index());
2872 :
2873 84718 : if (range->Covers(loop_start)) {
2874 65365 : if (prev_use == nullptr || prev_use->pos() < loop_start) {
2875 : // No register beneficial use inside the loop before the pos.
2876 : pos = loop_start;
2877 : }
2878 : }
2879 :
2880 : // Try hoisting out to an outer loop.
2881 : loop_header = GetContainingLoop(code(), loop_header);
2882 : }
2883 :
2884 60390 : return pos;
2885 : }
2886 :
2887 27588554 : void RegisterAllocator::Spill(LiveRange* range) {
2888 : DCHECK(!range->spilled());
2889 0 : TopLevelLiveRange* first = range->TopLevel();
2890 24126437 : TRACE("Spilling live range %d:%d\n", first->vreg(), range->relative_id());
2891 :
2892 24126538 : if (first->HasNoSpillType()) {
2893 3462117 : data()->AssignSpillRangeToLiveRange(first);
2894 : }
2895 : range->Spill();
2896 24126537 : }
2897 :
2898 0 : const char* RegisterAllocator::RegisterName(int register_code) const {
2899 : return mode() == GENERAL_REGISTERS
2900 : ? i::RegisterName(Register::from_code(register_code))
2901 0 : : i::RegisterName(DoubleRegister::from_code(register_code));
2902 : }
2903 :
2904 3157388 : LinearScanAllocator::LinearScanAllocator(RegisterAllocationData* data,
2905 : RegisterKind kind, Zone* local_zone)
2906 : : RegisterAllocator(data, kind),
2907 : unhandled_live_ranges_(local_zone),
2908 : active_live_ranges_(local_zone),
2909 : inactive_live_ranges_(local_zone),
2910 : next_active_ranges_change_(LifetimePosition::Invalid()),
2911 6314812 : next_inactive_ranges_change_(LifetimePosition::Invalid()) {
2912 3157424 : active_live_ranges().reserve(8);
2913 3157825 : inactive_live_ranges().reserve(8);
2914 3157820 : }
2915 :
2916 3157797 : void LinearScanAllocator::AllocateRegisters() {
2917 : DCHECK(unhandled_live_ranges().empty());
2918 : DCHECK(active_live_ranges().empty());
2919 : DCHECK(inactive_live_ranges().empty());
2920 :
2921 127876601 : SplitAndSpillRangesDefinedByMemoryOperand();
2922 :
2923 3157788 : const size_t live_ranges_size = data()->live_ranges().size();
2924 124719223 : for (TopLevelLiveRange* range : data()->live_ranges()) {
2925 236808132 : CHECK_EQ(live_ranges_size,
2926 : data()->live_ranges().size()); // TODO(neis): crbug.com/831822
2927 118404065 : if (!CanProcessRange(range)) continue;
2928 109716508 : for (LiveRange* to_add = range; to_add != nullptr;
2929 : to_add = to_add->next()) {
2930 54858463 : if (!to_add->spilled()) {
2931 36860694 : AddToUnhandled(to_add);
2932 : }
2933 : }
2934 : }
2935 :
2936 3157369 : if (mode() == GENERAL_REGISTERS) {
2937 53090323 : for (TopLevelLiveRange* current : data()->fixed_live_ranges()) {
2938 47191308 : if (current != nullptr) AddToInactive(current);
2939 : }
2940 : } else {
2941 3745296 : for (TopLevelLiveRange* current : data()->fixed_double_live_ranges()) {
2942 3329118 : if (current != nullptr) AddToInactive(current);
2943 : }
2944 : if (!kSimpleFPAliasing && check_fp_aliasing()) {
2945 : for (TopLevelLiveRange* current : data()->fixed_float_live_ranges()) {
2946 : if (current != nullptr) AddToInactive(current);
2947 : }
2948 : for (TopLevelLiveRange* current : data()->fixed_simd128_live_ranges()) {
2949 : if (current != nullptr) AddToInactive(current);
2950 : }
2951 : }
2952 : }
2953 :
2954 49040604 : while (!unhandled_live_ranges().empty()) {
2955 45882989 : LiveRange* current = *unhandled_live_ranges().begin();
2956 : unhandled_live_ranges().erase(unhandled_live_ranges().begin());
2957 : LifetimePosition position = current->Start();
2958 : #ifdef DEBUG
2959 : allocation_finger_ = position;
2960 : #endif
2961 45883022 : TRACE("Processing interval %d:%d start=%d\n", current->TopLevel()->vreg(),
2962 : current->relative_id(), position.value());
2963 :
2964 45883135 : if (current->IsTopLevel() && TryReuseSpillForPhi(current->TopLevel()))
2965 : continue;
2966 :
2967 45868927 : ForwardStateTo(position);
2968 :
2969 : DCHECK(!current->HasRegisterAssigned() && !current->spilled());
2970 :
2971 45868990 : ProcessCurrentRange(current);
2972 : }
2973 :
2974 3157615 : if (FLAG_trace_alloc) {
2975 0 : PrintRangeOverview(std::cout);
2976 : }
2977 3157615 : }
2978 :
2979 1845462 : bool LinearScanAllocator::TrySplitAndSpillSplinter(LiveRange* range) {
2980 : DCHECK(range->TopLevel()->IsSplinter());
2981 : // If we can spill the whole range, great. Otherwise, split above the
2982 : // first use needing a register and spill the top part.
2983 1845462 : const UsePosition* next_reg = range->NextRegisterPosition(range->Start());
2984 1845450 : if (next_reg == nullptr) {
2985 1188584 : Spill(range);
2986 1188595 : return true;
2987 656867 : } else if (range->FirstHintPosition() == nullptr) {
2988 : // If there was no hint, but we have a use position requiring a
2989 : // register, apply the hot path heuristics.
2990 : return false;
2991 452345 : } else if (next_reg->pos().PrevStart() > range->Start()) {
2992 229269 : LiveRange* tail = SplitRangeAt(range, next_reg->pos().PrevStart());
2993 229269 : AddToUnhandled(tail);
2994 229269 : Spill(range);
2995 229269 : return true;
2996 : }
2997 : return false;
2998 : }
2999 :
3000 40298224 : void LinearScanAllocator::SetLiveRangeAssignedRegister(LiveRange* range,
3001 : int reg) {
3002 42361192 : data()->MarkAllocated(range->representation(), reg);
3003 : range->set_assigned_register(reg);
3004 40298276 : range->SetUseHints(reg);
3005 : range->UpdateBundleRegister(reg);
3006 62766991 : if (range->IsTopLevel() && range->TopLevel()->is_phi()) {
3007 : data()->GetPhiMapValueFor(range->TopLevel())->set_assigned_register(reg);
3008 : }
3009 40298386 : }
3010 :
3011 40298346 : void LinearScanAllocator::AddToActive(LiveRange* range) {
3012 40298346 : TRACE("Add live range %d:%d in %s to active\n", range->TopLevel()->vreg(),
3013 : range->relative_id(), RegisterName(range->assigned_register()));
3014 40298346 : active_live_ranges().push_back(range);
3015 : next_active_ranges_change_ =
3016 120894609 : std::min(next_active_ranges_change_, range->NextEndAfter(range->Start()));
3017 40298203 : }
3018 :
3019 24325691 : void LinearScanAllocator::AddToInactive(LiveRange* range) {
3020 24325691 : TRACE("Add live range %d:%d to inactive\n", range->TopLevel()->vreg(),
3021 : range->relative_id());
3022 24325691 : inactive_live_ranges().push_back(range);
3023 : next_inactive_ranges_change_ = std::min(
3024 72976848 : next_inactive_ranges_change_, range->NextStartAfter(range->Start()));
3025 24325616 : }
3026 :
3027 45883176 : void LinearScanAllocator::AddToUnhandled(LiveRange* range) {
3028 137648921 : if (range == nullptr || range->IsEmpty()) return;
3029 : DCHECK(!range->HasRegisterAssigned() && !range->spilled());
3030 : DCHECK(allocation_finger_ <= range->Start());
3031 :
3032 45883423 : TRACE("Add live range %d:%d to unhandled\n", range->TopLevel()->vreg(),
3033 : range->relative_id());
3034 : unhandled_live_ranges().insert(range);
3035 : }
3036 :
3037 47740838 : ZoneVector<LiveRange*>::iterator LinearScanAllocator::ActiveToHandled(
3038 : const ZoneVector<LiveRange*>::iterator it) {
3039 47740838 : TRACE("Moving live range %d:%d from active to handled\n",
3040 : (*it)->TopLevel()->vreg(), (*it)->relative_id());
3041 47740838 : return active_live_ranges().erase(it);
3042 : }
3043 :
3044 23786511 : ZoneVector<LiveRange*>::iterator LinearScanAllocator::ActiveToInactive(
3045 : const ZoneVector<LiveRange*>::iterator it, LifetimePosition position) {
3046 23786511 : LiveRange* range = *it;
3047 23786511 : inactive_live_ranges().push_back(range);
3048 23786508 : TRACE("Moving live range %d:%d from active to inactive\n",
3049 : (range)->TopLevel()->vreg(), range->relative_id());
3050 : next_inactive_ranges_change_ =
3051 71359551 : std::min(next_inactive_ranges_change_, range->NextStartAfter(position));
3052 23786517 : return active_live_ranges().erase(it);
3053 : }
3054 :
3055 6990889 : ZoneVector<LiveRange*>::iterator LinearScanAllocator::InactiveToHandled(
3056 : ZoneVector<LiveRange*>::iterator it) {
3057 6990889 : TRACE("Moving live range %d:%d from inactive to handled\n",
3058 : (*it)->TopLevel()->vreg(), (*it)->relative_id());
3059 6990889 : return inactive_live_ranges().erase(it);
3060 : }
3061 :
3062 35460294 : ZoneVector<LiveRange*>::iterator LinearScanAllocator::InactiveToActive(
3063 : ZoneVector<LiveRange*>::iterator it, LifetimePosition position) {
3064 35460294 : LiveRange* range = *it;
3065 35460294 : active_live_ranges().push_back(range);
3066 35460307 : TRACE("Moving live range %d:%d from inactive to active\n",
3067 : range->TopLevel()->vreg(), range->relative_id());
3068 : next_active_ranges_change_ =
3069 106380912 : std::min(next_active_ranges_change_, range->NextEndAfter(position));
3070 35460304 : return inactive_live_ranges().erase(it);
3071 : }
3072 :
3073 45868847 : void LinearScanAllocator::ForwardStateTo(LifetimePosition position) {
3074 45868847 : if (position >= next_active_ranges_change_) {
3075 28065627 : next_active_ranges_change_ = LifetimePosition::MaxPosition();
3076 163399722 : for (auto it = active_live_ranges().begin();
3077 : it != active_live_ranges().end();) {
3078 107268774 : LiveRange* cur_active = *it;
3079 107268774 : if (cur_active->End() <= position) {
3080 47196821 : it = ActiveToHandled(it);
3081 60071953 : } else if (!cur_active->Covers(position)) {
3082 23786503 : it = ActiveToInactive(it, position);
3083 : } else {
3084 : next_active_ranges_change_ = std::min(
3085 72571374 : next_active_ranges_change_, cur_active->NextEndAfter(position));
3086 : ++it;
3087 : }
3088 : }
3089 : }
3090 :
3091 45868541 : if (position >= next_inactive_ranges_change_) {
3092 11911155 : next_inactive_ranges_change_ = LifetimePosition::MaxPosition();
3093 157425409 : for (auto it = inactive_live_ranges().begin();
3094 : it != inactive_live_ranges().end();) {
3095 133603080 : LiveRange* cur_inactive = *it;
3096 133603080 : if (cur_inactive->End() <= position) {
3097 6990503 : it = InactiveToHandled(it);
3098 126612577 : } else if (cur_inactive->Covers(position)) {
3099 35460286 : it = InactiveToActive(it, position);
3100 : } else {
3101 : next_inactive_ranges_change_ =
3102 : std::min(next_inactive_ranges_change_,
3103 182305370 : cur_inactive->NextStartAfter(position));
3104 : ++it;
3105 : }
3106 : }
3107 : }
3108 45868560 : }
3109 :
3110 0 : void LinearScanAllocator::GetFPRegisterSet(MachineRepresentation rep,
3111 : int* num_regs, int* num_codes,
3112 : const int** codes) const {
3113 : DCHECK(!kSimpleFPAliasing);
3114 0 : if (rep == MachineRepresentation::kFloat32) {
3115 0 : *num_regs = data()->config()->num_float_registers();
3116 0 : *num_codes = data()->config()->num_allocatable_float_registers();
3117 0 : *codes = data()->config()->allocatable_float_codes();
3118 0 : } else if (rep == MachineRepresentation::kSimd128) {
3119 0 : *num_regs = data()->config()->num_simd128_registers();
3120 0 : *num_codes = data()->config()->num_allocatable_simd128_registers();
3121 0 : *codes = data()->config()->allocatable_simd128_codes();
3122 : } else {
3123 0 : UNREACHABLE();
3124 : }
3125 0 : }
3126 :
3127 45869916 : void LinearScanAllocator::FindFreeRegistersForRange(
3128 : LiveRange* range, Vector<LifetimePosition> positions) {
3129 45869916 : int num_regs = num_registers();
3130 : int num_codes = num_allocatable_registers();
3131 : const int* codes = allocatable_register_codes();
3132 : MachineRepresentation rep = range->representation();
3133 : if (!kSimpleFPAliasing && (rep == MachineRepresentation::kFloat32 ||
3134 : rep == MachineRepresentation::kSimd128))
3135 : GetFPRegisterSet(rep, &num_regs, &num_codes, &codes);
3136 : DCHECK_GE(positions.length(), num_regs);
3137 :
3138 779773337 : for (int i = 0; i < num_regs; ++i) {
3139 1467806842 : positions[i] = LifetimePosition::MaxPosition();
3140 : }
3141 :
3142 229532323 : for (LiveRange* cur_active : active_live_ranges()) {
3143 : int cur_reg = cur_active->assigned_register();
3144 : if (kSimpleFPAliasing || !check_fp_aliasing()) {
3145 275585008 : positions[cur_reg] = LifetimePosition::GapFromInstructionIndex(0);
3146 137792504 : TRACE("Register %s is free until pos %d (1) due to %d\n",
3147 : RegisterName(cur_reg),
3148 : LifetimePosition::GapFromInstructionIndex(0).value(),
3149 : cur_active->TopLevel()->vreg());
3150 : } else {
3151 : int alias_base_index = -1;
3152 : int aliases = data()->config()->GetAliases(
3153 : cur_active->representation(), cur_reg, rep, &alias_base_index);
3154 : DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
3155 : while (aliases--) {
3156 : int aliased_reg = alias_base_index + aliases;
3157 : positions[aliased_reg] = LifetimePosition::GapFromInstructionIndex(0);
3158 : }
3159 : }
3160 : }
3161 :
3162 540748918 : for (LiveRange* cur_inactive : inactive_live_ranges()) {
3163 : DCHECK(cur_inactive->End() > range->Start());
3164 : int cur_reg = cur_inactive->assigned_register();
3165 : // No need to carry out intersections, when this register won't be
3166 : // interesting to this range anyway.
3167 : // TODO(mtrofin): extend to aliased ranges, too.
3168 449009820 : if ((kSimpleFPAliasing || !check_fp_aliasing()) &&
3169 449009820 : positions[cur_reg] < range->Start()) {
3170 379663547 : continue;
3171 : }
3172 :
3173 354997212 : LifetimePosition next_intersection = cur_inactive->FirstIntersection(range);
3174 355000973 : if (!next_intersection.IsValid()) continue;
3175 : if (kSimpleFPAliasing || !check_fp_aliasing()) {
3176 69350034 : positions[cur_reg] = Min(positions[cur_reg], next_intersection);
3177 69350034 : TRACE("Register %s is free until pos %d (2)\n", RegisterName(cur_reg),
3178 : Min(positions[cur_reg], next_intersection).value());
3179 : } else {
3180 : int alias_base_index = -1;
3181 : int aliases = data()->config()->GetAliases(
3182 : cur_inactive->representation(), cur_reg, rep, &alias_base_index);
3183 : DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
3184 : while (aliases--) {
3185 : int aliased_reg = alias_base_index + aliases;
3186 : positions[aliased_reg] = Min(positions[aliased_reg], next_intersection);
3187 : }
3188 : }
3189 : }
3190 45869195 : }
3191 :
3192 : // High-level register allocation summary:
3193 : //
3194 : // For regular, or hot (i.e. not splinter) ranges, we attempt to first
3195 : // allocate first the preferred (hint) register. If that is not possible,
3196 : // we find a register that's free, and allocate that. If that's not possible,
3197 : // we search for a register to steal from a range that was allocated. The
3198 : // goal is to optimize for throughput by avoiding register-to-memory
3199 : // moves, which are expensive.
3200 : //
3201 : // For splinters, the goal is to minimize the number of moves. First we try
3202 : // to allocate the preferred register (more discussion follows). Failing that,
3203 : // we bail out and spill as far as we can, unless the first use is at start,
3204 : // case in which we apply the same behavior as we do for regular ranges.
3205 : // If there is no hint, we apply the hot-path behavior.
3206 : //
3207 : // For the splinter, the hint register may come from:
3208 : //
3209 : // - the hot path (we set it at splintering time with SetHint). In this case, if
3210 : // we cannot offer the hint register, spilling is better because it's at most
3211 : // 1 move, while trying to find and offer another register is at least 1 move.
3212 : //
3213 : // - a constraint. If we cannot offer that register, it's because there is some
3214 : // interference. So offering the hint register up to the interference would
3215 : // result
3216 : // in a move at the interference, plus a move to satisfy the constraint. This is
3217 : // also the number of moves if we spill, with the potential of the range being
3218 : // already spilled and thus saving a move (the spill).
3219 : // Note that this can only be an input constraint, if it were an output one,
3220 : // the range wouldn't be a splinter because it means it'd be defined in a
3221 : // deferred
3222 : // block, and we don't mark those as splinters (they live in deferred blocks
3223 : // only).
3224 : //
3225 : // - a phi. The same analysis as in the case of the input constraint applies.
3226 : //
3227 73241789 : void LinearScanAllocator::ProcessCurrentRange(LiveRange* current) {
3228 : EmbeddedVector<LifetimePosition, RegisterConfiguration::kMaxRegisters>
3229 : free_until_pos;
3230 45869025 : FindFreeRegistersForRange(current, free_until_pos);
3231 45869243 : if (!TryAllocatePreferredReg(current, free_until_pos)) {
3232 27372764 : if (current->TopLevel()->IsSplinter()) {
3233 3263340 : if (TrySplitAndSpillSplinter(current)) return;
3234 : }
3235 25954886 : if (!TryAllocateFreeReg(current, free_until_pos)) {
3236 4696493 : AllocateBlockedReg(current);
3237 : }
3238 : }
3239 44451140 : if (current->HasRegisterAssigned()) {
3240 40298366 : AddToActive(current);
3241 : }
3242 : }
3243 :
3244 51526270 : bool LinearScanAllocator::TryAllocatePreferredReg(
3245 59772394 : LiveRange* current, const Vector<LifetimePosition>& free_until_pos) {
3246 : int hint_register;
3247 75348093 : if (current->FirstHintPosition(&hint_register) != nullptr ||
3248 : current->RegisterFromBundle(&hint_register)) {
3249 29886196 : TRACE(
3250 : "Found reg hint %s (free until [%d) for live range %d:%d (end %d[).\n",
3251 : RegisterName(hint_register), free_until_pos[hint_register].value(),
3252 : current->TopLevel()->vreg(), current->relative_id(),
3253 : current->End().value());
3254 :
3255 : // The desired register is free until the end of the current live range.
3256 59772394 : if (free_until_pos[hint_register] >= current->End()) {
3257 21304726 : TRACE("Assigning preferred reg %s to live range %d:%d\n",
3258 : RegisterName(hint_register), current->TopLevel()->vreg(),
3259 : current->relative_id());
3260 21304726 : SetLiveRangeAssignedRegister(current, hint_register);
3261 21304899 : return true;
3262 : }
3263 : }
3264 : return false;
3265 : }
3266 :
3267 25954789 : bool LinearScanAllocator::TryAllocateFreeReg(
3268 566185486 : LiveRange* current, const Vector<LifetimePosition>& free_until_pos) {
3269 : int num_regs = 0; // used only for the call to GetFPRegisterSet.
3270 229140910 : int num_codes = num_allocatable_registers();
3271 : const int* codes = allocatable_register_codes();
3272 : MachineRepresentation rep = current->representation();
3273 : if (!kSimpleFPAliasing && (rep == MachineRepresentation::kFloat32 ||
3274 : rep == MachineRepresentation::kSimd128)) {
3275 : GetFPRegisterSet(rep, &num_regs, &num_codes, &codes);
3276 : }
3277 :
3278 : DCHECK_GE(free_until_pos.length(), num_codes);
3279 :
3280 : // Find the register which stays free for the longest time. Check for
3281 : // the hinted register first, as we might want to use that one. Only
3282 : // count full instructions for free ranges, as an instruction's internal
3283 : // positions do not help but might shadow a hinted register. This is
3284 : // typically the case for function calls, where all registered are
3285 : // cloberred after the call except for the argument registers, which are
3286 : // set before the call. Hence, the argument registers always get ignored,
3287 : // as their available time is shorter.
3288 25954789 : int hint_reg = kUnassignedRegister;
3289 25954789 : int reg = codes[0];
3290 44689980 : if (current->FirstHintPosition(&hint_reg) != nullptr ||
3291 : current->RegisterFromBundle(&hint_reg)) {
3292 7686240 : reg = hint_reg;
3293 : }
3294 315785964 : for (int i = 0; i < num_codes; ++i) {
3295 315786038 : int code = codes[i];
3296 : // Prefer registers that have no fixed uses to avoid blocking later hints.
3297 : // We use the first register that has no fixed uses to ensure we use
3298 : // byte addressable registers in ia32 first.
3299 315786038 : int candidate_free = free_until_pos[code].ToInstructionIndex();
3300 315786038 : int current_free = free_until_pos[reg].ToInstructionIndex();
3301 631572002 : if (candidate_free > current_free ||
3302 469192190 : (candidate_free == current_free && reg != hint_reg &&
3303 271556808 : data()->HasFixedUse(current->representation(), reg) &&
3304 68370596 : !data()->HasFixedUse(current->representation(), code))) {
3305 : reg = code;
3306 : }
3307 : }
3308 :
3309 51909764 : LifetimePosition pos = free_until_pos[reg];
3310 :
3311 25954882 : if (pos <= current->Start()) {
3312 : // All registers are blocked.
3313 : return false;
3314 : }
3315 :
3316 21258445 : if (pos < current->End()) {
3317 : // Register reg is available at the range start but becomes blocked before
3318 : // the range end. Split current at position where it becomes blocked.
3319 5657176 : LiveRange* tail = SplitRangeAt(current, pos);
3320 5657171 : AddToUnhandled(tail);
3321 :
3322 : // Try to allocate preferred register once more.
3323 5657171 : if (TryAllocatePreferredReg(current, free_until_pos)) return true;
3324 : }
3325 :
3326 : // Register reg is available at the range start and is free until the range
3327 : // end.
3328 : DCHECK(pos >= current->End());
3329 18450075 : TRACE("Assigning free reg %s to live range %d:%d\n", RegisterName(reg),
3330 : current->TopLevel()->vreg(), current->relative_id());
3331 18450075 : SetLiveRangeAssignedRegister(current, reg);
3332 :
3333 18450076 : return true;
3334 : }
3335 :
3336 5240137 : void LinearScanAllocator::AllocateBlockedReg(LiveRange* current) {
3337 4696488 : UsePosition* register_use = current->NextRegisterPosition(current->Start());
3338 4696525 : if (register_use == nullptr) {
3339 : // There is no use in the current live range that requires a register.
3340 : // We can just spill it.
3341 1359541 : Spill(current);
3342 1359547 : return;
3343 : }
3344 :
3345 : int num_regs = num_registers();
3346 : int num_codes = num_allocatable_registers();
3347 : const int* codes = allocatable_register_codes();
3348 : MachineRepresentation rep = current->representation();
3349 : if (!kSimpleFPAliasing && (rep == MachineRepresentation::kFloat32 ||
3350 : rep == MachineRepresentation::kSimd128))
3351 : GetFPRegisterSet(rep, &num_regs, &num_codes, &codes);
3352 :
3353 : // use_pos keeps track of positions a register/alias is used at.
3354 : // block_pos keeps track of positions where a register/alias is blocked
3355 : // from.
3356 110119032 : LifetimePosition use_pos[RegisterConfiguration::kMaxRegisters];
3357 106781824 : LifetimePosition block_pos[RegisterConfiguration::kMaxRegisters];
3358 53390769 : for (int i = 0; i < num_regs; i++) {
3359 53390769 : use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
3360 : }
3361 :
3362 87462419 : for (LiveRange* range : active_live_ranges()) {
3363 : int cur_reg = range->assigned_register();
3364 : bool is_fixed_or_cant_spill =
3365 47254504 : range->TopLevel()->IsFixed() || !range->CanBeSpilled(current->Start());
3366 : if (kSimpleFPAliasing || !check_fp_aliasing()) {
3367 40394209 : if (is_fixed_or_cant_spill) {
3368 : block_pos[cur_reg] = use_pos[cur_reg] =
3369 33933608 : LifetimePosition::GapFromInstructionIndex(0);
3370 : } else {
3371 : DCHECK_NE(LifetimePosition::GapFromInstructionIndex(0),
3372 : block_pos[cur_reg]);
3373 : use_pos[cur_reg] =
3374 6460601 : range->NextLifetimePositionRegisterIsBeneficial(current->Start());
3375 : }
3376 : } else {
3377 : int alias_base_index = -1;
3378 : int aliases = data()->config()->GetAliases(
3379 : range->representation(), cur_reg, rep, &alias_base_index);
3380 : DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
3381 : while (aliases--) {
3382 : int aliased_reg = alias_base_index + aliases;
3383 : if (is_fixed_or_cant_spill) {
3384 : block_pos[aliased_reg] = use_pos[aliased_reg] =
3385 : LifetimePosition::GapFromInstructionIndex(0);
3386 : } else {
3387 : use_pos[aliased_reg] =
3388 : Min(block_pos[aliased_reg],
3389 : range->NextLifetimePositionRegisterIsBeneficial(
3390 : current->Start()));
3391 : }
3392 : }
3393 : }
3394 : }
3395 :
3396 22981232 : for (LiveRange* range : inactive_live_ranges()) {
3397 : DCHECK(range->End() > current->Start());
3398 : int cur_reg = range->assigned_register();
3399 8153673 : bool is_fixed = range->TopLevel()->IsFixed();
3400 :
3401 : // Don't perform costly intersections if they are guaranteed to not update
3402 : // block_pos or use_pos.
3403 : // TODO(mtrofin): extend to aliased ranges, too.
3404 : if ((kSimpleFPAliasing || !check_fp_aliasing())) {
3405 8153673 : if (is_fixed) {
3406 19411889 : if (block_pos[cur_reg] < range->Start()) continue;
3407 : } else {
3408 4604854 : if (use_pos[cur_reg] < range->Start()) continue;
3409 : }
3410 : }
3411 :
3412 5847951 : LifetimePosition next_intersection = range->FirstIntersection(current);
3413 5847951 : if (!next_intersection.IsValid()) continue;
3414 :
3415 : if (kSimpleFPAliasing || !check_fp_aliasing()) {
3416 444276 : if (is_fixed) {
3417 423609 : block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
3418 423609 : use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
3419 : } else {
3420 20667 : use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
3421 : }
3422 : } else {
3423 : int alias_base_index = -1;
3424 : int aliases = data()->config()->GetAliases(
3425 : range->representation(), cur_reg, rep, &alias_base_index);
3426 : DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
3427 : while (aliases--) {
3428 : int aliased_reg = alias_base_index + aliases;
3429 : if (is_fixed) {
3430 : block_pos[aliased_reg] =
3431 : Min(block_pos[aliased_reg], next_intersection);
3432 : use_pos[aliased_reg] =
3433 : Min(block_pos[aliased_reg], use_pos[aliased_reg]);
3434 : } else {
3435 : use_pos[aliased_reg] = Min(use_pos[aliased_reg], next_intersection);
3436 : }
3437 : }
3438 : }
3439 : }
3440 :
3441 3336943 : int reg = codes[0];
3442 3336943 : register_use->HintRegister(®) || current->RegisterFromBundle(®);
3443 40394146 : for (int i = 0; i < num_codes; ++i) {
3444 40394146 : int code = codes[i];
3445 80788292 : if (use_pos[code] > use_pos[reg]) {
3446 1629138 : reg = code;
3447 : }
3448 : }
3449 :
3450 6673876 : if (use_pos[reg] < register_use->pos()) {
3451 : // If there is a gap position before the next register use, we can
3452 : // spill until there. The gap position will then fit the fill move.
3453 2793291 : if (LifetimePosition::ExistsGapPositionBetween(current->Start(),
3454 : register_use->pos())) {
3455 : SpillBetween(current, current->Start(), register_use->pos());
3456 : return;
3457 : }
3458 : }
3459 :
3460 : // We couldn't spill until the next register use. Split before the register
3461 : // is blocked, if applicable.
3462 1087298 : if (block_pos[reg] < current->End()) {
3463 : // Register becomes blocked before the current range end. Split before that
3464 : // position.
3465 : LiveRange* tail =
3466 16156 : SplitBetween(current, current->Start(), block_pos[reg].Start());
3467 16156 : AddToUnhandled(tail);
3468 : }
3469 :
3470 : // Register reg is not blocked for the whole range.
3471 : DCHECK(block_pos[reg] >= current->End());
3472 543649 : TRACE("Assigning blocked reg %s to live range %d:%d\n", RegisterName(reg),
3473 : current->TopLevel()->vreg(), current->relative_id());
3474 543649 : SetLiveRangeAssignedRegister(current, reg);
3475 :
3476 : // This register was not free. Thus we need to find and spill
3477 : // parts of active and inactive live regions that use the same register
3478 : // at the same lifetime positions as current.
3479 543649 : SplitAndSpillIntersecting(current);
3480 : }
3481 :
3482 543656 : void LinearScanAllocator::SplitAndSpillIntersecting(LiveRange* current) {
3483 : DCHECK(current->HasRegisterAssigned());
3484 : int reg = current->assigned_register();
3485 : LifetimePosition split_pos = current->Start();
3486 7646998 : for (auto it = active_live_ranges().begin();
3487 : it != active_live_ranges().end();) {
3488 6559694 : LiveRange* range = *it;
3489 : if (kSimpleFPAliasing || !check_fp_aliasing()) {
3490 6559694 : if (range->assigned_register() != reg) {
3491 : ++it;
3492 6016038 : continue;
3493 : }
3494 : } else {
3495 : if (!data()->config()->AreAliases(current->representation(), reg,
3496 : range->representation(),
3497 : range->assigned_register())) {
3498 : ++it;
3499 : continue;
3500 : }
3501 : }
3502 :
3503 543656 : UsePosition* next_pos = range->NextRegisterPosition(current->Start());
3504 543649 : LifetimePosition spill_pos = FindOptimalSpillingPos(range, split_pos);
3505 543648 : if (next_pos == nullptr) {
3506 222431 : SpillAfter(range, spill_pos);
3507 : } else {
3508 : // When spilling between spill_pos and next_pos ensure that the range
3509 : // remains spilled at least until the start of the current live range.
3510 : // This guarantees that we will not introduce new unhandled ranges that
3511 : // start before the current range as this violates allocation invariants
3512 : // and will lead to an inconsistent state of active and inactive
3513 : // live-ranges: ranges are allocated in order of their start positions,
3514 : // ranges are retired from active/inactive when the start of the
3515 : // current live-range is larger than their end.
3516 : DCHECK(LifetimePosition::ExistsGapPositionBetween(current->Start(),
3517 : next_pos->pos()));
3518 321217 : SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos());
3519 : }
3520 543649 : it = ActiveToHandled(it);
3521 : }
3522 :
3523 6954911 : for (auto it = inactive_live_ranges().begin();
3524 : it != inactive_live_ranges().end();) {
3525 6147320 : LiveRange* range = *it;
3526 : DCHECK(range->End() > current->Start());
3527 5867614 : if (range->TopLevel()->IsFixed()) {
3528 : ++it;
3529 : continue;
3530 : }
3531 : if (kSimpleFPAliasing || !check_fp_aliasing()) {
3532 279706 : if (range->assigned_register() != reg) {
3533 : ++it;
3534 : continue;
3535 : }
3536 : } else {
3537 : if (!data()->config()->AreAliases(current->representation(), reg,
3538 : range->representation(),
3539 : range->assigned_register())) {
3540 : ++it;
3541 : continue;
3542 : }
3543 : }
3544 :
3545 19292 : LifetimePosition next_intersection = range->FirstIntersection(current);
3546 19293 : if (next_intersection.IsValid()) {
3547 129 : UsePosition* next_pos = range->NextRegisterPosition(current->Start());
3548 129 : if (next_pos == nullptr) {
3549 81 : SpillAfter(range, split_pos);
3550 : } else {
3551 : next_intersection = Min(next_intersection, next_pos->pos());
3552 : SpillBetween(range, split_pos, next_intersection);
3553 : }
3554 129 : it = InactiveToHandled(it);
3555 : } else {
3556 : ++it;
3557 : }
3558 : }
3559 543649 : }
3560 :
3561 23716194 : bool LinearScanAllocator::TryReuseSpillForPhi(TopLevelLiveRange* range) {
3562 23716194 : if (!range->is_phi()) return false;
3563 :
3564 : DCHECK(!range->HasSpillOperand());
3565 : // Check how many operands belong to the same bundle as the output.
3566 2088657 : LiveRangeBundle* out_bundle = range->get_bundle();
3567 2088641 : RegisterAllocationData::PhiMapValue* phi_map_value =
3568 7053646 : data()->GetPhiMapValueFor(range);
3569 : const PhiInstruction* phi = phi_map_value->phi();
3570 : const InstructionBlock* block = phi_map_value->block();
3571 : // Count the number of spilled operands.
3572 : size_t spilled_count = 0;
3573 14107256 : for (size_t i = 0; i < phi->operands().size(); i++) {
3574 4964989 : int op = phi->operands()[i];
3575 5846310 : LiveRange* op_range = data()->GetOrCreateLiveRangeFor(op);
3576 4964987 : if (!op_range->TopLevel()->HasSpillRange()) continue;
3577 283490 : const InstructionBlock* pred =
3578 566980 : code()->InstructionBlockAt(block->predecessors()[i]);
3579 : LifetimePosition pred_end =
3580 : LifetimePosition::InstructionFromInstructionIndex(
3581 : pred->last_instruction_index());
3582 1642487 : while (op_range != nullptr && !op_range->CanCover(pred_end)) {
3583 : op_range = op_range->next();
3584 : }
3585 767428 : if (op_range != nullptr && op_range->spilled() &&
3586 : op_range->get_bundle() == out_bundle) {
3587 151250 : spilled_count++;
3588 : }
3589 : }
3590 :
3591 : // Only continue if more than half of the operands are spilled to the same
3592 : // slot (because part of same bundle).
3593 2088639 : if (spilled_count * 2 <= phi->operands().size()) {
3594 : return false;
3595 : }
3596 :
3597 : // If the range does not need register soon, spill it to the merged
3598 : // spill range.
3599 : LifetimePosition next_pos = range->Start();
3600 15260 : if (next_pos.IsGapPosition()) next_pos = next_pos.NextStart();
3601 15260 : UsePosition* pos = range->NextUsePositionRegisterIsBeneficial(next_pos);
3602 15260 : if (pos == nullptr) {
3603 8602 : Spill(range);
3604 8602 : return true;
3605 6658 : } else if (pos->pos() > range->Start().NextStart()) {
3606 : SpillBetween(range, range->Start(), pos->pos());
3607 5489 : return true;
3608 : }
3609 : return false;
3610 : }
3611 :
3612 222512 : void LinearScanAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
3613 222512 : LiveRange* second_part = SplitRangeAt(range, pos);
3614 222513 : Spill(second_part);
3615 222513 : }
3616 :
3617 0 : void LinearScanAllocator::SpillBetween(LiveRange* range, LifetimePosition start,
3618 : LifetimePosition end) {
3619 2798826 : SpillBetweenUntil(range, start, start, end);
3620 0 : }
3621 :
3622 3120039 : void LinearScanAllocator::SpillBetweenUntil(LiveRange* range,
3623 : LifetimePosition start,
3624 : LifetimePosition until,
3625 : LifetimePosition end) {
3626 3120039 : CHECK(start < end);
3627 6240037 : LiveRange* second_part = SplitRangeAt(range, start);
3628 :
3629 3120040 : if (second_part->Start() < end) {
3630 : // The split result intersects with [start, end[.
3631 : // Split it at position between ]start+1, end[, spill the middle part
3632 : // and put the rest to unhandled.
3633 3119998 : LifetimePosition third_part_end = end.PrevStart().End();
3634 3119998 : if (data()->IsBlockBoundary(end.Start())) {
3635 0 : third_part_end = end.Start();
3636 : }
3637 : LiveRange* third_part = SplitBetween(
3638 3119999 : second_part, Max(second_part->Start().End(), until), third_part_end);
3639 :
3640 : DCHECK(third_part != second_part);
3641 :
3642 3120001 : Spill(second_part);
3643 3120003 : AddToUnhandled(third_part);
3644 : } else {
3645 : // The split result does not intersect with [start, end[.
3646 : // Nothing to spill. Just put it to unhandled as whole.
3647 42 : AddToUnhandled(second_part);
3648 : }
3649 3120039 : }
3650 :
3651 2949651 : SpillSlotLocator::SpillSlotLocator(RegisterAllocationData* data)
3652 2949651 : : data_(data) {}
3653 :
3654 2949934 : void SpillSlotLocator::LocateSpillSlots() {
3655 2949934 : const InstructionSequence* code = data()->code();
3656 2949934 : const size_t live_ranges_size = data()->live_ranges().size();
3657 96061313 : for (TopLevelLiveRange* range : data()->live_ranges()) {
3658 165936652 : CHECK_EQ(live_ranges_size,
3659 : data()->live_ranges().size()); // TODO(neis): crbug.com/831822
3660 121882165 : if (range == nullptr || range->IsEmpty()) continue;
3661 : // We care only about ranges which spill in the frame.
3662 41163091 : if (!range->HasSpillRange() || range->IsSpilledOnlyInDeferredBlocks()) {
3663 : continue;
3664 : }
3665 : TopLevelLiveRange::SpillMoveInsertionList* spills =
3666 : range->GetSpillMoveInsertionLocations();
3667 : DCHECK_NOT_NULL(spills);
3668 6315149 : for (; spills != nullptr; spills = spills->next) {
3669 3158994 : code->GetInstructionBlock(spills->gap_index)->mark_needs_frame();
3670 : }
3671 : }
3672 2949662 : }
3673 :
3674 5899110 : OperandAssigner::OperandAssigner(RegisterAllocationData* data) : data_(data) {}
3675 :
3676 2949287 : void OperandAssigner::AssignSpillSlots() {
3677 88866662 : for (auto range : data()->live_ranges()) {
3678 82967805 : if (range != nullptr && range->get_bundle() != nullptr) {
3679 5756376 : range->get_bundle()->MergeSpillRanges();
3680 : }
3681 : }
3682 : ZoneVector<SpillRange*>& spill_ranges = data()->spill_ranges();
3683 : // Merge disjoint spill ranges
3684 88867490 : for (size_t i = 0; i < spill_ranges.size(); ++i) {
3685 597965371 : SpillRange* range = spill_ranges[i];
3686 41484191 : if (range == nullptr) continue;
3687 4037201 : if (range->IsEmpty()) continue;
3688 1024094870 : for (size_t j = i + 1; j < spill_ranges.size(); ++j) {
3689 509034460 : SpillRange* other = spill_ranges[j];
3690 590810844 : if (other != nullptr && !other->IsEmpty()) {
3691 63404181 : range->TryMerge(other);
3692 : }
3693 : }
3694 : }
3695 : // Allocate slots for the merged spill ranges.
3696 91840322 : for (SpillRange* range : spill_ranges) {
3697 45521390 : if (range == nullptr || range->IsEmpty()) continue;
3698 : // Allocate a new operand referring to the spill slot.
3699 3012995 : if (!range->HasSlot()) {
3700 2909361 : int index = data()->frame()->AllocateSpillSlot(range->byte_width());
3701 : range->set_assigned_slot(index);
3702 : }
3703 : }
3704 2949598 : }
3705 :
3706 2949872 : void OperandAssigner::CommitAssignment() {
3707 2949872 : const size_t live_ranges_size = data()->live_ranges().size();
3708 162406308 : for (TopLevelLiveRange* top_range : data()->live_ranges()) {
3709 165936246 : CHECK_EQ(live_ranges_size,
3710 : data()->live_ranges().size()); // TODO(neis): crbug.com/831822
3711 204850813 : if (top_range == nullptr || top_range->IsEmpty()) continue;
3712 : InstructionOperand spill_operand;
3713 37125768 : if (top_range->HasSpillOperand()) {
3714 14609747 : spill_operand = *top_range->TopLevel()->GetSpillOperand();
3715 22516021 : } else if (top_range->TopLevel()->HasSpillRange()) {
3716 4037157 : spill_operand = top_range->TopLevel()->GetSpillRangeOperand();
3717 : }
3718 37125768 : if (top_range->is_phi()) {
3719 : data()->GetPhiMapValueFor(top_range)->CommitAssignment(
3720 4339573 : top_range->GetAssignedOperand());
3721 : }
3722 118180733 : for (LiveRange* range = top_range; range != nullptr;
3723 : range = range->next()) {
3724 81054716 : InstructionOperand assigned = range->GetAssignedOperand();
3725 : DCHECK(!assigned.IsUnallocated());
3726 81054798 : range->ConvertUsesToOperand(assigned, spill_operand);
3727 : }
3728 :
3729 37126017 : if (!spill_operand.IsInvalid()) {
3730 : // If this top level range has a child spilled in a deferred block, we use
3731 : // the range and control flow connection mechanism instead of spilling at
3732 : // definition. Refer to the ConnectLiveRanges and ResolveControlFlow
3733 : // phases. Normally, when we spill at definition, we do not insert a
3734 : // connecting move when a successor child range is spilled - because the
3735 : // spilled range picks up its value from the slot which was assigned at
3736 : // definition. For ranges that are determined to spill only in deferred
3737 : // blocks, we let ConnectLiveRanges and ResolveControlFlow find the blocks
3738 : // where a spill operand is expected, and then finalize by inserting the
3739 : // spills in the deferred blocks dominators.
3740 18646951 : if (!top_range->IsSpilledOnlyInDeferredBlocks()) {
3741 : // Spill at definition if the range isn't spilled only in deferred
3742 : // blocks.
3743 : top_range->CommitSpillMoves(
3744 : data()->code(), spill_operand,
3745 51626263 : top_range->has_slot_use() || top_range->spilled());
3746 : }
3747 : }
3748 : }
3749 2949645 : }
3750 :
3751 2949671 : ReferenceMapPopulator::ReferenceMapPopulator(RegisterAllocationData* data)
3752 2949671 : : data_(data) {}
3753 :
3754 0 : bool ReferenceMapPopulator::SafePointsAreInOrder() const {
3755 : int safe_point = 0;
3756 0 : for (ReferenceMap* map : *data()->code()->reference_maps()) {
3757 0 : if (safe_point > map->instruction_position()) return false;
3758 : safe_point = map->instruction_position();
3759 : }
3760 0 : return true;
3761 : }
3762 :
3763 2950108 : void ReferenceMapPopulator::PopulateReferenceMaps() {
3764 : DCHECK(SafePointsAreInOrder());
3765 : // Map all delayed references.
3766 5900216 : for (RegisterAllocationData::DelayedReference& delayed_reference :
3767 : data()->delayed_references()) {
3768 : delayed_reference.map->RecordReference(
3769 0 : AllocatedOperand::cast(*delayed_reference.operand));
3770 : }
3771 : // Iterate over all safe point positions and record a pointer
3772 : // for all spilled live ranges at this point.
3773 : int last_range_start = 0;
3774 2950108 : const ReferenceMapDeque* reference_maps = data()->code()->reference_maps();
3775 : ReferenceMapDeque::const_iterator first_it = reference_maps->begin();
3776 2950108 : const size_t live_ranges_size = data()->live_ranges().size();
3777 172075926 : for (TopLevelLiveRange* range : data()->live_ranges()) {
3778 165937098 : CHECK_EQ(live_ranges_size,
3779 : data()->live_ranges().size()); // TODO(neis): crbug.com/831822
3780 82967616 : if (range == nullptr) continue;
3781 : // Skip non-reference values.
3782 38914487 : if (!data()->IsReference(range)) continue;
3783 : // Skip empty live ranges.
3784 15489638 : if (range->IsEmpty()) continue;
3785 13734134 : if (range->has_preassigned_slot()) continue;
3786 :
3787 : // Find the extent of the range and its children.
3788 : int start = range->Start().ToInstructionIndex();
3789 : int end = 0;
3790 50829528 : for (LiveRange* cur = range; cur != nullptr; cur = cur->next()) {
3791 : LifetimePosition this_end = cur->End();
3792 37198994 : if (this_end.ToInstructionIndex() > end)
3793 : end = this_end.ToInstructionIndex();
3794 : DCHECK(cur->Start().ToInstructionIndex() >= start);
3795 : }
3796 :
3797 : // Most of the ranges are in order, but not all. Keep an eye on when they
3798 : // step backwards and reset the first_it so we don't miss any safe points.
3799 13630534 : if (start < last_range_start) first_it = reference_maps->begin();
3800 : last_range_start = start;
3801 :
3802 : // Step across all the safe points that are before the start of this range,
3803 : // recording how far we step in order to save doing this for the next range.
3804 469238237 : for (; first_it != reference_maps->end(); ++first_it) {
3805 454587809 : ReferenceMap* map = *first_it;
3806 454587809 : if (map->instruction_position() >= start) break;
3807 : }
3808 :
3809 : InstructionOperand spill_operand;
3810 19674074 : if (((range->HasSpillOperand() &&
3811 26206888 : !range->GetSpillOperand()->IsConstant()) ||
3812 : range->HasSpillRange())) {
3813 2962660 : if (range->HasSpillOperand()) {
3814 1054146 : spill_operand = *range->GetSpillOperand();
3815 : } else {
3816 : spill_operand = range->GetSpillRangeOperand();
3817 : }
3818 : DCHECK(spill_operand.IsStackSlot());
3819 : DCHECK(CanBeTaggedPointer(
3820 : AllocatedOperand::cast(spill_operand).representation()));
3821 : }
3822 :
3823 60721624 : LiveRange* cur = range;
3824 : // Step through the safe points to see whether they are in the range.
3825 67722557 : for (auto it = first_it; it != reference_maps->end(); ++it) {
3826 49642499 : ReferenceMap* map = *it;
3827 : int safe_point = map->instruction_position();
3828 :
3829 : // The safe points are sorted so we can stop searching here.
3830 49642499 : if (safe_point - 1 > end) break;
3831 :
3832 : // Advance to the next active range that covers the current
3833 : // safe point position.
3834 : LifetimePosition safe_point_pos =
3835 : LifetimePosition::InstructionFromInstructionIndex(safe_point);
3836 :
3837 : // Search for the child range (cur) that covers safe_point_pos. If we
3838 : // don't find it before the children pass safe_point_pos, keep cur at
3839 : // the last child, because the next safe_point_pos may be covered by cur.
3840 : // This may happen if cur has more than one interval, and the current
3841 : // safe_point_pos is in between intervals.
3842 : // For that reason, cur may be at most the last child.
3843 : DCHECK_NOT_NULL(cur);
3844 : DCHECK(safe_point_pos >= cur->Start() || range == cur);
3845 : bool found = false;
3846 132659584 : while (!found) {
3847 60721356 : if (cur->Covers(safe_point_pos)) {
3848 : found = true;
3849 : } else {
3850 : LiveRange* next = cur->next();
3851 51445215 : if (next == nullptr || next->Start() > safe_point_pos) {
3852 : break;
3853 : }
3854 : cur = next;
3855 : }
3856 : }
3857 :
3858 40461482 : if (!found) {
3859 : continue;
3860 : }
3861 :
3862 : // Check if the live range is spilled and the safe point is after
3863 : // the spill position.
3864 : int spill_index = range->IsSpilledOnlyInDeferredBlocks()
3865 : ? cur->Start().ToInstructionIndex()
3866 31477007 : : range->spill_start_index();
3867 :
3868 31477007 : if (!spill_operand.IsInvalid() && safe_point >= spill_index) {
3869 20981216 : TRACE("Pointer for range %d (spilled at %d) at safe point %d\n",
3870 : range->vreg(), spill_index, safe_point);
3871 20981216 : map->RecordReference(AllocatedOperand::cast(spill_operand));
3872 : }
3873 :
3874 31477014 : if (!cur->spilled()) {
3875 0 : TRACE(
3876 : "Pointer in register for range %d:%d (start at %d) "
3877 : "at safe point %d\n",
3878 : range->vreg(), cur->relative_id(), cur->Start().value(),
3879 : safe_point);
3880 0 : InstructionOperand operand = cur->GetAssignedOperand();
3881 : DCHECK(!operand.IsStackSlot());
3882 : DCHECK(CanBeTaggedPointer(
3883 : AllocatedOperand::cast(operand).representation()));
3884 0 : map->RecordReference(AllocatedOperand::cast(operand));
3885 : }
3886 : }
3887 : }
3888 2949488 : }
3889 :
3890 5899294 : LiveRangeConnector::LiveRangeConnector(RegisterAllocationData* data)
3891 5899294 : : data_(data) {}
3892 :
3893 0 : bool LiveRangeConnector::CanEagerlyResolveControlFlow(
3894 : const InstructionBlock* block) const {
3895 22076780 : if (block->PredecessorCount() != 1) return false;
3896 0 : return block->predecessors()[0].IsNext(block->rpo_number());
3897 : }
3898 :
3899 2949395 : void LiveRangeConnector::ResolveControlFlow(Zone* local_zone) {
3900 : // Lazily linearize live ranges in memory for fast lookup.
3901 2949395 : LiveRangeFinder finder(data(), local_zone);
3902 : ZoneVector<BitVector*>& live_in_sets = data()->live_in_sets();
3903 31632011 : for (const InstructionBlock* block : code()->instruction_blocks()) {
3904 28776365 : if (CanEagerlyResolveControlFlow(block)) continue;
3905 24788574 : BitVector* live = live_in_sets[block->rpo_number().ToInt()];
3906 12394287 : BitVector::Iterator iterator(live);
3907 189347254 : while (!iterator.Done()) {
3908 76082130 : int vreg = iterator.Current();
3909 76082130 : LiveRangeBoundArray* array = finder.ArrayFor(vreg);
3910 268960480 : for (const RpoNumber& pred : block->predecessors()) {
3911 : FindResult result;
3912 117071785 : const InstructionBlock* pred_block = code()->InstructionBlockAt(pred);
3913 116797183 : if (!array->FindConnectableSubranges(block, pred_block, &result)) {
3914 114101750 : continue;
3915 : }
3916 7552488 : InstructionOperand pred_op = result.pred_cover_->GetAssignedOperand();
3917 7552492 : InstructionOperand cur_op = result.cur_cover_->GetAssignedOperand();
3918 7552489 : if (pred_op.Equals(cur_op)) continue;
3919 5393552 : if (!pred_op.IsAnyRegister() && cur_op.IsAnyRegister()) {
3920 : // We're doing a reload.
3921 : // We don't need to, if:
3922 : // 1) there's no register use in this block, and
3923 : // 2) the range ends before the block does, and
3924 : // 3) we don't have a successor, or the successor is spilled.
3925 : LifetimePosition block_start =
3926 2573670 : LifetimePosition::GapFromInstructionIndex(block->code_start());
3927 : LifetimePosition block_end =
3928 : LifetimePosition::GapFromInstructionIndex(block->code_end());
3929 5023639 : const LiveRange* current = result.cur_cover_;
3930 298274 : const LiveRange* successor = current->next();
3931 5147340 : if (current->End() < block_end &&
3932 298274 : (successor == nullptr || successor->spilled())) {
3933 : // verify point 1: no register use. We can go to the end of the
3934 : // range, since it's all within the block.
3935 :
3936 : bool uses_reg = false;
3937 523927 : for (const UsePosition* use = current->NextUsePosition(block_start);
3938 : use != nullptr; use = use->next()) {
3939 400221 : if (use->operand()->IsAnyRegister()) {
3940 : uses_reg = true;
3941 : break;
3942 : }
3943 : }
3944 647508 : if (!uses_reg) continue;
3945 : }
3946 2724542 : if (current->TopLevel()->IsSpilledOnlyInDeferredBlocks() &&
3947 : pred_block->IsDeferred()) {
3948 : // The spill location should be defined in pred_block, so add
3949 : // pred_block to the list of blocks requiring a spill operand.
3950 : current->TopLevel()->GetListOfBlocksRequiringSpillOperands()->Add(
3951 274573 : pred_block->rpo_number().ToInt());
3952 : }
3953 : }
3954 2696182 : int move_loc = ResolveControlFlow(block, cur_op, pred_block, pred_op);
3955 : USE(move_loc);
3956 : DCHECK_IMPLIES(
3957 : result.cur_cover_->TopLevel()->IsSpilledOnlyInDeferredBlocks() &&
3958 : !(pred_op.IsAnyRegister() && cur_op.IsAnyRegister()),
3959 : code()->GetInstructionBlock(move_loc)->IsDeferred());
3960 : }
3961 76081841 : iterator.Advance();
3962 : }
3963 : }
3964 :
3965 : // At this stage, we collected blocks needing a spill operand from
3966 : // ConnectRanges and from ResolveControlFlow. Time to commit the spills for
3967 : // deferred blocks.
3968 2949680 : const size_t live_ranges_size = data()->live_ranges().size();
3969 126875604 : for (TopLevelLiveRange* top : data()->live_ranges()) {
3970 165938220 : CHECK_EQ(live_ranges_size,
3971 : data()->live_ranges().size()); // TODO(neis): crbug.com/831822
3972 159009500 : if (top == nullptr || top->IsEmpty() ||
3973 : !top->IsSpilledOnlyInDeferredBlocks())
3974 : continue;
3975 881045 : CommitSpillsInDeferredBlocks(top, finder.ArrayFor(top->vreg()), local_zone);
3976 : }
3977 2949707 : }
3978 :
3979 3898170 : int LiveRangeConnector::ResolveControlFlow(const InstructionBlock* block,
3980 : const InstructionOperand& cur_op,
3981 1494160 : const InstructionBlock* pred,
3982 : const InstructionOperand& pred_op) {
3983 : DCHECK(!pred_op.Equals(cur_op));
3984 : int gap_index;
3985 : Instruction::GapPosition position;
3986 2696165 : if (block->PredecessorCount() == 1) {
3987 : gap_index = block->first_instruction_index();
3988 : position = Instruction::START;
3989 : } else {
3990 : DCHECK_EQ(1, pred->SuccessorCount());
3991 : DCHECK(!code()
3992 : ->InstructionAt(pred->last_instruction_index())
3993 : ->HasReferenceMap());
3994 : gap_index = pred->last_instruction_index();
3995 : position = Instruction::END;
3996 : }
3997 2696165 : data()->AddGapMove(gap_index, position, pred_op, cur_op);
3998 2696178 : return gap_index;
3999 : }
4000 :
4001 2949559 : void LiveRangeConnector::ConnectRanges(Zone* local_zone) {
4002 : DelayedInsertionMap delayed_insertion_map(local_zone);
4003 2949559 : const size_t live_ranges_size = data()->live_ranges().size();
4004 128773282 : for (TopLevelLiveRange* top_range : data()->live_ranges()) {
4005 165935290 : CHECK_EQ(live_ranges_size,
4006 : data()->live_ranges().size()); // TODO(neis): crbug.com/831822
4007 82967286 : if (top_range == nullptr) continue;
4008 : bool connect_spilled = top_range->IsSpilledOnlyInDeferredBlocks();
4009 59819567 : LiveRange* first_range = top_range;
4010 126774550 : for (LiveRange *second_range = first_range->next(); second_range != nullptr;
4011 : first_range = second_range, second_range = second_range->next()) {
4012 : LifetimePosition pos = second_range->Start();
4013 : // Add gap move if the two live ranges touch and there is no block
4014 : // boundary.
4015 68764737 : if (second_range->spilled()) continue;
4016 20905885 : if (first_range->End() != pos) continue;
4017 21758791 : if (data()->IsBlockBoundary(pos) &&
4018 : !CanEagerlyResolveControlFlow(GetInstructionBlock(code(), pos))) {
4019 : continue;
4020 : }
4021 19390348 : InstructionOperand prev_operand = first_range->GetAssignedOperand();
4022 19390333 : InstructionOperand cur_operand = second_range->GetAssignedOperand();
4023 19390377 : if (prev_operand.Equals(cur_operand)) continue;
4024 : bool delay_insertion = false;
4025 : Instruction::GapPosition gap_pos;
4026 : int gap_index = pos.ToInstructionIndex();
4027 21084265 : if (connect_spilled && !prev_operand.IsAnyRegister() &&
4028 : cur_operand.IsAnyRegister()) {
4029 992983 : const InstructionBlock* block = code()->GetInstructionBlock(gap_index);
4030 : DCHECK(block->IsDeferred());
4031 : // Performing a reload in this block, meaning the spill operand must
4032 : // be defined here.
4033 : top_range->GetListOfBlocksRequiringSpillOperands()->Add(
4034 : block->rpo_number().ToInt());
4035 : }
4036 :
4037 19096025 : if (pos.IsGapPosition()) {
4038 19095396 : gap_pos = pos.IsStart() ? Instruction::START : Instruction::END;
4039 : } else {
4040 629 : if (pos.IsStart()) {
4041 : delay_insertion = true;
4042 : } else {
4043 0 : gap_index++;
4044 : }
4045 629 : gap_pos = delay_insertion ? Instruction::END : Instruction::START;
4046 : }
4047 : // Reloads or spills for spilled in deferred blocks ranges must happen
4048 : // only in deferred blocks.
4049 : DCHECK_IMPLIES(connect_spilled && !(prev_operand.IsAnyRegister() &&
4050 : cur_operand.IsAnyRegister()),
4051 : code()->GetInstructionBlock(gap_index)->IsDeferred());
4052 :
4053 : ParallelMove* move =
4054 : code()->InstructionAt(gap_index)->GetOrCreateParallelMove(
4055 19096065 : gap_pos, code_zone());
4056 19095923 : if (!delay_insertion) {
4057 : move->AddMove(prev_operand, cur_operand);
4058 : } else {
4059 : delayed_insertion_map.insert(
4060 629 : std::make_pair(std::make_pair(move, prev_operand), cur_operand));
4061 : }
4062 : }
4063 : }
4064 5898784 : if (delayed_insertion_map.empty()) return;
4065 : // Insert all the moves which should occur after the stored move.
4066 : ZoneVector<MoveOperands*> to_insert(local_zone);
4067 : ZoneVector<MoveOperands*> to_eliminate(local_zone);
4068 398 : to_insert.reserve(4);
4069 398 : to_eliminate.reserve(4);
4070 398 : ParallelMove* moves = delayed_insertion_map.begin()->first.first;
4071 : for (auto it = delayed_insertion_map.begin();; ++it) {
4072 : bool done = it == delayed_insertion_map.end();
4073 1027 : if (done || it->first.first != moves) {
4074 : // Commit the MoveOperands for current ParallelMove.
4075 1258 : for (MoveOperands* move : to_eliminate) {
4076 : move->Eliminate();
4077 : }
4078 1887 : for (MoveOperands* move : to_insert) {
4079 629 : moves->push_back(move);
4080 : }
4081 629 : if (done) break;
4082 : // Reset state.
4083 : to_eliminate.clear();
4084 : to_insert.clear();
4085 231 : moves = it->first.first;
4086 : }
4087 : // Gather all MoveOperands for a single ParallelMove.
4088 : MoveOperands* move =
4089 629 : new (code_zone()) MoveOperands(it->first.second, it->second);
4090 629 : moves->PrepareInsertAfter(move, &to_eliminate);
4091 629 : to_insert.push_back(move);
4092 : }
4093 : }
4094 :
4095 881020 : void LiveRangeConnector::CommitSpillsInDeferredBlocks(
4096 4442715 : TopLevelLiveRange* range, LiveRangeBoundArray* array, Zone* temp_zone) {
4097 : DCHECK(range->IsSpilledOnlyInDeferredBlocks());
4098 : DCHECK(!range->spilled());
4099 :
4100 10766742 : InstructionSequence* code = data()->code();
4101 881020 : InstructionOperand spill_operand = range->GetSpillRangeOperand();
4102 :
4103 881020 : TRACE("Live Range %d will be spilled only in deferred blocks.\n",
4104 : range->vreg());
4105 : // If we have ranges that aren't spilled but require the operand on the stack,
4106 : // make sure we insert the spill.
4107 8062377 : for (const LiveRange* child = range; child != nullptr;
4108 : child = child->next()) {
4109 7438807 : for (const UsePosition* pos = child->first_pos(); pos != nullptr;
4110 : pos = pos->next()) {
4111 7909223 : if (pos->type() != UsePositionType::kRequiresSlot && !child->spilled())
4112 : continue;
4113 : range->AddBlockRequiringSpillOperand(
4114 : code->GetInstructionBlock(pos->pos().ToInstructionIndex())
4115 514873 : ->rpo_number());
4116 : }
4117 : }
4118 :
4119 881047 : ZoneQueue<int> worklist(temp_zone);
4120 :
4121 3283916 : for (BitVector::Iterator iterator(
4122 881013 : range->GetListOfBlocksRequiringSpillOperands());
4123 1521855 : !iterator.Done(); iterator.Advance()) {
4124 3043715 : worklist.push(iterator.Current());
4125 : }
4126 :
4127 : ZoneSet<std::pair<RpoNumber, int>> done_moves(temp_zone);
4128 : // Seek the deferred blocks that dominate locations requiring spill operands,
4129 : // and spill there. We only need to spill at the start of such blocks.
4130 : BitVector done_blocks(
4131 881037 : range->GetListOfBlocksRequiringSpillOperands()->length(), temp_zone);
4132 7406659 : while (!worklist.empty()) {
4133 5644627 : int block_id = worklist.front();
4134 : worklist.pop();
4135 5644642 : if (done_blocks.Contains(block_id)) continue;
4136 : done_blocks.Add(block_id);
4137 1340306 : InstructionBlock* spill_block =
4138 4422644 : code->InstructionBlockAt(RpoNumber::FromInt(block_id));
4139 :
4140 14308353 : for (const RpoNumber& pred : spill_block->predecessors()) {
4141 6803402 : const InstructionBlock* pred_block = code->InstructionBlockAt(pred);
4142 :
4143 5463078 : if (pred_block->IsDeferred()) {
4144 8245508 : worklist.push(pred_block->rpo_number().ToInt());
4145 : } else {
4146 : LifetimePosition pred_end =
4147 : LifetimePosition::InstructionFromInstructionIndex(
4148 : pred_block->last_instruction_index());
4149 :
4150 : LiveRangeBound* bound = array->Find(pred_end);
4151 :
4152 1340324 : InstructionOperand pred_op = bound->range_->GetAssignedOperand();
4153 :
4154 : RpoNumber spill_block_number = spill_block->rpo_number();
4155 1340308 : if (done_moves.find(std::make_pair(
4156 2680641 : spill_block_number, range->vreg())) == done_moves.end()) {
4157 : data()->AddGapMove(spill_block->first_instruction_index(),
4158 : Instruction::GapPosition::START, pred_op,
4159 1340306 : spill_operand);
4160 2680633 : done_moves.insert(std::make_pair(spill_block_number, range->vreg()));
4161 : spill_block->mark_needs_frame();
4162 : }
4163 : }
4164 : }
4165 : }
4166 881049 : }
4167 :
4168 : #undef TRACE
4169 :
4170 : } // namespace compiler
4171 : } // namespace internal
4172 183867 : } // namespace v8
|