Line data Source code
1 : // Copyright 2014 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/compiler/register-allocator-verifier.h"
6 :
7 : #include "src/bit-vector.h"
8 : #include "src/compiler/instruction.h"
9 : #include "src/ostreams.h"
10 :
11 : namespace v8 {
12 : namespace internal {
13 : namespace compiler {
14 :
15 : namespace {
16 :
17 664 : size_t OperandCount(const Instruction* instr) {
18 1328 : return instr->InputCount() + instr->OutputCount() + instr->TempCount();
19 : }
20 :
21 :
22 332 : void VerifyEmptyGaps(const Instruction* instr) {
23 996 : for (int i = Instruction::FIRST_GAP_POSITION;
24 : i <= Instruction::LAST_GAP_POSITION; i++) {
25 : Instruction::GapPosition inner_pos =
26 : static_cast<Instruction::GapPosition>(i);
27 664 : CHECK(instr->GetParallelMove(inner_pos) == nullptr);
28 : }
29 332 : }
30 :
31 :
32 332 : void VerifyAllocatedGaps(const Instruction* instr) {
33 996 : for (int i = Instruction::FIRST_GAP_POSITION;
34 : i <= Instruction::LAST_GAP_POSITION; i++) {
35 : Instruction::GapPosition inner_pos =
36 : static_cast<Instruction::GapPosition>(i);
37 : const ParallelMove* moves = instr->GetParallelMove(inner_pos);
38 664 : if (moves == nullptr) continue;
39 618 : for (const MoveOperands* move : *moves) {
40 228 : if (move->IsRedundant()) continue;
41 163 : CHECK(move->source().IsAllocated() || move->source().IsConstant());
42 163 : CHECK(move->destination().IsAllocated());
43 : }
44 : }
45 332 : }
46 :
47 : } // namespace
48 :
// Snapshots the pre-allocation operand constraints of every instruction in
// |sequence| so they can be checked again after allocation. For each
// instruction, constraints are stored in one flat array ordered as
// inputs, then temps, then outputs (the same order OperandCount implies).
RegisterAllocatorVerifier::RegisterAllocatorVerifier(
    Zone* zone, const RegisterConfiguration* config,
    const InstructionSequence* sequence)
    : zone_(zone),
      config_(config),
      sequence_(sequence),
      constraints_(zone),
      assessments_(zone),
      outstanding_assessments_(zone) {
  constraints_.reserve(sequence->instructions().size());
  // TODO(dcarney): model unique constraints.
  // Construct OperandConstraints for all InstructionOperands, eliminating
  // kSameAsFirst along the way.
  for (const Instruction* instr : sequence->instructions()) {
    // All gaps should be totally unallocated at this point.
    VerifyEmptyGaps(instr);
    const size_t operand_count = OperandCount(instr);
    OperandConstraint* op_constraints =
        zone->NewArray<OperandConstraint>(operand_count);
    // |count| indexes the flat constraint array across all three loops.
    size_t count = 0;
    for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
      BuildConstraint(instr->InputAt(i), &op_constraints[count]);
      VerifyInput(op_constraints[count]);
    }
    for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
      BuildConstraint(instr->TempAt(i), &op_constraints[count]);
      VerifyTemp(op_constraints[count]);
    }
    for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
      BuildConstraint(instr->OutputAt(i), &op_constraints[count]);
      // Rewrite kSameAsFirst outputs into a copy of the first input's
      // constraint, so later checks never see kSameAsFirst.
      if (op_constraints[count].type_ == kSameAsFirst) {
        CHECK(instr->InputCount() > 0);
        op_constraints[count].type_ = op_constraints[0].type_;
        op_constraints[count].value_ = op_constraints[0].value_;
      }
      VerifyOutput(op_constraints[count]);
    }
    InstructionConstraint instr_constraint = {instr, operand_count,
                                              op_constraints};
    constraints()->push_back(instr_constraint);
  }
}
91 :
92 260 : void RegisterAllocatorVerifier::VerifyInput(
93 : const OperandConstraint& constraint) {
94 260 : CHECK_NE(kSameAsFirst, constraint.type_);
95 260 : if (constraint.type_ != kImmediate && constraint.type_ != kExplicit) {
96 159 : CHECK_NE(InstructionOperand::kInvalidVirtualRegister,
97 : constraint.virtual_register_);
98 : }
99 260 : }
100 :
101 0 : void RegisterAllocatorVerifier::VerifyTemp(
102 : const OperandConstraint& constraint) {
103 0 : CHECK_NE(kSameAsFirst, constraint.type_);
104 0 : CHECK_NE(kImmediate, constraint.type_);
105 0 : CHECK_NE(kExplicit, constraint.type_);
106 0 : CHECK_NE(kConstant, constraint.type_);
107 0 : }
108 :
109 177 : void RegisterAllocatorVerifier::VerifyOutput(
110 : const OperandConstraint& constraint) {
111 177 : CHECK_NE(kImmediate, constraint.type_);
112 177 : CHECK_NE(kExplicit, constraint.type_);
113 177 : CHECK_NE(InstructionOperand::kInvalidVirtualRegister,
114 : constraint.virtual_register_);
115 177 : }
116 :
// Runs after allocation: walks the instruction sequence in lock-step with
// the constraints recorded by the constructor and checks every assigned
// operand against its original constraint.
void RegisterAllocatorVerifier::VerifyAssignment() {
  CHECK(sequence()->instructions().size() == constraints()->size());
  auto instr_it = sequence()->begin();
  for (const auto& instr_constraint : *constraints()) {
    const Instruction* instr = instr_constraint.instruction_;
    // All gaps should be totally allocated at this point.
    VerifyAllocatedGaps(instr);
    const size_t operand_count = instr_constraint.operand_constaints_size_;
    const OperandConstraint* op_constraints =
        instr_constraint.operand_constraints_;
    // The recorded constraint must refer to the instruction we are visiting,
    // and the operand layout must not have changed shape.
    CHECK_EQ(instr, *instr_it);
    CHECK(operand_count == OperandCount(instr));
    // |count| walks the flat constraint array in the same
    // inputs/temps/outputs order used when it was built.
    size_t count = 0;
    for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
      CheckConstraint(instr->InputAt(i), &op_constraints[count]);
    }
    for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
      CheckConstraint(instr->TempAt(i), &op_constraints[count]);
    }
    for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
      CheckConstraint(instr->OutputAt(i), &op_constraints[count]);
    }
    ++instr_it;
  }
}
142 :
143 437 : void RegisterAllocatorVerifier::BuildConstraint(const InstructionOperand* op,
144 129 : OperandConstraint* constraint) {
145 437 : constraint->value_ = kMinInt;
146 437 : constraint->virtual_register_ = InstructionOperand::kInvalidVirtualRegister;
147 437 : if (op->IsConstant()) {
148 61 : constraint->type_ = kConstant;
149 61 : constraint->value_ = ConstantOperand::cast(op)->virtual_register();
150 61 : constraint->virtual_register_ = constraint->value_;
151 376 : } else if (op->IsExplicit()) {
152 0 : constraint->type_ = kExplicit;
153 376 : } else if (op->IsImmediate()) {
154 : const ImmediateOperand* imm = ImmediateOperand::cast(op);
155 : int value = imm->type() == ImmediateOperand::INLINE ? imm->inline_value()
156 101 : : imm->indexed_value();
157 101 : constraint->type_ = kImmediate;
158 101 : constraint->value_ = value;
159 : } else {
160 275 : CHECK(op->IsUnallocated());
161 : const UnallocatedOperand* unallocated = UnallocatedOperand::cast(op);
162 : int vreg = unallocated->virtual_register();
163 275 : constraint->virtual_register_ = vreg;
164 275 : if (unallocated->basic_policy() == UnallocatedOperand::FIXED_SLOT) {
165 51 : constraint->type_ = kFixedSlot;
166 51 : constraint->value_ = unallocated->fixed_slot_index();
167 : } else {
168 224 : switch (unallocated->extended_policy()) {
169 : case UnallocatedOperand::ANY:
170 : case UnallocatedOperand::NONE:
171 38 : if (sequence()->IsFP(vreg)) {
172 0 : constraint->type_ = kNoneFP;
173 : } else {
174 38 : constraint->type_ = kNone;
175 : }
176 : break;
177 : case UnallocatedOperand::FIXED_REGISTER:
178 88 : if (unallocated->HasSecondaryStorage()) {
179 0 : constraint->type_ = kRegisterAndSlot;
180 0 : constraint->spilled_slot_ = unallocated->GetSecondaryStorage();
181 : } else {
182 88 : constraint->type_ = kFixedRegister;
183 : }
184 88 : constraint->value_ = unallocated->fixed_register_index();
185 88 : break;
186 : case UnallocatedOperand::FIXED_FP_REGISTER:
187 2 : constraint->type_ = kFixedFPRegister;
188 2 : constraint->value_ = unallocated->fixed_register_index();
189 2 : break;
190 : case UnallocatedOperand::MUST_HAVE_REGISTER:
191 68 : if (sequence()->IsFP(vreg)) {
192 12 : constraint->type_ = kFPRegister;
193 : } else {
194 56 : constraint->type_ = kRegister;
195 : }
196 : break;
197 : case UnallocatedOperand::MUST_HAVE_SLOT:
198 23 : constraint->type_ = kSlot;
199 : constraint->value_ =
200 23 : ElementSizeLog2Of(sequence()->GetRepresentation(vreg));
201 23 : break;
202 : case UnallocatedOperand::SAME_AS_FIRST_INPUT:
203 5 : constraint->type_ = kSameAsFirst;
204 5 : break;
205 : }
206 : }
207 : }
208 437 : }
209 :
210 437 : void RegisterAllocatorVerifier::CheckConstraint(
211 : const InstructionOperand* op, const OperandConstraint* constraint) {
212 437 : switch (constraint->type_) {
213 : case kConstant:
214 61 : CHECK(op->IsConstant());
215 122 : CHECK_EQ(ConstantOperand::cast(op)->virtual_register(),
216 : constraint->value_);
217 : return;
218 : case kImmediate: {
219 101 : CHECK(op->IsImmediate());
220 : const ImmediateOperand* imm = ImmediateOperand::cast(op);
221 : int value = imm->type() == ImmediateOperand::INLINE
222 : ? imm->inline_value()
223 101 : : imm->indexed_value();
224 101 : CHECK_EQ(value, constraint->value_);
225 : return;
226 : }
227 : case kRegister:
228 61 : CHECK(op->IsRegister());
229 : return;
230 : case kFPRegister:
231 12 : CHECK(op->IsFPRegister());
232 : return;
233 : case kExplicit:
234 0 : CHECK(op->IsExplicit());
235 : return;
236 : case kFixedRegister:
237 : case kRegisterAndSlot:
238 88 : CHECK(op->IsRegister());
239 176 : CHECK_EQ(LocationOperand::cast(op)->register_code(), constraint->value_);
240 : return;
241 : case kFixedFPRegister:
242 2 : CHECK(op->IsFPRegister());
243 4 : CHECK_EQ(LocationOperand::cast(op)->register_code(), constraint->value_);
244 : return;
245 : case kFixedSlot:
246 51 : CHECK(op->IsStackSlot() || op->IsFPStackSlot());
247 102 : CHECK_EQ(LocationOperand::cast(op)->index(), constraint->value_);
248 : return;
249 : case kSlot:
250 23 : CHECK(op->IsStackSlot() || op->IsFPStackSlot());
251 46 : CHECK_EQ(ElementSizeLog2Of(LocationOperand::cast(op)->representation()),
252 : constraint->value_);
253 : return;
254 : case kNone:
255 55 : CHECK(op->IsRegister() || op->IsStackSlot());
256 : return;
257 : case kNoneFP:
258 0 : CHECK(op->IsFPRegister() || op->IsFPStackSlot());
259 : return;
260 : case kSameAsFirst:
261 0 : CHECK(false);
262 : return;
263 : }
264 : }
265 :
266 332 : void BlockAssessments::PerformMoves(const Instruction* instruction) {
267 : const ParallelMove* first =
268 : instruction->GetParallelMove(Instruction::GapPosition::START);
269 332 : PerformParallelMoves(first);
270 : const ParallelMove* last =
271 : instruction->GetParallelMove(Instruction::GapPosition::END);
272 332 : PerformParallelMoves(last);
273 332 : }
274 :
275 664 : void BlockAssessments::PerformParallelMoves(const ParallelMove* moves) {
276 1328 : if (moves == nullptr) return;
277 :
278 195 : CHECK(map_for_moves_.empty());
279 618 : for (MoveOperands* move : *moves) {
280 228 : if (move->IsEliminated() || move->IsRedundant()) continue;
281 163 : auto it = map_.find(move->source());
282 : // The RHS of a parallel move should have been already assessed.
283 163 : CHECK(it != map_.end());
284 : // The LHS of a parallel move should not have been assigned in this
285 : // parallel move.
286 326 : CHECK(map_for_moves_.find(move->destination()) == map_for_moves_.end());
287 : // Copy the assessment to the destination.
288 163 : map_for_moves_[move->destination()] = it->second;
289 : }
290 553 : for (auto pair : map_for_moves_) {
291 163 : map_[pair.first] = pair.second;
292 : }
293 : map_for_moves_.clear();
294 : }
295 :
296 10 : void BlockAssessments::DropRegisters() {
297 150 : for (auto iterator = map().begin(), end = map().end(); iterator != end;) {
298 : auto current = iterator;
299 : ++iterator;
300 130 : InstructionOperand op = current->first;
301 130 : if (op.IsAnyRegister()) map().erase(current);
302 : }
303 10 : }
304 :
305 0 : void BlockAssessments::Print() const {
306 0 : OFStream os(stdout);
307 0 : for (const auto pair : map()) {
308 : const InstructionOperand op = pair.first;
309 : const Assessment* assessment = pair.second;
310 : // Use operator<< so we can write the assessment on the same
311 : // line. Since we need a register configuration, just pick
312 : // Turbofan for now.
313 0 : PrintableInstructionOperand wrapper = {RegisterConfiguration::Turbofan(),
314 0 : op};
315 0 : os << wrapper << " : ";
316 0 : if (assessment->kind() == AssessmentKind::Final) {
317 0 : os << "v" << FinalAssessment::cast(assessment)->virtual_register();
318 : } else {
319 0 : os << "P";
320 : }
321 : os << std::endl;
322 : }
323 0 : os << std::endl;
324 0 : }
325 :
326 142 : BlockAssessments* RegisterAllocatorVerifier::CreateForBlock(
327 287 : const InstructionBlock* block) {
328 : RpoNumber current_block_id = block->rpo_number();
329 :
330 : BlockAssessments* ret = new (zone()) BlockAssessments(zone());
331 142 : if (block->PredecessorCount() == 0) {
332 : // TODO(mtrofin): the following check should hold, however, in certain
333 : // unit tests it is invalidated by the last block. Investigate and
334 : // normalize the CFG.
335 : // CHECK(current_block_id.ToInt() == 0);
336 : // The phi size test below is because we can, technically, have phi
337 : // instructions with one argument. Some tests expose that, too.
338 179 : } else if (block->PredecessorCount() == 1 && block->phis().size() == 0) {
339 79 : const BlockAssessments* prev_block = assessments_[block->predecessors()[0]];
340 79 : ret->CopyFrom(prev_block);
341 : } else {
342 98 : for (RpoNumber pred_id : block->predecessors()) {
343 : // For every operand coming from any of the predecessors, create an
344 : // Unfinalized assessment.
345 : auto iterator = assessments_.find(pred_id);
346 39 : if (iterator == assessments_.end()) {
347 : // This block is the head of a loop, and this predecessor is the
348 : // loopback
349 : // arc.
350 : // Validate this is a loop case, otherwise the CFG is malformed.
351 2 : CHECK(pred_id >= current_block_id);
352 2 : CHECK(block->IsLoopHeader());
353 : continue;
354 : }
355 37 : const BlockAssessments* pred_assessments = iterator->second;
356 37 : CHECK_NOT_NULL(pred_assessments);
357 292 : for (auto pair : pred_assessments->map()) {
358 218 : InstructionOperand operand = pair.first;
359 218 : if (ret->map().find(operand) == ret->map().end()) {
360 : ret->map().insert(std::make_pair(
361 435 : operand, new (zone()) PendingAssessment(block, operand)));
362 : }
363 : }
364 : }
365 : }
366 142 : return ret;
367 : }
368 :
369 50 : void RegisterAllocatorVerifier::ValidatePendingAssessment(
370 : RpoNumber block_id, InstructionOperand op,
371 : BlockAssessments* current_assessments, const PendingAssessment* assessment,
372 102 : int virtual_register) {
373 : // When validating a pending assessment, it is possible some of the
374 : // assessments
375 : // for the original operand (the one where the assessment was created for
376 : // first) are also pending. To avoid recursion, we use a work list. To
377 : // deal with cycles, we keep a set of seen nodes.
378 50 : Zone local_zone(zone()->allocator(), ZONE_NAME);
379 50 : ZoneQueue<std::pair<const PendingAssessment*, int>> worklist(&local_zone);
380 : ZoneSet<RpoNumber> seen(&local_zone);
381 100 : worklist.push(std::make_pair(assessment, virtual_register));
382 : seen.insert(block_id);
383 :
384 136 : while (!worklist.empty()) {
385 86 : auto work = worklist.front();
386 86 : const PendingAssessment* current_assessment = work.first;
387 : int current_virtual_register = work.second;
388 86 : InstructionOperand current_operand = current_assessment->operand();
389 : worklist.pop();
390 :
391 : const InstructionBlock* origin = current_assessment->origin();
392 88 : CHECK(origin->PredecessorCount() > 1 || origin->phis().size() > 0);
393 :
394 : // Check if the virtual register is a phi first, instead of relying on
395 : // the incoming assessments. In particular, this handles the case
396 : // v1 = phi v0 v0, which structurally is identical to v0 having been
397 : // defined at the top of a diamond, and arriving at the node joining the
398 : // diamond's branches.
399 : const PhiInstruction* phi = nullptr;
400 444 : for (const PhiInstruction* candidate : origin->phis()) {
401 317 : if (candidate->virtual_register() == current_virtual_register) {
402 : phi = candidate;
403 : break;
404 : }
405 : }
406 :
407 : int op_index = 0;
408 342 : for (RpoNumber pred : origin->predecessors()) {
409 : int expected =
410 260 : phi != nullptr ? phi->operands()[op_index] : current_virtual_register;
411 :
412 170 : ++op_index;
413 : auto pred_assignment = assessments_.find(pred);
414 170 : if (pred_assignment == assessments_.end()) {
415 6 : CHECK(origin->IsLoopHeader());
416 : auto todo_iter = outstanding_assessments_.find(pred);
417 : DelayedAssessments* set = nullptr;
418 6 : if (todo_iter == outstanding_assessments_.end()) {
419 : set = new (zone()) DelayedAssessments(zone());
420 4 : outstanding_assessments_.insert(std::make_pair(pred, set));
421 : } else {
422 4 : set = todo_iter->second;
423 : }
424 6 : set->AddDelayedAssessment(current_operand, expected);
425 : continue;
426 : }
427 :
428 164 : const BlockAssessments* pred_assessments = pred_assignment->second;
429 : auto found_contribution = pred_assessments->map().find(current_operand);
430 164 : CHECK(found_contribution != pred_assessments->map().end());
431 164 : Assessment* contribution = found_contribution->second;
432 :
433 164 : switch (contribution->kind()) {
434 : case Final:
435 : ValidateFinalAssessment(
436 : block_id, current_operand, current_assessments,
437 127 : FinalAssessment::cast(contribution), expected);
438 127 : break;
439 : case Pending: {
440 : // This happens if we have a diamond feeding into another one, and
441 : // the inner one never being used - other than for carrying the value.
442 37 : const PendingAssessment* next = PendingAssessment::cast(contribution);
443 37 : if (seen.find(pred) == seen.end()) {
444 36 : worklist.push({next, expected});
445 : seen.insert(pred);
446 : }
447 : // Note that we do not want to finalize pending assessments at the
448 : // beginning of a block - which is the information we'd have
449 : // available here. This is because this operand may be reused to
450 : // define
451 : // duplicate phis.
452 : break;
453 : }
454 : }
455 : }
456 : }
457 : // If everything checks out, we may make the assessment.
458 50 : current_assessments->map()[op] =
459 100 : new (zone()) FinalAssessment(virtual_register, assessment);
460 50 : }
461 :
462 242 : void RegisterAllocatorVerifier::ValidateFinalAssessment(
463 : RpoNumber block_id, InstructionOperand op,
464 242 : BlockAssessments* current_assessments, const FinalAssessment* assessment,
465 : int virtual_register) {
466 484 : if (assessment->virtual_register() == virtual_register) return;
467 : // If we have 2 phis with the exact same operand list, and the first phi is
468 : // used before the second one, via the operand incoming to the block,
469 : // and the second one's operand is defined (via a parallel move) after the
470 : // use, then the original operand will be assigned to the first phi. We
471 : // then look at the original pending assessment to ascertain if op
472 : // is virtual_register.
473 0 : const PendingAssessment* old = assessment->original_pending_assessment();
474 0 : CHECK_NOT_NULL(old);
475 0 : RpoNumber old_block = old->origin()->rpo_number();
476 : DCHECK_LE(old_block, block_id);
477 : BlockAssessments* old_block_assessments =
478 0 : old_block == block_id ? current_assessments : assessments_[old_block];
479 : ValidatePendingAssessment(old_block, op, old_block_assessments, old,
480 0 : virtual_register);
481 : }
482 :
483 159 : void RegisterAllocatorVerifier::ValidateUse(
484 : RpoNumber block_id, BlockAssessments* current_assessments,
485 : InstructionOperand op, int virtual_register) {
486 : auto iterator = current_assessments->map().find(op);
487 : // We should have seen this operand before.
488 159 : CHECK(iterator != current_assessments->map().end());
489 159 : Assessment* assessment = iterator->second;
490 :
491 159 : switch (assessment->kind()) {
492 : case Final:
493 : ValidateFinalAssessment(block_id, op, current_assessments,
494 : FinalAssessment::cast(assessment),
495 110 : virtual_register);
496 110 : break;
497 : case Pending: {
498 49 : const PendingAssessment* pending = PendingAssessment::cast(assessment);
499 : ValidatePendingAssessment(block_id, op, current_assessments, pending,
500 49 : virtual_register);
501 49 : break;
502 : }
503 : }
504 159 : }
505 :
506 227 : void RegisterAllocatorVerifier::VerifyGapMoves() {
507 42 : CHECK(assessments_.empty());
508 42 : CHECK(outstanding_assessments_.empty());
509 42 : const size_t block_count = sequence()->instruction_blocks().size();
510 184 : for (size_t block_index = 0; block_index < block_count; ++block_index) {
511 616 : const InstructionBlock* block =
512 284 : sequence()->instruction_blocks()[block_index];
513 142 : BlockAssessments* block_assessments = CreateForBlock(block);
514 :
515 948 : for (int instr_index = block->code_start(); instr_index < block->code_end();
516 : ++instr_index) {
517 332 : const InstructionConstraint& instr_constraint = constraints_[instr_index];
518 1765 : const Instruction* instr = instr_constraint.instruction_;
519 332 : block_assessments->PerformMoves(instr);
520 :
521 : const OperandConstraint* op_constraints =
522 332 : instr_constraint.operand_constraints_;
523 : size_t count = 0;
524 1184 : for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
525 260 : if (op_constraints[count].type_ == kImmediate ||
526 : op_constraints[count].type_ == kExplicit) {
527 101 : continue;
528 : }
529 159 : int virtual_register = op_constraints[count].virtual_register_;
530 159 : InstructionOperand op = *instr->InputAt(i);
531 : ValidateUse(block->rpo_number(), block_assessments, op,
532 159 : virtual_register);
533 : }
534 332 : for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
535 : block_assessments->Drop(*instr->TempAt(i));
536 : }
537 332 : if (instr->IsCall()) {
538 10 : block_assessments->DropRegisters();
539 : }
540 686 : for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
541 177 : int virtual_register = op_constraints[count].virtual_register_;
542 177 : block_assessments->AddDefinition(*instr->OutputAt(i), virtual_register);
543 177 : if (op_constraints[count].type_ == kRegisterAndSlot) {
544 : const AllocatedOperand* reg_op =
545 : AllocatedOperand::cast(instr->OutputAt(i));
546 : MachineRepresentation rep = reg_op->representation();
547 : const AllocatedOperand* stack_op = AllocatedOperand::New(
548 : zone(), LocationOperand::LocationKind::STACK_SLOT, rep,
549 0 : op_constraints[i].spilled_slot_);
550 0 : block_assessments->AddDefinition(*stack_op, virtual_register);
551 : }
552 : }
553 : }
554 : // Now commit the assessments for this block. If there are any delayed
555 : // assessments, ValidatePendingAssessment should see this block, too.
556 142 : assessments_[block->rpo_number()] = block_assessments;
557 :
558 284 : auto todo_iter = outstanding_assessments_.find(block->rpo_number());
559 142 : if (todo_iter == outstanding_assessments_.end()) continue;
560 2 : DelayedAssessments* todo = todo_iter->second;
561 10 : for (auto pair : todo->map()) {
562 6 : InstructionOperand op = pair.first;
563 : int vreg = pair.second;
564 : auto found_op = block_assessments->map().find(op);
565 6 : CHECK(found_op != block_assessments->map().end());
566 12 : switch (found_op->second->kind()) {
567 : case Final:
568 : ValidateFinalAssessment(block->rpo_number(), op, block_assessments,
569 : FinalAssessment::cast(found_op->second),
570 5 : vreg);
571 5 : break;
572 : case Pending:
573 : const PendingAssessment* pending =
574 1 : PendingAssessment::cast(found_op->second);
575 : ValidatePendingAssessment(block->rpo_number(), op, block_assessments,
576 1 : pending, vreg);
577 1 : block_assessments->map()[op] =
578 1 : new (zone()) FinalAssessment(vreg, pending);
579 1 : break;
580 : }
581 : }
582 : }
583 42 : }
584 :
585 : } // namespace compiler
586 : } // namespace internal
587 : } // namespace v8
|