Line data Source code
1 : // Copyright 2014 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/compiler/backend/register-allocator-verifier.h"
6 :
7 : #include "src/bit-vector.h"
8 : #include "src/compiler/backend/instruction.h"
9 : #include "src/ostreams.h"
10 :
11 : namespace v8 {
12 : namespace internal {
13 : namespace compiler {
14 :
15 : namespace {
16 :
17 1236 : size_t OperandCount(const Instruction* instr) {
18 2472 : return instr->InputCount() + instr->OutputCount() + instr->TempCount();
19 : }
20 :
21 412 : void VerifyEmptyGaps(const Instruction* instr) {
22 1236 : for (int i = Instruction::FIRST_GAP_POSITION;
23 : i <= Instruction::LAST_GAP_POSITION; i++) {
24 : Instruction::GapPosition inner_pos =
25 : static_cast<Instruction::GapPosition>(i);
26 824 : CHECK_NULL(instr->GetParallelMove(inner_pos));
27 : }
28 412 : }
29 :
30 824 : void VerifyAllocatedGaps(const Instruction* instr, const char* caller_info) {
31 2472 : for (int i = Instruction::FIRST_GAP_POSITION;
32 : i <= Instruction::LAST_GAP_POSITION; i++) {
33 : Instruction::GapPosition inner_pos =
34 : static_cast<Instruction::GapPosition>(i);
35 : const ParallelMove* moves = instr->GetParallelMove(inner_pos);
36 1648 : if (moves == nullptr) continue;
37 1588 : for (const MoveOperands* move : *moves) {
38 714 : if (move->IsRedundant()) continue;
39 388 : CHECK_WITH_MSG(
40 : move->source().IsAllocated() || move->source().IsConstant(),
41 : caller_info);
42 388 : CHECK_WITH_MSG(move->destination().IsAllocated(), caller_info);
43 : }
44 : }
45 824 : }
46 :
47 : } // namespace
48 :
// Snapshots the operand constraints of |sequence| before register
// allocation. VerifyAssignment() later checks the allocated code against
// this snapshot, so the sequence must still be unallocated here.
RegisterAllocatorVerifier::RegisterAllocatorVerifier(
    Zone* zone, const RegisterConfiguration* config,
    const InstructionSequence* sequence)
    : zone_(zone),
      config_(config),
      sequence_(sequence),
      constraints_(zone),
      assessments_(zone),
      outstanding_assessments_(zone) {
  constraints_.reserve(sequence->instructions().size());
  // TODO(dcarney): model unique constraints.
  // Construct OperandConstraints for all InstructionOperands, eliminating
  // kSameAsFirst along the way.
  for (const Instruction* instr : sequence->instructions()) {
    // All gaps should be totally unallocated at this point.
    VerifyEmptyGaps(instr);
    const size_t operand_count = OperandCount(instr);
    OperandConstraint* op_constraints =
        zone->NewArray<OperandConstraint>(operand_count);
    size_t count = 0;
    // Constraints are laid out flat: inputs first, then temps, then outputs.
    // VerifyAssignment and VerifyGapMoves walk them in this same order.
    for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
      BuildConstraint(instr->InputAt(i), &op_constraints[count]);
      VerifyInput(op_constraints[count]);
    }
    for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
      BuildConstraint(instr->TempAt(i), &op_constraints[count]);
      VerifyTemp(op_constraints[count]);
    }
    for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
      BuildConstraint(instr->OutputAt(i), &op_constraints[count]);
      if (op_constraints[count].type_ == kSameAsFirst) {
        // Replace kSameAsFirst with a copy of the first input's constraint,
        // so later checks never need to special-case it.
        CHECK_LT(0, instr->InputCount());
        op_constraints[count].type_ = op_constraints[0].type_;
        op_constraints[count].value_ = op_constraints[0].value_;
      }
      VerifyOutput(op_constraints[count]);
    }
    InstructionConstraint instr_constraint = {instr, operand_count,
                                              op_constraints};
    constraints()->push_back(instr_constraint);
  }
}
91 :
92 316 : void RegisterAllocatorVerifier::VerifyInput(
93 : const OperandConstraint& constraint) {
94 316 : CHECK_NE(kSameAsFirst, constraint.type_);
95 316 : if (constraint.type_ != kImmediate && constraint.type_ != kExplicit) {
96 215 : CHECK_NE(InstructionOperand::kInvalidVirtualRegister,
97 : constraint.virtual_register_);
98 : }
99 316 : }
100 :
// Sanity-checks a temp constraint: temps are scratch locations, so they can
// never be tied to the first input, nor be immediates, explicit operands,
// or constants.
void RegisterAllocatorVerifier::VerifyTemp(
    const OperandConstraint& constraint) {
  CHECK_NE(kSameAsFirst, constraint.type_);
  CHECK_NE(kImmediate, constraint.type_);
  CHECK_NE(kExplicit, constraint.type_);
  CHECK_NE(kConstant, constraint.type_);
}
108 :
// Sanity-checks an output constraint: an output defines a value, so it can
// be neither an immediate nor an explicit operand, and it must name a valid
// virtual register. (kSameAsFirst was already rewritten in the constructor.)
void RegisterAllocatorVerifier::VerifyOutput(
    const OperandConstraint& constraint) {
  CHECK_NE(kImmediate, constraint.type_);
  CHECK_NE(kExplicit, constraint.type_);
  CHECK_NE(InstructionOperand::kInvalidVirtualRegister,
           constraint.virtual_register_);
}
116 :
// Checks the fully-allocated sequence against the constraints recorded in
// the constructor: every gap move is allocated and every operand satisfies
// the constraint built for it. |caller_info| identifies the calling phase
// in CHECK failure messages.
void RegisterAllocatorVerifier::VerifyAssignment(const char* caller_info) {
  caller_info_ = caller_info;
  CHECK(sequence()->instructions().size() == constraints()->size());
  // Walk the instruction sequence and the recorded constraints in lock step;
  // CHECK_EQ below asserts they stay aligned.
  auto instr_it = sequence()->begin();
  for (const auto& instr_constraint : *constraints()) {
    const Instruction* instr = instr_constraint.instruction_;
    // All gaps should be totally allocated at this point.
    VerifyAllocatedGaps(instr, caller_info_);
    const size_t operand_count = instr_constraint.operand_constaints_size_;
    const OperandConstraint* op_constraints =
        instr_constraint.operand_constraints_;
    CHECK_EQ(instr, *instr_it);
    CHECK(operand_count == OperandCount(instr));
    // Constraints were stored inputs first, then temps, then outputs — walk
    // the operands in the same order (see the constructor).
    size_t count = 0;
    for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
      CheckConstraint(instr->InputAt(i), &op_constraints[count]);
    }
    for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
      CheckConstraint(instr->TempAt(i), &op_constraints[count]);
    }
    for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
      CheckConstraint(instr->OutputAt(i), &op_constraints[count]);
    }
    ++instr_it;
  }
}
143 :
// Translates the pre-allocation operand |op| into an OperandConstraint:
// the allocation policy the register allocator must satisfy (fixed
// register/slot, any register, slot, etc.), an auxiliary value whose
// meaning depends on the policy (vreg, immediate, slot index, register
// code, or element size), and the virtual register, when the operand
// refers to one.
void RegisterAllocatorVerifier::BuildConstraint(const InstructionOperand* op,
                                                OperandConstraint* constraint) {
  constraint->value_ = kMinInt;
  constraint->virtual_register_ = InstructionOperand::kInvalidVirtualRegister;
  if (op->IsConstant()) {
    constraint->type_ = kConstant;
    constraint->value_ = ConstantOperand::cast(op)->virtual_register();
    constraint->virtual_register_ = constraint->value_;
  } else if (op->IsExplicit()) {
    constraint->type_ = kExplicit;
  } else if (op->IsImmediate()) {
    // Record the immediate's payload, whichever encoding it uses.
    const ImmediateOperand* imm = ImmediateOperand::cast(op);
    int value = imm->type() == ImmediateOperand::INLINE ? imm->inline_value()
                                                        : imm->indexed_value();
    constraint->type_ = kImmediate;
    constraint->value_ = value;
  } else {
    CHECK(op->IsUnallocated());
    const UnallocatedOperand* unallocated = UnallocatedOperand::cast(op);
    int vreg = unallocated->virtual_register();
    constraint->virtual_register_ = vreg;
    if (unallocated->basic_policy() == UnallocatedOperand::FIXED_SLOT) {
      constraint->type_ = kFixedSlot;
      constraint->value_ = unallocated->fixed_slot_index();
    } else {
      switch (unallocated->extended_policy()) {
        case UnallocatedOperand::REGISTER_OR_SLOT:
        case UnallocatedOperand::NONE:
          // Split general-purpose from floating-point, since the checks for
          // the allocated operand differ (IsRegister vs IsFPRegister).
          if (sequence()->IsFP(vreg)) {
            constraint->type_ = kRegisterOrSlotFP;
          } else {
            constraint->type_ = kRegisterOrSlot;
          }
          break;
        case UnallocatedOperand::REGISTER_OR_SLOT_OR_CONSTANT:
          DCHECK(!sequence()->IsFP(vreg));
          constraint->type_ = kRegisterOrSlotOrConstant;
          break;
        case UnallocatedOperand::FIXED_REGISTER:
          if (unallocated->HasSecondaryStorage()) {
            // The value also lives in a spill slot; remember it so
            // VerifyGapMoves can record the stack definition too.
            constraint->type_ = kRegisterAndSlot;
            constraint->spilled_slot_ = unallocated->GetSecondaryStorage();
          } else {
            constraint->type_ = kFixedRegister;
          }
          constraint->value_ = unallocated->fixed_register_index();
          break;
        case UnallocatedOperand::FIXED_FP_REGISTER:
          constraint->type_ = kFixedFPRegister;
          constraint->value_ = unallocated->fixed_register_index();
          break;
        case UnallocatedOperand::MUST_HAVE_REGISTER:
          if (sequence()->IsFP(vreg)) {
            constraint->type_ = kFPRegister;
          } else {
            constraint->type_ = kRegister;
          }
          break;
        case UnallocatedOperand::MUST_HAVE_SLOT:
          constraint->type_ = kSlot;
          // value_ carries the expected element size so CheckConstraint can
          // compare against the allocated slot's representation.
          constraint->value_ =
              ElementSizeLog2Of(sequence()->GetRepresentation(vreg));
          break;
        case UnallocatedOperand::SAME_AS_FIRST_INPUT:
          // Rewritten to the first input's constraint by the constructor.
          constraint->type_ = kSameAsFirst;
          break;
      }
    }
  }
}
214 :
// Checks that the post-allocation operand |op| satisfies |constraint|,
// which was built from the pre-allocation operand by BuildConstraint.
// All failures report caller_info_ set in VerifyAssignment.
void RegisterAllocatorVerifier::CheckConstraint(
    const InstructionOperand* op, const OperandConstraint* constraint) {
  switch (constraint->type_) {
    case kConstant:
      CHECK_WITH_MSG(op->IsConstant(), caller_info_);
      CHECK_EQ(ConstantOperand::cast(op)->virtual_register(),
               constraint->value_);
      return;
    case kImmediate: {
      CHECK_WITH_MSG(op->IsImmediate(), caller_info_);
      const ImmediateOperand* imm = ImmediateOperand::cast(op);
      // Compare the payload, whichever encoding the immediate uses.
      int value = imm->type() == ImmediateOperand::INLINE
                      ? imm->inline_value()
                      : imm->indexed_value();
      CHECK_EQ(value, constraint->value_);
      return;
    }
    case kRegister:
      CHECK_WITH_MSG(op->IsRegister(), caller_info_);
      return;
    case kFPRegister:
      CHECK_WITH_MSG(op->IsFPRegister(), caller_info_);
      return;
    case kExplicit:
      CHECK_WITH_MSG(op->IsExplicit(), caller_info_);
      return;
    case kFixedRegister:
    case kRegisterAndSlot:
      // For kRegisterAndSlot only the register half is checked here; the
      // slot half is recorded separately in VerifyGapMoves.
      CHECK_WITH_MSG(op->IsRegister(), caller_info_);
      CHECK_EQ(LocationOperand::cast(op)->register_code(), constraint->value_);
      return;
    case kFixedFPRegister:
      CHECK_WITH_MSG(op->IsFPRegister(), caller_info_);
      CHECK_EQ(LocationOperand::cast(op)->register_code(), constraint->value_);
      return;
    case kFixedSlot:
      CHECK_WITH_MSG(op->IsStackSlot() || op->IsFPStackSlot(), caller_info_);
      CHECK_EQ(LocationOperand::cast(op)->index(), constraint->value_);
      return;
    case kSlot:
      CHECK_WITH_MSG(op->IsStackSlot() || op->IsFPStackSlot(), caller_info_);
      // value_ holds the expected element size (see BuildConstraint).
      CHECK_EQ(ElementSizeLog2Of(LocationOperand::cast(op)->representation()),
               constraint->value_);
      return;
    case kRegisterOrSlot:
      CHECK_WITH_MSG(op->IsRegister() || op->IsStackSlot(), caller_info_);
      return;
    case kRegisterOrSlotFP:
      CHECK_WITH_MSG(op->IsFPRegister() || op->IsFPStackSlot(), caller_info_);
      return;
    case kRegisterOrSlotOrConstant:
      CHECK_WITH_MSG(op->IsRegister() || op->IsStackSlot() || op->IsConstant(),
                     caller_info_);
      return;
    case kSameAsFirst:
      // Must never survive to allocation — the constructor rewrote it.
      CHECK_WITH_MSG(false, caller_info_);
      return;
  }
}
274 :
275 412 : void BlockAssessments::PerformMoves(const Instruction* instruction) {
276 : const ParallelMove* first =
277 : instruction->GetParallelMove(Instruction::GapPosition::START);
278 412 : PerformParallelMoves(first);
279 : const ParallelMove* last =
280 : instruction->GetParallelMove(Instruction::GapPosition::END);
281 412 : PerformParallelMoves(last);
282 412 : }
283 :
284 824 : void BlockAssessments::PerformParallelMoves(const ParallelMove* moves) {
285 1648 : if (moves == nullptr) return;
286 :
287 235 : CHECK(map_for_moves_.empty());
288 793 : for (MoveOperands* move : *moves) {
289 323 : if (move->IsEliminated() || move->IsRedundant()) continue;
290 220 : auto it = map_.find(move->source());
291 : // The RHS of a parallel move should have been already assessed.
292 220 : CHECK(it != map_.end());
293 : // The LHS of a parallel move should not have been assigned in this
294 : // parallel move.
295 440 : CHECK(map_for_moves_.find(move->destination()) == map_for_moves_.end());
296 : // Copy the assessment to the destination.
297 220 : map_for_moves_[move->destination()] = it->second;
298 : }
299 690 : for (auto pair : map_for_moves_) {
300 220 : map_[pair.first] = pair.second;
301 : }
302 : map_for_moves_.clear();
303 : }
304 :
305 10 : void BlockAssessments::DropRegisters() {
306 254 : for (auto iterator = map().begin(), end = map().end(); iterator != end;) {
307 : auto current = iterator;
308 : ++iterator;
309 234 : InstructionOperand op = current->first;
310 234 : if (op.IsAnyRegister()) map().erase(current);
311 : }
312 10 : }
313 :
314 0 : void BlockAssessments::Print() const {
315 0 : StdoutStream os;
316 0 : for (const auto pair : map()) {
317 0 : const InstructionOperand op = pair.first;
318 : const Assessment* assessment = pair.second;
319 : // Use operator<< so we can write the assessment on the same
320 : // line.
321 0 : os << op << " : ";
322 0 : if (assessment->kind() == AssessmentKind::Final) {
323 0 : os << "v" << FinalAssessment::cast(assessment)->virtual_register();
324 : } else {
325 0 : os << "P";
326 : }
327 : os << std::endl;
328 : }
329 0 : os << std::endl;
330 0 : }
331 :
// Builds the initial assessment state for |block| from its predecessors:
// no predecessors -> empty map; a single predecessor with no phis -> a
// straight copy; otherwise a PendingAssessment per incoming operand, to be
// resolved lazily when the operand is first used.
BlockAssessments* RegisterAllocatorVerifier::CreateForBlock(
    const InstructionBlock* block) {
  RpoNumber current_block_id = block->rpo_number();

  BlockAssessments* ret = new (zone()) BlockAssessments(zone());
  if (block->PredecessorCount() == 0) {
    // TODO(mtrofin): the following check should hold, however, in certain
    // unit tests it is invalidated by the last block. Investigate and
    // normalize the CFG.
    // CHECK_EQ(0, current_block_id.ToInt());
    // The phi size test below is because we can, technically, have phi
    // instructions with one argument. Some tests expose that, too.
  } else if (block->PredecessorCount() == 1 && block->phis().size() == 0) {
    // Single fall-through predecessor: its state carries over unchanged.
    const BlockAssessments* prev_block = assessments_[block->predecessors()[0]];
    ret->CopyFrom(prev_block);
  } else {
    for (RpoNumber pred_id : block->predecessors()) {
      // For every operand coming from any of the predecessors, create an
      // Unfinalized assessment.
      auto iterator = assessments_.find(pred_id);
      if (iterator == assessments_.end()) {
        // This block is the head of a loop, and this predecessor is the
        // loopback
        // arc.
        // Validate this is a loop case, otherwise the CFG is malformed.
        CHECK(pred_id >= current_block_id);
        CHECK(block->IsLoopHeader());
        continue;
      }
      const BlockAssessments* pred_assessments = iterator->second;
      CHECK_NOT_NULL(pred_assessments);
      for (auto pair : pred_assessments->map()) {
        InstructionOperand operand = pair.first;
        // First predecessor to mention an operand wins; later ones only
        // contribute when the pending assessment is eventually validated.
        if (ret->map().find(operand) == ret->map().end()) {
          ret->map().insert(std::make_pair(
              operand, new (zone()) PendingAssessment(zone(), block, operand)));
        }
      }
    }
  }
  return ret;
}
374 :
// Resolves a PendingAssessment: checks that, along every predecessor path
// into the block where |assessment| originated, the operand indeed carried
// |virtual_register| (or the corresponding phi input). Contributions from
// not-yet-visited loopback predecessors are deferred to
// outstanding_assessments_ and re-checked in VerifyGapMoves once that
// block has been processed. On success, the vreg is cached as an alias so
// repeated uses validate in O(1).
void RegisterAllocatorVerifier::ValidatePendingAssessment(
    RpoNumber block_id, InstructionOperand op,
    const BlockAssessments* current_assessments,
    PendingAssessment* const assessment, int virtual_register) {
  if (assessment->IsAliasOf(virtual_register)) return;

  // When validating a pending assessment, it is possible some of the
  // assessments for the original operand (the one where the assessment was
  // created for first) are also pending. To avoid recursion, we use a work
  // list. To deal with cycles, we keep a set of seen nodes.
  Zone local_zone(zone()->allocator(), ZONE_NAME);
  ZoneQueue<std::pair<const PendingAssessment*, int>> worklist(&local_zone);
  ZoneSet<RpoNumber> seen(&local_zone);
  worklist.push(std::make_pair(assessment, virtual_register));
  seen.insert(block_id);

  while (!worklist.empty()) {
    auto work = worklist.front();
    const PendingAssessment* current_assessment = work.first;
    int current_virtual_register = work.second;
    InstructionOperand current_operand = current_assessment->operand();
    worklist.pop();

    // Pending assessments are only created for merge points (multiple
    // predecessors or phis) — see CreateForBlock.
    const InstructionBlock* origin = current_assessment->origin();
    CHECK(origin->PredecessorCount() > 1 || origin->phis().size() > 0);

    // Check if the virtual register is a phi first, instead of relying on
    // the incoming assessments. In particular, this handles the case
    // v1 = phi v0 v0, which structurally is identical to v0 having been
    // defined at the top of a diamond, and arriving at the node joining the
    // diamond's branches.
    const PhiInstruction* phi = nullptr;
    for (const PhiInstruction* candidate : origin->phis()) {
      if (candidate->virtual_register() == current_virtual_register) {
        phi = candidate;
        break;
      }
    }

    int op_index = 0;
    for (RpoNumber pred : origin->predecessors()) {
      // For a phi, the expected vreg is the phi input for this predecessor;
      // otherwise the same vreg must flow in from every predecessor.
      int expected =
          phi != nullptr ? phi->operands()[op_index] : current_virtual_register;

      ++op_index;
      auto pred_assignment = assessments_.find(pred);
      if (pred_assignment == assessments_.end()) {
        // Loopback arc: the predecessor hasn't been processed yet. Record a
        // delayed assessment to be checked when VerifyGapMoves reaches it.
        CHECK(origin->IsLoopHeader());
        auto todo_iter = outstanding_assessments_.find(pred);
        DelayedAssessments* set = nullptr;
        if (todo_iter == outstanding_assessments_.end()) {
          set = new (zone()) DelayedAssessments(zone());
          outstanding_assessments_.insert(std::make_pair(pred, set));
        } else {
          set = todo_iter->second;
        }
        set->AddDelayedAssessment(current_operand, expected);
        continue;
      }

      const BlockAssessments* pred_assessments = pred_assignment->second;
      auto found_contribution = pred_assessments->map().find(current_operand);
      CHECK(found_contribution != pred_assessments->map().end());
      Assessment* contribution = found_contribution->second;

      switch (contribution->kind()) {
        case Final:
          CHECK_EQ(FinalAssessment::cast(contribution)->virtual_register(),
                   expected);
          break;
        case Pending: {
          // This happens if we have a diamond feeding into another one, and
          // the inner one never being used - other than for carrying the value.
          const PendingAssessment* next = PendingAssessment::cast(contribution);
          if (seen.find(pred) == seen.end()) {
            worklist.push({next, expected});
            seen.insert(pred);
          }
          // Note that we do not want to finalize pending assessments at the
          // beginning of a block - which is the information we'd have
          // available here. This is because this operand may be reused to
          // define duplicate phis.
          break;
        }
      }
    }
  }
  assessment->AddAlias(virtual_register);
}
464 :
465 215 : void RegisterAllocatorVerifier::ValidateUse(
466 : RpoNumber block_id, BlockAssessments* current_assessments,
467 : InstructionOperand op, int virtual_register) {
468 : auto iterator = current_assessments->map().find(op);
469 : // We should have seen this operand before.
470 215 : CHECK(iterator != current_assessments->map().end());
471 215 : Assessment* assessment = iterator->second;
472 :
473 215 : switch (assessment->kind()) {
474 : case Final:
475 131 : CHECK_EQ(FinalAssessment::cast(assessment)->virtual_register(),
476 : virtual_register);
477 : break;
478 : case Pending: {
479 84 : PendingAssessment* pending = PendingAssessment::cast(assessment);
480 : ValidatePendingAssessment(block_id, op, current_assessments, pending,
481 84 : virtual_register);
482 84 : break;
483 : }
484 : }
485 215 : }
486 :
487 226 : void RegisterAllocatorVerifier::VerifyGapMoves() {
488 42 : CHECK(assessments_.empty());
489 42 : CHECK(outstanding_assessments_.empty());
490 42 : const size_t block_count = sequence()->instruction_blocks().size();
491 184 : for (size_t block_index = 0; block_index < block_count; ++block_index) {
492 696 : const InstructionBlock* block =
493 284 : sequence()->instruction_blocks()[block_index];
494 142 : BlockAssessments* block_assessments = CreateForBlock(block);
495 :
496 1108 : for (int instr_index = block->code_start(); instr_index < block->code_end();
497 : ++instr_index) {
498 412 : const InstructionConstraint& instr_constraint = constraints_[instr_index];
499 2213 : const Instruction* instr = instr_constraint.instruction_;
500 412 : block_assessments->PerformMoves(instr);
501 :
502 : const OperandConstraint* op_constraints =
503 412 : instr_constraint.operand_constraints_;
504 : size_t count = 0;
505 1456 : for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
506 316 : if (op_constraints[count].type_ == kImmediate ||
507 : op_constraints[count].type_ == kExplicit) {
508 101 : continue;
509 : }
510 215 : int virtual_register = op_constraints[count].virtual_register_;
511 215 : InstructionOperand op = *instr->InputAt(i);
512 : ValidateUse(block->rpo_number(), block_assessments, op,
513 215 : virtual_register);
514 : }
515 412 : for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
516 : block_assessments->Drop(*instr->TempAt(i));
517 : }
518 412 : if (instr->IsCall()) {
519 10 : block_assessments->DropRegisters();
520 : }
521 910 : for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
522 249 : int virtual_register = op_constraints[count].virtual_register_;
523 249 : block_assessments->AddDefinition(*instr->OutputAt(i), virtual_register);
524 249 : if (op_constraints[count].type_ == kRegisterAndSlot) {
525 : const AllocatedOperand* reg_op =
526 : AllocatedOperand::cast(instr->OutputAt(i));
527 : MachineRepresentation rep = reg_op->representation();
528 : const AllocatedOperand* stack_op = AllocatedOperand::New(
529 : zone(), LocationOperand::LocationKind::STACK_SLOT, rep,
530 0 : op_constraints[i].spilled_slot_);
531 0 : block_assessments->AddDefinition(*stack_op, virtual_register);
532 : }
533 : }
534 : }
535 : // Now commit the assessments for this block. If there are any delayed
536 : // assessments, ValidatePendingAssessment should see this block, too.
537 142 : assessments_[block->rpo_number()] = block_assessments;
538 :
539 284 : auto todo_iter = outstanding_assessments_.find(block->rpo_number());
540 142 : if (todo_iter == outstanding_assessments_.end()) continue;
541 2 : DelayedAssessments* todo = todo_iter->second;
542 10 : for (auto pair : todo->map()) {
543 6 : InstructionOperand op = pair.first;
544 : int vreg = pair.second;
545 : auto found_op = block_assessments->map().find(op);
546 6 : CHECK(found_op != block_assessments->map().end());
547 12 : switch (found_op->second->kind()) {
548 : case Final:
549 5 : CHECK_EQ(FinalAssessment::cast(found_op->second)->virtual_register(),
550 : vreg);
551 : break;
552 : case Pending:
553 : ValidatePendingAssessment(block->rpo_number(), op, block_assessments,
554 : PendingAssessment::cast(found_op->second),
555 1 : vreg);
556 1 : break;
557 : }
558 : }
559 : }
560 42 : }
561 :
562 : } // namespace compiler
563 : } // namespace internal
564 178779 : } // namespace v8
|