Line data Source code
1 : // Copyright 2013 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/compiler/code-generator.h"
6 :
7 : #include "src/address-map.h"
8 : #include "src/assembler-inl.h"
9 : #include "src/base/adapters.h"
10 : #include "src/compilation-info.h"
11 : #include "src/compiler/code-generator-impl.h"
12 : #include "src/compiler/linkage.h"
13 : #include "src/compiler/pipeline.h"
14 : #include "src/frames-inl.h"
15 : #include "src/macro-assembler-inl.h"
16 :
17 : namespace v8 {
18 : namespace internal {
19 : namespace compiler {
20 :
// A node in an intrusive singly-linked list of jump tables to be emitted at
// the end of code generation (see the jump_tables_ loop in GenerateCode).
// Each node carries a label bound at the table's emission point plus a
// non-owned array of branch target labels.
class CodeGenerator::JumpTable final : public ZoneObject {
 public:
  JumpTable(JumpTable* next, Label** targets, size_t target_count)
      : next_(next), targets_(targets), target_count_(target_count) {}

  // Label bound where this table is emitted; branches jump through it.
  Label* label() { return &label_; }
  // Next table in the list; nullptr terminates.
  JumpTable* next() const { return next_; }
  // Array of target labels; not owned by this node.
  Label** targets() const { return targets_; }
  size_t target_count() const { return target_count_; }

 private:
  Label label_;
  JumpTable* const next_;
  Label** const targets_;
  size_t const target_count_;
};
37 :
// Sets up all per-compilation state for assembling {code}.  Most side tables
// are zone-allocated in the instruction sequence's zone (code->zone()).
CodeGenerator::CodeGenerator(Frame* frame, Linkage* linkage,
                             InstructionSequence* code, CompilationInfo* info)
    : frame_access_state_(nullptr),
      linkage_(linkage),
      code_(code),
      unwinding_info_writer_(zone()),
      info_(info),
      labels_(zone()->NewArray<Label>(code->InstructionBlockCount())),
      current_block_(RpoNumber::Invalid()),
      current_source_position_(SourcePosition::Unknown()),
      masm_(info->isolate(), nullptr, 0, CodeObjectRequired::kNo),
      resolver_(this),
      safepoints_(code->zone()),
      handlers_(code->zone()),
      deoptimization_exits_(code->zone()),
      deoptimization_states_(code->zone()),
      deoptimization_literals_(code->zone()),
      inlined_function_count_(0),
      translations_(code->zone()),
      last_lazy_deopt_pc_(0),
      jump_tables_(nullptr),
      ools_(nullptr),
      osr_pc_offset_(-1),
      optimized_out_literal_id_(-1),
      source_position_table_builder_(code->zone(),
                                     info->SourcePositionRecordingMode()) {
  // NewArray returns raw storage, so placement-new each per-block label.
  for (int i = 0; i < code->InstructionBlockCount(); ++i) {
    new (&labels_[i]) Label;
  }
  CreateFrameAccessState(frame);
}
69 :
70 11478247 : Isolate* CodeGenerator::isolate() const { return info_->isolate(); }
71 :
72 1824784 : void CodeGenerator::CreateFrameAccessState(Frame* frame) {
73 912392 : FinishFrame(frame);
74 1824784 : frame_access_state_ = new (code()->zone()) FrameAccessState(frame);
75 912392 : }
76 :
77 :
78 30055628 : Handle<Code> CodeGenerator::GenerateCode() {
79 : CompilationInfo* info = this->info();
80 :
81 : // Open a frame scope to indicate that there is a frame on the stack. The
82 : // MANUAL indicates that the scope shouldn't actually generate code to set up
83 : // the frame (that is done in AssemblePrologue).
84 912392 : FrameScope frame_scope(masm(), StackFrame::MANUAL);
85 :
86 912392 : if (info->is_source_positions_enabled()) {
87 87084 : SourcePosition source_position(info->shared_info()->start_position());
88 43542 : AssembleSourcePosition(source_position);
89 : }
90 :
91 : // Place function entry hook if requested to do so.
92 912392 : if (linkage()->GetIncomingDescriptor()->IsJSFunctionCall()) {
93 429002 : ProfileEntryHookStub::MaybeCallEntryHook(masm());
94 : }
95 : // Architecture-specific, linkage-specific prologue.
96 912392 : info->set_prologue_offset(masm()->pc_offset());
97 :
98 : // Define deoptimization literals for all inlined functions.
99 : DCHECK_EQ(0u, deoptimization_literals_.size());
100 1862329 : for (CompilationInfo::InlinedFunctionHolder& inlined :
101 : info->inlined_functions()) {
102 75090 : if (!inlined.shared_info.is_identical_to(info->shared_info())) {
103 37545 : int index = DefineDeoptimizationLiteral(inlined.shared_info);
104 : inlined.RegisterInlinedFunctionId(index);
105 : }
106 : }
107 912392 : inlined_function_count_ = deoptimization_literals_.size();
108 :
109 : // Define deoptimization literals for all unoptimized code objects of inlined
110 : // functions. This ensures unoptimized code is kept alive by optimized code.
111 1862329 : for (const CompilationInfo::InlinedFunctionHolder& inlined :
112 : info->inlined_functions()) {
113 75090 : if (!inlined.shared_info.is_identical_to(info->shared_info())) {
114 37545 : DefineDeoptimizationLiteral(inlined.inlined_code_object_root);
115 : }
116 : }
117 :
118 : unwinding_info_writer_.SetNumberOfInstructionBlocks(
119 912392 : code()->InstructionBlockCount());
120 :
121 : // Assemble all non-deferred blocks, followed by deferred ones.
122 1824783 : for (int deferred = 0; deferred < 2; ++deferred) {
123 60636282 : for (const InstructionBlock* block : code()->instruction_blocks()) {
124 22794689 : if (block->IsDeferred() == (deferred == 0)) {
125 : continue;
126 : }
127 : // Align loop headers on 16-byte boundaries.
128 11397342 : if (block->IsLoopHeader()) masm()->Align(16);
129 : // Ensure lazy deopt doesn't patch handler entry points.
130 11397343 : if (block->IsHandler()) EnsureSpaceForLazyDeopt();
131 : // Bind a label for a block.
132 11397343 : current_block_ = block->rpo_number();
133 22794686 : unwinding_info_writer_.BeginInstructionBlock(masm()->pc_offset(), block);
134 11397343 : if (FLAG_code_comments) {
135 : // TODO(titzer): these code comments are a giant memory leak.
136 0 : Vector<char> buffer = Vector<char>::New(200);
137 : char* buffer_start = buffer.start();
138 :
139 : int next = SNPrintF(
140 : buffer, "-- B%d start%s%s%s%s", block->rpo_number().ToInt(),
141 : block->IsDeferred() ? " (deferred)" : "",
142 : block->needs_frame() ? "" : " (no frame)",
143 : block->must_construct_frame() ? " (construct frame)" : "",
144 0 : block->must_deconstruct_frame() ? " (deconstruct frame)" : "");
145 :
146 : buffer = buffer.SubVector(next, buffer.length());
147 :
148 0 : if (block->IsLoopHeader()) {
149 : next =
150 0 : SNPrintF(buffer, " (loop up to %d)", block->loop_end().ToInt());
151 : buffer = buffer.SubVector(next, buffer.length());
152 : }
153 0 : if (block->loop_header().IsValid()) {
154 : next =
155 0 : SNPrintF(buffer, " (in loop %d)", block->loop_header().ToInt());
156 : buffer = buffer.SubVector(next, buffer.length());
157 : }
158 0 : SNPrintF(buffer, " --");
159 0 : masm()->RecordComment(buffer_start);
160 : }
161 :
162 22794686 : frame_access_state()->MarkHasFrame(block->needs_frame());
163 :
164 11397343 : masm()->bind(GetLabel(current_block_));
165 11397341 : if (block->must_construct_frame()) {
166 874199 : AssembleConstructFrame();
167 : // We need to setup the root register after we assemble the prologue, to
168 : // avoid clobbering callee saved registers in case of C linkage and
169 : // using the roots.
170 : // TODO(mtrofin): investigate how we can avoid doing this repeatedly.
171 874199 : if (linkage()->GetIncomingDescriptor()->InitializeRootRegister()) {
172 224034 : masm()->InitializeRootRegister();
173 : }
174 : }
175 :
176 : CodeGenResult result;
177 : if (FLAG_enable_embedded_constant_pool && !block->needs_frame()) {
178 : ConstantPoolUnavailableScope constant_pool_unavailable(masm());
179 : result = AssembleBlock(block);
180 : } else {
181 11397341 : result = AssembleBlock(block);
182 : }
183 11397345 : if (result != kSuccess) return Handle<Code>();
184 11397345 : unwinding_info_writer_.EndInstructionBlock(block);
185 : }
186 : }
187 :
188 : // Assemble all out-of-line code.
189 912391 : if (ools_) {
190 131070 : masm()->RecordComment("-- Out of line code --");
191 692607 : for (OutOfLineCode* ool = ools_; ool; ool = ool->next()) {
192 561537 : masm()->bind(ool->entry());
193 561537 : ool->Generate();
194 561537 : if (ool->exit()->is_bound()) masm()->jmp(ool->exit());
195 : }
196 : }
197 :
198 : // Assemble all eager deoptimization exits.
199 1699592 : for (DeoptimizationExit* exit : deoptimization_exits_) {
200 262400 : masm()->bind(exit->label());
201 262400 : AssembleDeoptimizerCall(exit->deoptimization_id(), exit->pos());
202 : }
203 :
204 : // Ensure there is space for lazy deoptimization in the code.
205 912392 : if (info->ShouldEnsureSpaceForLazyDeopt()) {
206 783628 : int target_offset = masm()->pc_offset() + Deoptimizer::patch_size();
207 11362590 : while (masm()->pc_offset() < target_offset) {
208 5093574 : masm()->nop();
209 : }
210 : }
211 :
212 912392 : FinishCode();
213 :
214 : // Emit the jump tables.
215 912392 : if (jump_tables_) {
216 3734 : masm()->Align(kPointerSize);
217 15878 : for (JumpTable* table = jump_tables_; table; table = table->next()) {
218 6072 : masm()->bind(table->label());
219 6072 : AssembleJumpTable(table->targets(), table->target_count());
220 : }
221 : }
222 :
223 : // The PerfJitLogger logs code up until here, excluding the safepoint
224 : // table. Resolve the unwinding info now so it is aware of the same code size
225 : // as reported by perf.
226 912392 : unwinding_info_writer_.Finish(masm()->pc_offset());
227 :
228 912392 : safepoints()->Emit(masm(), frame()->GetTotalFrameSlotCount());
229 :
230 : Handle<Code> result = v8::internal::CodeGenerator::MakeCodeEpilogue(
231 912391 : masm(), unwinding_info_writer_.eh_frame_writer(), info, Handle<Object>());
232 : result->set_is_turbofanned(true);
233 912392 : result->set_stack_slots(frame()->GetTotalFrameSlotCount());
234 1824784 : result->set_safepoint_table_offset(safepoints()->GetCodeOffset());
235 : Handle<ByteArray> source_positions =
236 : source_position_table_builder_.ToSourcePositionTable(
237 912392 : isolate(), Handle<AbstractCode>::cast(result));
238 912392 : result->set_source_position_table(*source_positions);
239 :
240 : // Emit exception handler table.
241 1388691 : if (!handlers_.empty()) {
242 : Handle<HandlerTable> table =
243 : Handle<HandlerTable>::cast(isolate()->factory()->NewFixedArray(
244 : HandlerTable::LengthForReturn(static_cast<int>(handlers_.size())),
245 33698 : TENURED));
246 493148 : for (size_t i = 0; i < handlers_.size(); ++i) {
247 459450 : table->SetReturnOffset(static_cast<int>(i), handlers_[i].pc_offset);
248 459450 : table->SetReturnHandler(static_cast<int>(i), handlers_[i].handler->pos());
249 : }
250 16849 : result->set_handler_table(*table);
251 : }
252 :
253 912392 : PopulateDeoptimizationData(result);
254 :
255 : // Ensure there is space for lazy deoptimization in the relocation info.
256 912392 : if (info->ShouldEnsureSpaceForLazyDeopt()) {
257 391814 : Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(result);
258 : }
259 :
260 912392 : return result;
261 : }
262 :
263 :
264 9862274 : bool CodeGenerator::IsNextInAssemblyOrder(RpoNumber block) const {
265 : return code()
266 : ->InstructionBlockAt(current_block_)
267 9862276 : ->ao_number()
268 19724550 : .IsNext(code()->InstructionBlockAt(block)->ao_number());
269 : }
270 :
271 :
272 3391040 : void CodeGenerator::RecordSafepoint(ReferenceMap* references,
273 : Safepoint::Kind kind, int arguments,
274 3391033 : Safepoint::DeoptMode deopt_mode) {
275 : Safepoint safepoint =
276 3391040 : safepoints()->DefineSafepoint(masm(), kind, arguments, deopt_mode);
277 : int stackSlotToSpillSlotDelta =
278 3391033 : frame()->GetTotalFrameSlotCount() - frame()->GetSpillSlotCount();
279 49161903 : for (const InstructionOperand& operand : references->reference_operands()) {
280 42379830 : if (operand.IsStackSlot()) {
281 : int index = LocationOperand::cast(operand).index();
282 : DCHECK(index >= 0);
283 : // We might index values in the fixed part of the frame (i.e. the
284 : // closure pointer or the context pointer); these are not spill slots
285 : // and therefore don't work with the SafepointTable currently, but
286 : // we also don't need to worry about them, since the GC has special
287 : // knowledge about those fields anyway.
288 5363503 : if (index < stackSlotToSpillSlotDelta) continue;
289 5359808 : safepoint.DefinePointerSlot(index, zone());
290 37016327 : } else if (operand.IsRegister() && (kind & Safepoint::kWithRegisters)) {
291 0 : Register reg = LocationOperand::cast(operand).GetRegister();
292 0 : safepoint.DefinePointerRegister(reg, zone());
293 : }
294 : }
295 3391040 : }
296 :
297 4592154 : bool CodeGenerator::IsMaterializableFromRoot(
298 4592154 : Handle<HeapObject> object, Heap::RootListIndex* index_return) {
299 : const CallDescriptor* incoming_descriptor =
300 : linkage()->GetIncomingDescriptor();
301 4592154 : if (incoming_descriptor->flags() & CallDescriptor::kCanUseRoots) {
302 4362533 : RootIndexMap map(isolate());
303 4362531 : int root_index = map.Lookup(*object);
304 4362535 : if (root_index != RootIndexMap::kInvalidRootIndex) {
305 1551156 : *index_return = static_cast<Heap::RootListIndex>(root_index);
306 1551156 : return true;
307 : }
308 : }
309 : return false;
310 : }
311 :
312 11397341 : CodeGenerator::CodeGenResult CodeGenerator::AssembleBlock(
313 100639784 : const InstructionBlock* block) {
314 100639776 : for (int i = block->code_start(); i < block->code_end(); ++i) {
315 : Instruction* instr = code()->InstructionAt(i);
316 38922564 : CodeGenResult result = AssembleInstruction(instr, block);
317 38922542 : if (result != kSuccess) return result;
318 : }
319 : return kSuccess;
320 : }
321 :
322 44865 : bool CodeGenerator::IsValidPush(InstructionOperand source,
323 : CodeGenerator::PushTypeFlags push_type) {
324 44865 : if (source.IsImmediate() &&
325 : ((push_type & CodeGenerator::kImmediatePush) != 0)) {
326 : return true;
327 : }
328 99341 : if ((source.IsRegister() || source.IsStackSlot()) &&
329 : ((push_type & CodeGenerator::kScalarPush) != 0)) {
330 : return true;
331 : }
332 216 : if ((source.IsFloatRegister() || source.IsFloatStackSlot()) &&
333 : ((push_type & CodeGenerator::kFloat32Push) != 0)) {
334 : return true;
335 : }
336 216 : if ((source.IsDoubleRegister() || source.IsFloatStackSlot()) &&
337 : ((push_type & CodeGenerator::kFloat64Push) != 0)) {
338 : return true;
339 : }
340 108 : return false;
341 : }
342 :
343 149226 : void CodeGenerator::GetPushCompatibleMoves(Instruction* instr,
344 : PushTypeFlags push_type,
345 : ZoneVector<MoveOperands*>* pushes) {
346 231505 : pushes->clear();
347 433208 : for (int i = Instruction::FIRST_GAP_POSITION;
348 : i <= Instruction::LAST_GAP_POSITION; ++i) {
349 : Instruction::GapPosition inner_pos =
350 : static_cast<Instruction::GapPosition>(i);
351 : ParallelMove* parallel_move = instr->GetParallelMove(inner_pos);
352 291217 : if (parallel_move != nullptr) {
353 590864 : for (auto move : *parallel_move) {
354 278239 : InstructionOperand source = move->source();
355 278239 : InstructionOperand destination = move->destination();
356 : int first_push_compatible_index =
357 : V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK ? 1 : 0;
358 : // If there are any moves from slots that will be overridden by pushes,
359 : // then the full gap resolver must be used since optimization with
360 : // pushes don't participate in the parallel move and might clobber
361 : // values needed for the gap resolve.
362 295387 : if (source.IsStackSlot() &&
363 : LocationOperand::cast(source).index() >=
364 : first_push_compatible_index) {
365 : pushes->clear();
366 149226 : return;
367 : }
368 : // TODO(danno): Right now, only consider moves from the FIRST gap for
369 : // pushes. Theoretically, we could extract pushes for both gaps (there
370 : // are cases where this happens), but the logic for that would also have
371 : // to check to make sure that non-memory inputs to the pushes from the
372 : // LAST gap don't get clobbered in the FIRST gap.
373 271004 : if (i == Instruction::FIRST_GAP_POSITION) {
374 328437 : if (destination.IsStackSlot() &&
375 : LocationOperand::cast(destination).index() >=
376 : first_push_compatible_index) {
377 : int index = LocationOperand::cast(destination).index();
378 44865 : if (IsValidPush(source, push_type)) {
379 44757 : if (index >= static_cast<int>(pushes->size())) {
380 44499 : pushes->resize(index + 1);
381 : }
382 89514 : (*pushes)[index] = move;
383 : }
384 : }
385 : }
386 : }
387 : }
388 : }
389 :
390 : // For now, only support a set of continuous pushes at the end of the list.
391 : size_t push_count_upper_bound = pushes->size();
392 : size_t push_begin = push_count_upper_bound;
393 235264 : for (auto move : base::Reversed(*pushes)) {
394 51442 : if (move == nullptr) break;
395 41831 : push_begin--;
396 : }
397 141991 : size_t push_count = pushes->size() - push_begin;
398 : std::copy(pushes->begin() + push_begin,
399 : pushes->begin() + push_begin + push_count, pushes->begin());
400 141991 : pushes->resize(push_count);
401 : }
402 :
403 38922567 : CodeGenerator::CodeGenResult CodeGenerator::AssembleInstruction(
404 90979442 : Instruction* instr, const InstructionBlock* block) {
405 : int first_unused_stack_slot;
406 38922567 : FlagsMode mode = FlagsModeField::decode(instr->opcode());
407 38922567 : if (mode != kFlags_trap) {
408 38906262 : AssembleSourcePosition(instr);
409 : }
410 : bool adjust_stack =
411 38922546 : GetSlotAboveSPBeforeTailCall(instr, &first_unused_stack_slot);
412 38922545 : if (adjust_stack) AssembleTailCallBeforeGap(instr, first_unused_stack_slot);
413 38922545 : AssembleGaps(instr);
414 38922567 : if (adjust_stack) AssembleTailCallAfterGap(instr, first_unused_stack_slot);
415 : DCHECK_IMPLIES(
416 : block->must_deconstruct_frame(),
417 : instr != code()->InstructionAt(block->last_instruction_index()) ||
418 : instr->IsRet() || instr->IsJump());
419 42190122 : if (instr->IsJump() && block->must_deconstruct_frame()) {
420 117798 : AssembleDeconstructFrame();
421 : }
422 : // Assemble architecture-specific code for the instruction.
423 38922569 : CodeGenResult result = AssembleArchInstruction(instr);
424 38922546 : if (result != kSuccess) return result;
425 :
426 38922549 : FlagsCondition condition = FlagsConditionField::decode(instr->opcode());
427 38922549 : switch (mode) {
428 : case kFlags_branch: {
429 : // Assemble a branch after this instruction.
430 : InstructionOperandConverter i(this, instr);
431 3289113 : RpoNumber true_rpo = i.InputRpo(instr->InputCount() - 2);
432 3289113 : RpoNumber false_rpo = i.InputRpo(instr->InputCount() - 1);
433 :
434 3289112 : if (true_rpo == false_rpo) {
435 : // redundant branch.
436 566 : if (!IsNextInAssemblyOrder(true_rpo)) {
437 314 : AssembleArchJump(true_rpo);
438 : }
439 566 : return kSuccess;
440 : }
441 3288546 : if (IsNextInAssemblyOrder(true_rpo)) {
442 : // true block is next, can fall through if condition negated.
443 : std::swap(true_rpo, false_rpo);
444 : condition = NegateFlagsCondition(condition);
445 : }
446 : BranchInfo branch;
447 3288547 : branch.condition = condition;
448 3288547 : branch.true_label = GetLabel(true_rpo);
449 3288547 : branch.false_label = GetLabel(false_rpo);
450 3288547 : branch.fallthru = IsNextInAssemblyOrder(false_rpo);
451 : // Assemble architecture-specific branch.
452 3288546 : AssembleArchBranch(instr, &branch);
453 3288545 : break;
454 : }
455 : case kFlags_deoptimize: {
456 : // Assemble a conditional eager deoptimization after this instruction.
457 : InstructionOperandConverter i(this, instr);
458 262400 : size_t frame_state_offset = MiscField::decode(instr->opcode());
459 : DeoptimizationExit* const exit =
460 262400 : AddDeoptimizationExit(instr, frame_state_offset);
461 : Label continue_label;
462 : BranchInfo branch;
463 262400 : branch.condition = condition;
464 262400 : branch.true_label = exit->label();
465 262400 : branch.false_label = &continue_label;
466 262400 : branch.fallthru = true;
467 : // Assemble architecture-specific branch.
468 262400 : AssembleArchBranch(instr, &branch);
469 262400 : masm()->bind(&continue_label);
470 : break;
471 : }
472 : case kFlags_set: {
473 : // Assemble a boolean materialization after this instruction.
474 216577 : AssembleArchBoolean(instr, condition);
475 216577 : break;
476 : }
477 : case kFlags_trap: {
478 16305 : AssembleArchTrap(instr, condition);
479 16305 : break;
480 : }
481 : case kFlags_none: {
482 : break;
483 : }
484 : }
485 : return kSuccess;
486 : }
487 :
488 62866842 : void CodeGenerator::AssembleSourcePosition(Instruction* instr) {
489 38916765 : SourcePosition source_position = SourcePosition::Unknown();
490 74376543 : if (instr->IsNop() && instr->AreMovesRedundant()) return;
491 23950077 : if (!code()->GetSourcePosition(instr, &source_position)) return;
492 3456988 : AssembleSourcePosition(source_position);
493 : }
494 :
495 3500529 : void CodeGenerator::AssembleSourcePosition(SourcePosition source_position) {
496 3500529 : if (source_position == current_source_position_) return;
497 2034357 : current_source_position_ = source_position;
498 2034357 : if (!source_position.IsKnown()) return;
499 2034357 : source_position_table_builder_.AddPosition(masm()->pc_offset(),
500 2034357 : source_position, false);
501 2034358 : if (FLAG_code_comments) {
502 0 : CompilationInfo* info = this->info();
503 0 : if (!info->parse_info()) return;
504 0 : std::ostringstream buffer;
505 0 : buffer << "-- ";
506 0 : if (FLAG_trace_turbo) {
507 0 : buffer << source_position;
508 : } else {
509 0 : buffer << source_position.InliningStack(info);
510 : }
511 0 : buffer << " --";
512 0 : masm()->RecordComment(StrDup(buffer.str().c_str()));
513 : }
514 : }
515 :
516 39071770 : bool CodeGenerator::GetSlotAboveSPBeforeTailCall(Instruction* instr,
517 : int* slot) {
518 38922544 : if (instr->IsTailCall()) {
519 : InstructionOperandConverter g(this, instr);
520 149226 : *slot = g.InputInt32(instr->InputCount() - 1);
521 : return true;
522 : } else {
523 : return false;
524 : }
525 : }
526 :
527 38922452 : void CodeGenerator::AssembleGaps(Instruction* instr) {
528 116767545 : for (int i = Instruction::FIRST_GAP_POSITION;
529 : i <= Instruction::LAST_GAP_POSITION; i++) {
530 : Instruction::GapPosition inner_pos =
531 : static_cast<Instruction::GapPosition>(i);
532 : ParallelMove* move = instr->GetParallelMove(inner_pos);
533 77844981 : if (move != nullptr) resolver()->Resolve(move);
534 : }
535 38922564 : }
536 :
537 : namespace {
538 :
539 391806 : Handle<PodArray<InliningPosition>> CreateInliningPositions(
540 391806 : CompilationInfo* info) {
541 438752 : const CompilationInfo::InlinedFunctionList& inlined_functions =
542 : info->inlined_functions();
543 391806 : if (inlined_functions.size() == 0) {
544 : return Handle<PodArray<InliningPosition>>::cast(
545 : info->isolate()->factory()->empty_byte_array());
546 : }
547 : Handle<PodArray<InliningPosition>> inl_positions =
548 : PodArray<InliningPosition>::New(
549 9401 : info->isolate(), static_cast<int>(inlined_functions.size()), TENURED);
550 93892 : for (size_t i = 0; i < inlined_functions.size(); ++i) {
551 37545 : inl_positions->set(static_cast<int>(i), inlined_functions[i].position);
552 : }
553 9401 : return inl_positions;
554 : }
555 :
556 : } // namespace
557 :
558 912392 : void CodeGenerator::PopulateDeoptimizationData(Handle<Code> code_object) {
559 391807 : CompilationInfo* info = this->info();
560 912392 : int deopt_count = static_cast<int>(deoptimization_states_.size());
561 1953562 : if (deopt_count == 0 && !info->is_osr()) return;
562 : Handle<DeoptimizationInputData> data =
563 391807 : DeoptimizationInputData::New(isolate(), deopt_count, TENURED);
564 :
565 : Handle<ByteArray> translation_array =
566 391807 : translations_.CreateByteArray(isolate()->factory());
567 :
568 : data->SetTranslationByteArray(*translation_array);
569 : data->SetInlinedFunctionCount(
570 391807 : Smi::FromInt(static_cast<int>(inlined_function_count_)));
571 : data->SetOptimizationId(Smi::FromInt(info->optimization_id()));
572 :
573 391807 : if (info->has_shared_info()) {
574 783614 : data->SetSharedFunctionInfo(*info->shared_info());
575 : } else {
576 : data->SetSharedFunctionInfo(Smi::kZero);
577 : }
578 :
579 : Handle<FixedArray> literals = isolate()->factory()->NewFixedArray(
580 783614 : static_cast<int>(deoptimization_literals_.size()), TENURED);
581 : {
582 : AllowDeferredHandleDereference copy_handles;
583 3938088 : for (unsigned i = 0; i < deoptimization_literals_.size(); i++) {
584 3154474 : literals->set(i, *deoptimization_literals_[i]);
585 : }
586 : data->SetLiteralArray(*literals);
587 : }
588 :
589 391806 : Handle<PodArray<InliningPosition>> inl_pos = CreateInliningPositions(info);
590 : data->SetInliningPositions(*inl_pos);
591 :
592 391806 : if (info->is_osr()) {
593 : DCHECK(osr_pc_offset_ >= 0);
594 5812 : data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id().ToInt()));
595 5812 : data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));
596 : } else {
597 : BailoutId osr_ast_id = BailoutId::None();
598 : data->SetOsrAstId(Smi::FromInt(osr_ast_id.ToInt()));
599 : data->SetOsrPcOffset(Smi::FromInt(-1));
600 : }
601 :
602 : // Populate deoptimization entries.
603 4411713 : for (int i = 0; i < deopt_count; i++) {
604 8823426 : DeoptimizationState* deoptimization_state = deoptimization_states_[i];
605 : data->SetAstId(i, deoptimization_state->bailout_id());
606 4411713 : CHECK(deoptimization_states_[i]);
607 : data->SetTranslationIndex(
608 8823426 : i, Smi::FromInt(deoptimization_states_[i]->translation_id()));
609 4411713 : data->SetArgumentsStackHeight(i, Smi::kZero);
610 4411713 : data->SetPc(i, Smi::FromInt(deoptimization_state->pc_offset()));
611 : }
612 :
613 391807 : code_object->set_deoptimization_data(*data);
614 : }
615 :
616 :
617 6072 : Label* CodeGenerator::AddJumpTable(Label** targets, size_t target_count) {
618 12144 : jump_tables_ = new (zone()) JumpTable(jump_tables_, targets, target_count);
619 6072 : return jump_tables_->label();
620 : }
621 :
622 :
623 7217284 : void CodeGenerator::RecordCallPosition(Instruction* instr) {
624 3378917 : CallDescriptor::Flags flags(MiscField::decode(instr->opcode()));
625 :
626 3378917 : bool needs_frame_state = (flags & CallDescriptor::kNeedsFrameState);
627 :
628 : RecordSafepoint(
629 : instr->reference_map(), Safepoint::kSimple, 0,
630 6757834 : needs_frame_state ? Safepoint::kLazyDeopt : Safepoint::kNoLazyDeopt);
631 :
632 3378917 : if (flags & CallDescriptor::kHasExceptionHandler) {
633 : InstructionOperandConverter i(this, instr);
634 229725 : RpoNumber handler_rpo = i.InputRpo(instr->InputCount() - 1);
635 689175 : handlers_.push_back({GetLabel(handler_rpo), masm()->pc_offset()});
636 : }
637 :
638 3378917 : if (needs_frame_state) {
639 : MarkLazyDeoptSite();
640 : // If the frame state is present, it starts at argument 1 (just after the
641 : // code address).
642 : size_t frame_state_offset = 1;
643 : FrameStateDescriptor* descriptor =
644 2398149 : GetDeoptimizationEntry(instr, frame_state_offset).descriptor();
645 2398149 : int pc_offset = masm()->pc_offset();
646 : int deopt_state_id = BuildTranslation(instr, pc_offset, frame_state_offset,
647 2398149 : descriptor->state_combine());
648 : // If the pre-call frame state differs from the post-call one, produce the
649 : // pre-call frame state, too.
650 : // TODO(jarin) We might want to avoid building the pre-call frame state
651 : // because it is only used to get locals and arguments (by the debugger and
652 : // f.arguments), and those are the same in the pre-call and post-call
653 : // states.
654 2398148 : if (!descriptor->state_combine().IsOutputIgnored()) {
655 : deopt_state_id = BuildTranslation(instr, -1, frame_state_offset,
656 1735600 : OutputFrameStateCombine::Ignore());
657 : }
658 2398148 : safepoints()->RecordLazyDeoptimizationIndex(deopt_state_id);
659 : }
660 3378916 : }
661 :
662 :
663 9547021 : int CodeGenerator::DefineDeoptimizationLiteral(Handle<Object> literal) {
664 9547021 : int result = static_cast<int>(deoptimization_literals_.size());
665 1133712122 : for (unsigned i = 0; i < deoptimization_literals_.size(); ++i) {
666 565278880 : if (deoptimization_literals_[i].is_identical_to(literal)) return i;
667 : }
668 1577237 : deoptimization_literals_.push_back(literal);
669 1577236 : return result;
670 : }
671 :
672 6809849 : DeoptimizationEntry const& CodeGenerator::GetDeoptimizationEntry(
673 6809851 : Instruction* instr, size_t frame_state_offset) {
674 : InstructionOperandConverter i(this, instr);
675 6809849 : int const state_id = i.InputInt32(frame_state_offset);
676 6809851 : return code()->GetDeoptimizationEntry(state_id);
677 : }
678 :
679 277964 : DeoptimizeKind CodeGenerator::GetDeoptimizationKind(
680 : int deoptimization_id) const {
681 : size_t const index = static_cast<size_t>(deoptimization_id);
682 : DCHECK_LT(index, deoptimization_states_.size());
683 277964 : return deoptimization_states_[index]->kind();
684 : }
685 :
686 277964 : DeoptimizeReason CodeGenerator::GetDeoptimizationReason(
687 : int deoptimization_id) const {
688 : size_t const index = static_cast<size_t>(deoptimization_id);
689 : DCHECK_LT(index, deoptimization_states_.size());
690 277964 : return deoptimization_states_[index]->reason();
691 : }
692 :
693 76471786 : void CodeGenerator::TranslateStateValueDescriptor(
694 76478582 : StateValueDescriptor* desc, StateValueList* nested,
695 : Translation* translation, InstructionOperandIterator* iter) {
696 : // Note:
697 : // If translation is null, we just skip the relevant instruction operands.
698 76471786 : if (desc->IsNested()) {
699 14934 : if (translation != nullptr) {
700 14916 : translation->BeginCapturedObject(static_cast<int>(nested->size()));
701 : }
702 100621 : for (auto field : *nested) {
703 : TranslateStateValueDescriptor(field.desc, field.nested, translation,
704 70753 : iter);
705 : }
706 76456852 : } else if (desc->IsArgumentsElements()) {
707 2606 : if (translation != nullptr) {
708 2604 : translation->ArgumentsElements(desc->is_rest());
709 : }
710 76454246 : } else if (desc->IsArgumentsLength()) {
711 2670 : if (translation != nullptr) {
712 2668 : translation->ArgumentsLength(desc->is_rest());
713 : }
714 76451576 : } else if (desc->IsDuplicate()) {
715 1716 : if (translation != nullptr) {
716 1524 : translation->DuplicateObject(static_cast<int>(desc->id()));
717 : }
718 76449860 : } else if (desc->IsPlain()) {
719 31696858 : InstructionOperand* op = iter->Advance();
720 31696858 : if (translation != nullptr) {
721 : AddTranslationForOperand(translation, iter->instruction(), op,
722 30225518 : desc->type());
723 : }
724 : } else {
725 : DCHECK(desc->IsOptimizedOut());
726 44753002 : if (translation != nullptr) {
727 44485863 : if (optimized_out_literal_id_ == -1) {
728 : optimized_out_literal_id_ =
729 386618 : DefineDeoptimizationLiteral(isolate()->factory()->optimized_out());
730 : }
731 44485863 : translation->StoreLiteral(optimized_out_literal_id_);
732 : }
733 : }
734 76471730 : }
735 :
736 :
737 4777808 : void CodeGenerator::TranslateFrameStateDescriptorOperands(
738 3624257 : FrameStateDescriptor* desc, InstructionOperandIterator* iter,
739 : OutputFrameStateCombine combine, Translation* translation) {
740 : size_t index = 0;
741 : StateValueList* values = desc->GetStateValueDescriptors();
742 162356954 : for (StateValueList::iterator it = values->begin(); it != values->end();
743 : ++it, ++index) {
744 : StateValueDescriptor* value_desc = (*it).desc;
745 76400721 : if (combine.kind() == OutputFrameStateCombine::kPokeAt) {
746 : // The result of the call should be placed at position
747 : // [index_from_top] in the stack (overwriting whatever was
748 : // previously there).
749 : size_t index_from_top =
750 30106988 : desc->GetSize(combine) - 1 - combine.GetOffsetToPokeAt();
751 31992364 : if (index >= index_from_top &&
752 3770750 : index < index_from_top + iter->instruction()->OutputCount()) {
753 : DCHECK_NOT_NULL(translation);
754 : AddTranslationForOperand(
755 : translation, iter->instruction(),
756 : iter->instruction()->OutputAt(index - index_from_top),
757 3477262 : MachineType::AnyTagged());
758 : // Skip the instruction operands.
759 1738630 : TranslateStateValueDescriptor(value_desc, (*it).nested, nullptr, iter);
760 1738631 : continue;
761 : }
762 : }
763 74662091 : TranslateStateValueDescriptor(value_desc, (*it).nested, translation, iter);
764 : }
765 : DCHECK_EQ(desc->GetSize(OutputFrameStateCombine::Ignore()), index);
766 :
767 4777756 : if (combine.kind() == OutputFrameStateCombine::kPushOutput) {
768 : DCHECK(combine.GetPushCount() <= iter->instruction()->OutputCount());
769 251 : for (size_t output = 0; output < combine.GetPushCount(); output++) {
770 : // Materialize the result of the call instruction in this slot.
771 : AddTranslationForOperand(translation, iter->instruction(),
772 : iter->instruction()->OutputAt(output),
773 251 : MachineType::AnyTagged());
774 : }
775 : }
776 4777756 : }
777 :
778 :
// Serializes |descriptor| and all of its outer frame states into
// |translation|: outer frames first (recursively), then a Begin*Frame entry
// matching this frame's type, then the frame's state values.  Returns early
// for stubs that carry no SharedFunctionInfo.
void CodeGenerator::BuildTranslationForFrameStateDescriptor(
    FrameStateDescriptor* descriptor, InstructionOperandIterator* iter,
    Translation* translation, OutputFrameStateCombine state_combine) {
  // Outer-most state must be added to translation first.
  if (descriptor->outer_state() != nullptr) {
    // Only the inner-most frame applies |state_combine|; outer frames are
    // translated with Ignore().
    BuildTranslationForFrameStateDescriptor(descriptor->outer_state(), iter,
                                            translation,
                                            OutputFrameStateCombine::Ignore());
  }

  Handle<SharedFunctionInfo> shared_info;
  if (!descriptor->shared_info().ToHandle(&shared_info)) {
    if (!info()->has_shared_info()) {
      return;  // Stub with no SharedFunctionInfo.
    }
    // Fall back to the SharedFunctionInfo of the function being compiled.
    shared_info = info()->shared_info();
  }
  int shared_info_id = DefineDeoptimizationLiteral(shared_info);

  // Emit the frame-begin entry appropriate for this frame's type.
  switch (descriptor->type()) {
    case FrameStateType::kJavaScriptFunction:
      // Height excludes the function itself and the parameters.
      translation->BeginJSFrame(
          descriptor->bailout_id(), shared_info_id,
          static_cast<unsigned int>(descriptor->GetSize(state_combine) -
                                    (1 + descriptor->parameters_count())));
      break;
    case FrameStateType::kInterpretedFunction:
      // NOTE(review): the +1 presumably accounts for the interpreter's
      // accumulator on top of the locals -- confirm against the bytecode
      // frame layout.
      translation->BeginInterpretedFrame(
          descriptor->bailout_id(), shared_info_id,
          static_cast<unsigned int>(descriptor->locals_count() + 1));
      break;
    case FrameStateType::kArgumentsAdaptor:
      translation->BeginArgumentsAdaptorFrame(
          shared_info_id,
          static_cast<unsigned int>(descriptor->parameters_count()));
      break;
    case FrameStateType::kTailCallerFunction:
      translation->BeginTailCallerFrame(shared_info_id);
      break;
    case FrameStateType::kConstructStub:
      DCHECK(descriptor->bailout_id().IsValidForConstructStub());
      translation->BeginConstructStubFrame(
          descriptor->bailout_id(), shared_info_id,
          static_cast<unsigned int>(descriptor->parameters_count()));
      break;
    case FrameStateType::kGetterStub:
      translation->BeginGetterStubFrame(shared_info_id);
      break;
    case FrameStateType::kSetterStub:
      translation->BeginSetterStubFrame(shared_info_id);
      break;
  }

  TranslateFrameStateDescriptorOperands(descriptor, iter, state_combine,
                                        translation);
}
835 :
836 :
837 4411705 : int CodeGenerator::BuildTranslation(Instruction* instr, int pc_offset,
838 : size_t frame_state_offset,
839 : OutputFrameStateCombine state_combine) {
840 8823417 : DeoptimizationEntry const& entry =
841 4411705 : GetDeoptimizationEntry(instr, frame_state_offset);
842 : FrameStateDescriptor* const descriptor = entry.descriptor();
843 4411707 : frame_state_offset++;
844 :
845 : Translation translation(
846 4411706 : &translations_, static_cast<int>(descriptor->GetFrameCount()),
847 8823415 : static_cast<int>(descriptor->GetJSFrameCount()), zone());
848 : InstructionOperandIterator iter(instr, frame_state_offset);
849 : BuildTranslationForFrameStateDescriptor(descriptor, &iter, &translation,
850 4411708 : state_combine);
851 :
852 4411710 : int deoptimization_id = static_cast<int>(deoptimization_states_.size());
853 :
854 : deoptimization_states_.push_back(new (zone()) DeoptimizationState(
855 4411710 : descriptor->bailout_id(), translation.index(), pc_offset, entry.kind(),
856 8823422 : entry.reason()));
857 :
858 4411710 : return deoptimization_id;
859 : }
860 :
861 :
// Emits one translation entry recording where the value of |op| can be found
// at deoptimization time (stack slot, register, or constant).  |type| is the
// machine type the value was produced with; it selects the
// bool/int32/uint32/float/tagged flavor of the recorded entry.
void CodeGenerator::AddTranslationForOperand(Translation* translation,
                                             Instruction* instr,
                                             InstructionOperand* op,
                                             MachineType type) {
  if (op->IsStackSlot()) {
    if (type.representation() == MachineRepresentation::kBit) {
      translation->StoreBoolStackSlot(LocationOperand::cast(op)->index());
    } else if (type == MachineType::Int8() || type == MachineType::Int16() ||
               type == MachineType::Int32()) {
      translation->StoreInt32StackSlot(LocationOperand::cast(op)->index());
    } else if (type == MachineType::Uint8() || type == MachineType::Uint16() ||
               type == MachineType::Uint32()) {
      translation->StoreUint32StackSlot(LocationOperand::cast(op)->index());
    } else {
      // Any other representation in a general stack slot must be tagged.
      CHECK_EQ(MachineRepresentation::kTagged, type.representation());
      translation->StoreStackSlot(LocationOperand::cast(op)->index());
    }
  } else if (op->IsFPStackSlot()) {
    if (type.representation() == MachineRepresentation::kFloat64) {
      translation->StoreDoubleStackSlot(LocationOperand::cast(op)->index());
    } else {
      CHECK_EQ(MachineRepresentation::kFloat32, type.representation());
      translation->StoreFloatStackSlot(LocationOperand::cast(op)->index());
    }
  } else if (op->IsRegister()) {
    InstructionOperandConverter converter(this, instr);
    if (type.representation() == MachineRepresentation::kBit) {
      translation->StoreBoolRegister(converter.ToRegister(op));
    } else if (type == MachineType::Int8() || type == MachineType::Int16() ||
               type == MachineType::Int32()) {
      translation->StoreInt32Register(converter.ToRegister(op));
    } else if (type == MachineType::Uint8() || type == MachineType::Uint16() ||
               type == MachineType::Uint32()) {
      translation->StoreUint32Register(converter.ToRegister(op));
    } else {
      CHECK_EQ(MachineRepresentation::kTagged, type.representation());
      translation->StoreRegister(converter.ToRegister(op));
    }
  } else if (op->IsFPRegister()) {
    InstructionOperandConverter converter(this, instr);
    if (type.representation() == MachineRepresentation::kFloat64) {
      translation->StoreDoubleRegister(converter.ToDoubleRegister(op));
    } else {
      CHECK_EQ(MachineRepresentation::kFloat32, type.representation());
      translation->StoreFloatRegister(converter.ToFloatRegister(op));
    }
  } else {
    // Immediates are materialized as heap objects and recorded as
    // deoptimization literals.
    CHECK(op->IsImmediate());
    InstructionOperandConverter converter(this, instr);
    Constant constant = converter.ToConstant(op);
    Handle<Object> constant_object;
    switch (constant.type()) {
      case Constant::kInt32:
        if (type.representation() == MachineRepresentation::kTagged) {
          // When pointers are 4 bytes, we can use int32 constants to represent
          // Smis.
          DCHECK_EQ(4, kPointerSize);
          constant_object =
              handle(reinterpret_cast<Smi*>(constant.ToInt32()), isolate());
          DCHECK(constant_object->IsSmi());
        } else if (type.representation() == MachineRepresentation::kBit) {
          // A bit constant is either 0 (false) or 1 (true).
          if (constant.ToInt32() == 0) {
            constant_object = isolate()->factory()->false_value();
          } else {
            DCHECK_EQ(1, constant.ToInt32());
            constant_object = isolate()->factory()->true_value();
          }
        } else {
          // TODO(jarin,bmeurer): We currently pass in raw pointers to the
          // JSFunction::entry here. We should really consider fixing this.
          DCHECK(type == MachineType::Int32() ||
                 type == MachineType::Uint32() ||
                 type.representation() == MachineRepresentation::kWord32 ||
                 type.representation() == MachineRepresentation::kNone);
          DCHECK(type.representation() != MachineRepresentation::kNone ||
                 constant.ToInt32() == FrameStateDescriptor::kImpossibleValue);
          if (type == MachineType::Uint32()) {
            constant_object =
                isolate()->factory()->NewNumberFromUint(constant.ToInt32());
          } else {
            constant_object =
                isolate()->factory()->NewNumberFromInt(constant.ToInt32());
          }
        }
        break;
      case Constant::kInt64:
        // When pointers are 8 bytes, we can use int64 constants to represent
        // Smis.
        // TODO(jarin,bmeurer): We currently pass in raw pointers to the
        // JSFunction::entry here. We should really consider fixing this.
        DCHECK(type.representation() == MachineRepresentation::kWord64 ||
               type.representation() == MachineRepresentation::kTagged);
        DCHECK_EQ(8, kPointerSize);
        constant_object =
            handle(reinterpret_cast<Smi*>(constant.ToInt64()), isolate());
        DCHECK(constant_object->IsSmi());
        break;
      case Constant::kFloat32:
        DCHECK(type.representation() == MachineRepresentation::kFloat32 ||
               type.representation() == MachineRepresentation::kTagged);
        constant_object = isolate()->factory()->NewNumber(constant.ToFloat32());
        break;
      case Constant::kFloat64:
        DCHECK(type.representation() == MachineRepresentation::kFloat64 ||
               type.representation() == MachineRepresentation::kTagged);
        constant_object = isolate()->factory()->NewNumber(constant.ToFloat64());
        break;
      case Constant::kHeapObject:
        DCHECK_EQ(MachineRepresentation::kTagged, type.representation());
        constant_object = constant.ToHeapObject();
        break;
      default:
        UNREACHABLE();
    }
    // The frame's own closure gets a dedicated translation opcode so the
    // deoptimizer can identify the frame's function; every other constant
    // becomes a numbered deoptimization literal.
    if (constant_object.is_identical_to(info()->closure())) {
      translation->StoreJSFrameFunction();
    } else {
      int literal_id = DefineDeoptimizationLiteral(constant_object);
      translation->StoreLiteral(literal_id);
    }
  }
}
984 :
985 :
// Records the current assembler offset as the most recent lazy-deopt site.
void CodeGenerator::MarkLazyDeoptSite() {
  last_lazy_deopt_pc_ = masm()->pc_offset();
}
989 :
990 262400 : DeoptimizationExit* CodeGenerator::AddDeoptimizationExit(
991 : Instruction* instr, size_t frame_state_offset) {
992 : int const deoptimization_id = BuildTranslation(
993 262400 : instr, -1, frame_state_offset, OutputFrameStateCombine::Ignore());
994 : DeoptimizationExit* const exit = new (zone())
995 524800 : DeoptimizationExit(deoptimization_id, current_source_position_);
996 262400 : deoptimization_exits_.push_back(exit);
997 262400 : return exit;
998 : }
999 :
// Registers this out-of-line code fragment with |gen| by pushing it onto the
// front of the code generator's singly-linked list of fragments
// (most-recently created first).
OutOfLineCode::OutOfLineCode(CodeGenerator* gen)
    : frame_(gen->frame()), masm_(gen->masm()), next_(gen->ools_) {
  gen->ools_ = this;
}
1004 :
1005 :
1006 0 : OutOfLineCode::~OutOfLineCode() {}
1007 :
1008 : } // namespace compiler
1009 : } // namespace internal
1010 : } // namespace v8
|