Line data Source code
1 : // Copyright 2013 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/compiler/code-generator.h"
6 :
7 : #include "src/address-map.h"
8 : #include "src/assembler-inl.h"
9 : #include "src/base/adapters.h"
10 : #include "src/compilation-info.h"
11 : #include "src/compiler/code-generator-impl.h"
12 : #include "src/compiler/linkage.h"
13 : #include "src/compiler/pipeline.h"
14 : #include "src/frames-inl.h"
15 : #include "src/macro-assembler-inl.h"
16 :
17 : namespace v8 {
18 : namespace internal {
19 : namespace compiler {
20 :
21 : class CodeGenerator::JumpTable final : public ZoneObject {
22 : public:
23 : JumpTable(JumpTable* next, Label** targets, size_t target_count)
24 6148 : : next_(next), targets_(targets), target_count_(target_count) {}
25 :
26 : Label* label() { return &label_; }
27 : JumpTable* next() const { return next_; }
28 : Label** targets() const { return targets_; }
29 : size_t target_count() const { return target_count_; }
30 :
31 : private:
32 : Label label_;
33 : JumpTable* const next_;
34 : Label** const targets_;
35 : size_t const target_count_;
36 : };
37 :
38 912010 : CodeGenerator::CodeGenerator(Frame* frame, Linkage* linkage,
39 19605683 : InstructionSequence* code, CompilationInfo* info)
40 : : frame_access_state_(nullptr),
41 : linkage_(linkage),
42 : code_(code),
43 : unwinding_info_writer_(zone()),
44 : info_(info),
45 912007 : labels_(zone()->NewArray<Label>(code->InstructionBlockCount())),
46 : current_block_(RpoNumber::Invalid()),
47 : current_source_position_(SourcePosition::Unknown()),
48 : masm_(info->isolate(), nullptr, 0, CodeObjectRequired::kNo),
49 : resolver_(this),
50 : safepoints_(code->zone()),
51 : handlers_(code->zone()),
52 : deoptimization_exits_(code->zone()),
53 : deoptimization_states_(code->zone()),
54 : deoptimization_literals_(code->zone()),
55 : inlined_function_count_(0),
56 : translations_(code->zone()),
57 : last_lazy_deopt_pc_(0),
58 : jump_tables_(nullptr),
59 : ools_(nullptr),
60 : osr_pc_offset_(-1),
61 : optimized_out_literal_id_(-1),
62 : source_position_table_builder_(code->zone(),
63 8208077 : info->SourcePositionRecordingMode()) {
64 24619232 : for (int i = 0; i < code->InstructionBlockCount(); ++i) {
65 11397606 : new (&labels_[i]) Label;
66 : }
67 912010 : CreateFrameAccessState(frame);
68 912007 : }
69 :
70 11472351 : Isolate* CodeGenerator::isolate() const { return info_->isolate(); }
71 :
72 1824014 : void CodeGenerator::CreateFrameAccessState(Frame* frame) {
73 912006 : FinishFrame(frame);
74 1824016 : frame_access_state_ = new (code()->zone()) FrameAccessState(frame);
75 912008 : }
76 :
77 :
78 30053091 : Handle<Code> CodeGenerator::GenerateCode() {
79 : CompilationInfo* info = this->info();
80 :
81 : // Open a frame scope to indicate that there is a frame on the stack. The
82 : // MANUAL indicates that the scope shouldn't actually generate code to set up
83 : // the frame (that is done in AssemblePrologue).
84 912006 : FrameScope frame_scope(masm(), StackFrame::MANUAL);
85 :
86 912007 : if (info->is_source_positions_enabled()) {
87 86916 : SourcePosition source_position(info->shared_info()->start_position());
88 43458 : AssembleSourcePosition(source_position);
89 : }
90 :
91 : // Place function entry hook if requested to do so.
92 912007 : if (linkage()->GetIncomingDescriptor()->IsJSFunctionCall()) {
93 428715 : ProfileEntryHookStub::MaybeCallEntryHook(masm());
94 : }
95 : // Architecture-specific, linkage-specific prologue.
96 912006 : info->set_prologue_offset(masm()->pc_offset());
97 :
98 : // Define deoptimization literals for all inlined functions.
99 : DCHECK_EQ(0u, deoptimization_literals_.size());
100 1861176 : for (CompilationInfo::InlinedFunctionHolder& inlined :
101 : info->inlined_functions()) {
102 74328 : if (!inlined.shared_info.is_identical_to(info->shared_info())) {
103 37164 : int index = DefineDeoptimizationLiteral(inlined.shared_info);
104 : inlined.RegisterInlinedFunctionId(index);
105 : }
106 : }
107 912006 : inlined_function_count_ = deoptimization_literals_.size();
108 :
109 : // Define deoptimization literals for all unoptimized code objects of inlined
110 : // functions. This ensures unoptimized code is kept alive by optimized code.
111 1861176 : for (const CompilationInfo::InlinedFunctionHolder& inlined :
112 : info->inlined_functions()) {
113 74328 : if (!inlined.shared_info.is_identical_to(info->shared_info())) {
114 37164 : DefineDeoptimizationLiteral(inlined.inlined_code_object_root);
115 : }
116 : }
117 :
118 : unwinding_info_writer_.SetNumberOfInstructionBlocks(
119 912006 : code()->InstructionBlockCount());
120 :
121 : // Assemble all non-deferred blocks, followed by deferred ones.
122 1824020 : for (int deferred = 0; deferred < 2; ++deferred) {
123 60636015 : for (const InstructionBlock* block : code()->instruction_blocks()) {
124 22795189 : if (block->IsDeferred() == (deferred == 0)) {
125 : continue;
126 : }
127 : // Align loop headers on 16-byte boundaries.
128 11397596 : if (block->IsLoopHeader()) masm()->Align(16);
129 : // Ensure lazy deopt doesn't patch handler entry points.
130 11397599 : if (block->IsHandler()) EnsureSpaceForLazyDeopt();
131 : // Bind a label for a block.
132 11397599 : current_block_ = block->rpo_number();
133 22795198 : unwinding_info_writer_.BeginInstructionBlock(masm()->pc_offset(), block);
134 11397595 : if (FLAG_code_comments) {
135 : // TODO(titzer): these code comments are a giant memory leak.
136 0 : Vector<char> buffer = Vector<char>::New(200);
137 : char* buffer_start = buffer.start();
138 :
139 : int next = SNPrintF(
140 : buffer, "-- B%d start%s%s%s%s", block->rpo_number().ToInt(),
141 : block->IsDeferred() ? " (deferred)" : "",
142 : block->needs_frame() ? "" : " (no frame)",
143 : block->must_construct_frame() ? " (construct frame)" : "",
144 0 : block->must_deconstruct_frame() ? " (deconstruct frame)" : "");
145 :
146 : buffer = buffer.SubVector(next, buffer.length());
147 :
148 0 : if (block->IsLoopHeader()) {
149 : next =
150 0 : SNPrintF(buffer, " (loop up to %d)", block->loop_end().ToInt());
151 : buffer = buffer.SubVector(next, buffer.length());
152 : }
153 0 : if (block->loop_header().IsValid()) {
154 : next =
155 0 : SNPrintF(buffer, " (in loop %d)", block->loop_header().ToInt());
156 : buffer = buffer.SubVector(next, buffer.length());
157 : }
158 0 : SNPrintF(buffer, " --");
159 0 : masm()->RecordComment(buffer_start);
160 : }
161 :
162 22795190 : frame_access_state()->MarkHasFrame(block->needs_frame());
163 :
164 11397598 : masm()->bind(GetLabel(current_block_));
165 11397596 : if (block->must_construct_frame()) {
166 873844 : AssembleConstructFrame();
167 : // We need to setup the root register after we assemble the prologue, to
168 : // avoid clobbering callee saved registers in case of C linkage and
169 : // using the roots.
170 : // TODO(mtrofin): investigate how we can avoid doing this repeatedly.
171 873844 : if (linkage()->GetIncomingDescriptor()->InitializeRootRegister()) {
172 224034 : masm()->InitializeRootRegister();
173 : }
174 : }
175 :
176 : CodeGenResult result;
177 : if (FLAG_enable_embedded_constant_pool && !block->needs_frame()) {
178 : ConstantPoolUnavailableScope constant_pool_unavailable(masm());
179 : result = AssembleBlock(block);
180 : } else {
181 11397596 : result = AssembleBlock(block);
182 : }
183 11397598 : if (result != kSuccess) return Handle<Code>();
184 11397598 : unwinding_info_writer_.EndInstructionBlock(block);
185 : }
186 : }
187 :
188 : // Assemble all out-of-line code.
189 912009 : if (ools_) {
190 131069 : masm()->RecordComment("-- Out of line code --");
191 696055 : for (OutOfLineCode* ool = ools_; ool; ool = ool->next()) {
192 564986 : masm()->bind(ool->entry());
193 564986 : ool->Generate();
194 564986 : if (ool->exit()->is_bound()) masm()->jmp(ool->exit());
195 : }
196 : }
197 :
198 : // Assemble all eager deoptimization exits.
199 1695837 : for (DeoptimizationExit* exit : deoptimization_exits_) {
200 261276 : masm()->bind(exit->label());
201 261276 : AssembleDeoptimizerCall(exit->deoptimization_id(), exit->pos());
202 : }
203 :
204 : // Ensure there is space for lazy deoptimization in the code.
205 912009 : if (info->ShouldEnsureSpaceForLazyDeopt()) {
206 783088 : int target_offset = masm()->pc_offset() + Deoptimizer::patch_size();
207 11354780 : while (masm()->pc_offset() < target_offset) {
208 5090074 : masm()->nop();
209 : }
210 : }
211 :
212 912009 : FinishCode();
213 :
214 : // Emit the jump tables.
215 912009 : if (jump_tables_) {
216 3810 : masm()->Align(kPointerSize);
217 16106 : for (JumpTable* table = jump_tables_; table; table = table->next()) {
218 6148 : masm()->bind(table->label());
219 6148 : AssembleJumpTable(table->targets(), table->target_count());
220 : }
221 : }
222 :
223 : // The PerfJitLogger logs code up until here, excluding the safepoint
224 : // table. Resolve the unwinding info now so it is aware of the same code size
225 : // as reported by perf.
226 912009 : unwinding_info_writer_.Finish(masm()->pc_offset());
227 :
228 912009 : safepoints()->Emit(masm(), frame()->GetTotalFrameSlotCount());
229 :
230 : Handle<Code> result = v8::internal::CodeGenerator::MakeCodeEpilogue(
231 912010 : masm(), unwinding_info_writer_.eh_frame_writer(), info, Handle<Object>());
232 : result->set_is_turbofanned(true);
233 912010 : result->set_stack_slots(frame()->GetTotalFrameSlotCount());
234 1824020 : result->set_safepoint_table_offset(safepoints()->GetCodeOffset());
235 : Handle<ByteArray> source_positions =
236 : source_position_table_builder_.ToSourcePositionTable(
237 912010 : isolate(), Handle<AbstractCode>::cast(result));
238 912010 : result->set_source_position_table(*source_positions);
239 :
240 : // Emit exception handler table.
241 1388906 : if (!handlers_.empty()) {
242 : Handle<HandlerTable> table =
243 : Handle<HandlerTable>::cast(isolate()->factory()->NewFixedArray(
244 : HandlerTable::LengthForReturn(static_cast<int>(handlers_.size())),
245 33776 : TENURED));
246 493784 : for (size_t i = 0; i < handlers_.size(); ++i) {
247 460008 : table->SetReturnOffset(static_cast<int>(i), handlers_[i].pc_offset);
248 460008 : table->SetReturnHandler(static_cast<int>(i), handlers_[i].handler->pos());
249 : }
250 16888 : result->set_handler_table(*table);
251 : }
252 :
253 912010 : PopulateDeoptimizationData(result);
254 :
255 : // Ensure there is space for lazy deoptimization in the relocation info.
256 912010 : if (info->ShouldEnsureSpaceForLazyDeopt()) {
257 391545 : Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(result);
258 : }
259 :
260 912010 : return result;
261 : }
262 :
263 :
264 9862120 : bool CodeGenerator::IsNextInAssemblyOrder(RpoNumber block) const {
265 : return code()
266 : ->InstructionBlockAt(current_block_)
267 9862126 : ->ao_number()
268 19724247 : .IsNext(code()->InstructionBlockAt(block)->ao_number());
269 : }
270 :
271 :
272 3390409 : void CodeGenerator::RecordSafepoint(ReferenceMap* references,
273 : Safepoint::Kind kind, int arguments,
274 3390407 : Safepoint::DeoptMode deopt_mode) {
275 : Safepoint safepoint =
276 3390409 : safepoints()->DefineSafepoint(masm(), kind, arguments, deopt_mode);
277 : int stackSlotToSpillSlotDelta =
278 3390407 : frame()->GetTotalFrameSlotCount() - frame()->GetSpillSlotCount();
279 49160096 : for (const InstructionOperand& operand : references->reference_operands()) {
280 42379274 : if (operand.IsStackSlot()) {
281 : int index = LocationOperand::cast(operand).index();
282 : DCHECK(index >= 0);
283 : // We might index values in the fixed part of the frame (i.e. the
284 : // closure pointer or the context pointer); these are not spill slots
285 : // and therefore don't work with the SafepointTable currently, but
286 : // we also don't need to worry about them, since the GC has special
287 : // knowledge about those fields anyway.
288 5372443 : if (index < stackSlotToSpillSlotDelta) continue;
289 5368406 : safepoint.DefinePointerSlot(index, zone());
290 37006831 : } else if (operand.IsRegister() && (kind & Safepoint::kWithRegisters)) {
291 0 : Register reg = LocationOperand::cast(operand).GetRegister();
292 0 : safepoint.DefinePointerRegister(reg, zone());
293 : }
294 : }
295 3390415 : }
296 :
297 4593744 : bool CodeGenerator::IsMaterializableFromRoot(
298 4593744 : Handle<HeapObject> object, Heap::RootListIndex* index_return) {
299 : const CallDescriptor* incoming_descriptor =
300 : linkage()->GetIncomingDescriptor();
301 4593744 : if (incoming_descriptor->flags() & CallDescriptor::kCanUseRoots) {
302 4364122 : RootIndexMap map(isolate());
303 4364122 : int root_index = map.Lookup(*object);
304 4364122 : if (root_index != RootIndexMap::kInvalidRootIndex) {
305 1553326 : *index_return = static_cast<Heap::RootListIndex>(root_index);
306 1553326 : return true;
307 : }
308 : }
309 : return false;
310 : }
311 :
312 11397600 : CodeGenerator::CodeGenResult CodeGenerator::AssembleBlock(
313 100669560 : const InstructionBlock* block) {
314 100669538 : for (int i = block->code_start(); i < block->code_end(); ++i) {
315 : Instruction* instr = code()->InstructionAt(i);
316 38937201 : CodeGenResult result = AssembleInstruction(instr, block);
317 38937162 : if (result != kSuccess) return result;
318 : }
319 : return kSuccess;
320 : }
321 :
322 44749 : bool CodeGenerator::IsValidPush(InstructionOperand source,
323 : CodeGenerator::PushTypeFlags push_type) {
324 44749 : if (source.IsImmediate() &&
325 : ((push_type & CodeGenerator::kImmediatePush) != 0)) {
326 : return true;
327 : }
328 99085 : if ((source.IsRegister() || source.IsStackSlot()) &&
329 : ((push_type & CodeGenerator::kScalarPush) != 0)) {
330 : return true;
331 : }
332 216 : if ((source.IsFloatRegister() || source.IsFloatStackSlot()) &&
333 : ((push_type & CodeGenerator::kFloat32Push) != 0)) {
334 : return true;
335 : }
336 216 : if ((source.IsDoubleRegister() || source.IsFloatStackSlot()) &&
337 : ((push_type & CodeGenerator::kFloat64Push) != 0)) {
338 : return true;
339 : }
340 108 : return false;
341 : }
342 :
343 149085 : void CodeGenerator::GetPushCompatibleMoves(Instruction* instr,
344 : PushTypeFlags push_type,
345 : ZoneVector<MoveOperands*>* pushes) {
346 231131 : pushes->clear();
347 432783 : for (int i = Instruction::FIRST_GAP_POSITION;
348 : i <= Instruction::LAST_GAP_POSITION; ++i) {
349 : Instruction::GapPosition inner_pos =
350 : static_cast<Instruction::GapPosition>(i);
351 : ParallelMove* parallel_move = instr->GetParallelMove(inner_pos);
352 290934 : if (parallel_move != nullptr) {
353 590151 : for (auto move : *parallel_move) {
354 277857 : InstructionOperand source = move->source();
355 277857 : InstructionOperand destination = move->destination();
356 : int first_push_compatible_index =
357 : V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK ? 1 : 0;
358 : // If there are any moves from slots that will be overridden by pushes,
359 : // then the full gap resolver must be used since optimization with
360 : // pushes don't participate in the parallel move and might clobber
361 : // values needed for the gap resolve.
362 294982 : if (source.IsStackSlot() &&
363 : LocationOperand::cast(source).index() >=
364 : first_push_compatible_index) {
365 : pushes->clear();
366 149085 : return;
367 : }
368 : // TODO(danno): Right now, only consider moves from the FIRST gap for
369 : // pushes. Theoretically, we could extract pushes for both gaps (there
370 : // are cases where this happens), but the logic for that would also have
371 : // to check to make sure that non-memory inputs to the pushes from the
372 : // LAST gap don't get clobbered in the FIRST gap.
373 270621 : if (i == Instruction::FIRST_GAP_POSITION) {
374 327915 : if (destination.IsStackSlot() &&
375 : LocationOperand::cast(destination).index() >=
376 : first_push_compatible_index) {
377 : int index = LocationOperand::cast(destination).index();
378 44749 : if (IsValidPush(source, push_type)) {
379 44641 : if (index >= static_cast<int>(pushes->size())) {
380 44383 : pushes->resize(index + 1);
381 : }
382 89282 : (*pushes)[index] = move;
383 : }
384 : }
385 : }
386 : }
387 : }
388 : }
389 :
390 : // For now, only support a set of continuous pushes at the end of the list.
391 : size_t push_count_upper_bound = pushes->size();
392 : size_t push_begin = push_count_upper_bound;
393 234858 : for (auto move : base::Reversed(*pushes)) {
394 51298 : if (move == nullptr) break;
395 41711 : push_begin--;
396 : }
397 141849 : size_t push_count = pushes->size() - push_begin;
398 : std::copy(pushes->begin() + push_begin,
399 : pushes->begin() + push_begin + push_count, pushes->begin());
400 141849 : pushes->resize(push_count);
401 : }
402 :
403 38937191 : CodeGenerator::CodeGenResult CodeGenerator::AssembleInstruction(
404 91006950 : Instruction* instr, const InstructionBlock* block) {
405 : int first_unused_stack_slot;
406 38937191 : FlagsMode mode = FlagsModeField::decode(instr->opcode());
407 38937191 : if (mode != kFlags_trap) {
408 38920896 : AssembleSourcePosition(instr);
409 : }
410 : bool adjust_stack =
411 38937165 : GetSlotAboveSPBeforeTailCall(instr, &first_unused_stack_slot);
412 38937177 : if (adjust_stack) AssembleTailCallBeforeGap(instr, first_unused_stack_slot);
413 38937177 : AssembleGaps(instr);
414 38937165 : if (adjust_stack) AssembleTailCallAfterGap(instr, first_unused_stack_slot);
415 : DCHECK_IMPLIES(
416 : block->must_deconstruct_frame(),
417 : instr != code()->InstructionAt(block->last_instruction_index()) ||
418 : instr->IsRet() || instr->IsJump());
419 42206251 : if (instr->IsJump() && block->must_deconstruct_frame()) {
420 117725 : AssembleDeconstructFrame();
421 : }
422 : // Assemble architecture-specific code for the instruction.
423 38937171 : CodeGenResult result = AssembleArchInstruction(instr);
424 38937162 : if (result != kSuccess) return result;
425 :
426 38937172 : FlagsCondition condition = FlagsConditionField::decode(instr->opcode());
427 38937172 : switch (mode) {
428 : case kFlags_branch: {
429 : // Assemble a branch after this instruction.
430 : InstructionOperandConverter i(this, instr);
431 3288024 : RpoNumber true_rpo = i.InputRpo(instr->InputCount() - 2);
432 3288025 : RpoNumber false_rpo = i.InputRpo(instr->InputCount() - 1);
433 :
434 3288021 : if (true_rpo == false_rpo) {
435 : // redundant branch.
436 564 : if (!IsNextInAssemblyOrder(true_rpo)) {
437 312 : AssembleArchJump(true_rpo);
438 : }
439 564 : return kSuccess;
440 : }
441 3287457 : if (IsNextInAssemblyOrder(true_rpo)) {
442 : // true block is next, can fall through if condition negated.
443 : std::swap(true_rpo, false_rpo);
444 : condition = NegateFlagsCondition(condition);
445 : }
446 : BranchInfo branch;
447 3287458 : branch.condition = condition;
448 3287458 : branch.true_label = GetLabel(true_rpo);
449 3287458 : branch.false_label = GetLabel(false_rpo);
450 3287458 : branch.fallthru = IsNextInAssemblyOrder(false_rpo);
451 : // Assemble architecture-specific branch.
452 3287456 : AssembleArchBranch(instr, &branch);
453 3287456 : break;
454 : }
455 : case kFlags_deoptimize: {
456 : // Assemble a conditional eager deoptimization after this instruction.
457 : InstructionOperandConverter i(this, instr);
458 261276 : size_t frame_state_offset = MiscField::decode(instr->opcode());
459 : DeoptimizationExit* const exit =
460 261276 : AddDeoptimizationExit(instr, frame_state_offset);
461 : Label continue_label;
462 : BranchInfo branch;
463 261276 : branch.condition = condition;
464 261276 : branch.true_label = exit->label();
465 261276 : branch.false_label = &continue_label;
466 261276 : branch.fallthru = true;
467 : // Assemble architecture-specific branch.
468 261276 : AssembleArchBranch(instr, &branch);
469 261276 : masm()->bind(&continue_label);
470 : break;
471 : }
472 : case kFlags_set: {
473 : // Assemble a boolean materialization after this instruction.
474 216689 : AssembleArchBoolean(instr, condition);
475 216689 : break;
476 : }
477 : case kFlags_trap: {
478 16305 : AssembleArchTrap(instr, condition);
479 16305 : break;
480 : }
481 : case kFlags_none: {
482 : break;
483 : }
484 : }
485 : return kSuccess;
486 : }
487 :
488 62894687 : void CodeGenerator::AssembleSourcePosition(Instruction* instr) {
489 38931413 : SourcePosition source_position = SourcePosition::Unknown();
490 74407946 : if (instr->IsNop() && instr->AreMovesRedundant()) return;
491 23963274 : if (!code()->GetSourcePosition(instr, &source_position)) return;
492 3454884 : AssembleSourcePosition(source_position);
493 : }
494 :
495 3498341 : void CodeGenerator::AssembleSourcePosition(SourcePosition source_position) {
496 3498341 : if (source_position == current_source_position_) return;
497 2033963 : current_source_position_ = source_position;
498 2033963 : if (!source_position.IsKnown()) return;
499 2033963 : source_position_table_builder_.AddPosition(masm()->pc_offset(),
500 2033963 : source_position, false);
501 2033963 : if (FLAG_code_comments) {
502 0 : CompilationInfo* info = this->info();
503 0 : if (!info->parse_info()) return;
504 0 : std::ostringstream buffer;
505 0 : buffer << "-- ";
506 0 : if (FLAG_trace_turbo) {
507 0 : buffer << source_position;
508 : } else {
509 0 : buffer << source_position.InliningStack(info);
510 : }
511 0 : buffer << " --";
512 0 : masm()->RecordComment(StrDup(buffer.str().c_str()));
513 : }
514 : }
515 :
516 39086256 : bool CodeGenerator::GetSlotAboveSPBeforeTailCall(Instruction* instr,
517 : int* slot) {
518 38937171 : if (instr->IsTailCall()) {
519 : InstructionOperandConverter g(this, instr);
520 149085 : *slot = g.InputInt32(instr->InputCount() - 1);
521 : return true;
522 : } else {
523 : return false;
524 : }
525 : }
526 :
527 38937041 : void CodeGenerator::AssembleGaps(Instruction* instr) {
528 116811313 : for (int i = Instruction::FIRST_GAP_POSITION;
529 : i <= Instruction::LAST_GAP_POSITION; i++) {
530 : Instruction::GapPosition inner_pos =
531 : static_cast<Instruction::GapPosition>(i);
532 : ParallelMove* move = instr->GetParallelMove(inner_pos);
533 77874145 : if (move != nullptr) resolver()->Resolve(move);
534 : }
535 38937168 : }
536 :
537 : namespace {
538 :
539 391538 : Handle<PodArray<InliningPosition>> CreateInliningPositions(
540 391538 : CompilationInfo* info) {
541 438097 : const CompilationInfo::InlinedFunctionList& inlined_functions =
542 : info->inlined_functions();
543 391538 : if (inlined_functions.size() == 0) {
544 : return Handle<PodArray<InliningPosition>>::cast(
545 : info->isolate()->factory()->empty_byte_array());
546 : }
547 : Handle<PodArray<InliningPosition>> inl_positions =
548 : PodArray<InliningPosition>::New(
549 9395 : info->isolate(), static_cast<int>(inlined_functions.size()), TENURED);
550 93118 : for (size_t i = 0; i < inlined_functions.size(); ++i) {
551 37164 : inl_positions->set(static_cast<int>(i), inlined_functions[i].position);
552 : }
553 9395 : return inl_positions;
554 : }
555 :
556 : } // namespace
557 :
558 912009 : void CodeGenerator::PopulateDeoptimizationData(Handle<Code> code_object) {
559 391538 : CompilationInfo* info = this->info();
560 912009 : int deopt_count = static_cast<int>(deoptimization_states_.size());
561 1952953 : if (deopt_count == 0 && !info->is_osr()) return;
562 : Handle<DeoptimizationInputData> data =
563 391537 : DeoptimizationInputData::New(isolate(), deopt_count, TENURED);
564 :
565 : Handle<ByteArray> translation_array =
566 391538 : translations_.CreateByteArray(isolate()->factory());
567 :
568 : data->SetTranslationByteArray(*translation_array);
569 : data->SetInlinedFunctionCount(
570 391538 : Smi::FromInt(static_cast<int>(inlined_function_count_)));
571 : data->SetOptimizationId(Smi::FromInt(info->optimization_id()));
572 :
573 391538 : if (info->has_shared_info()) {
574 783076 : data->SetSharedFunctionInfo(*info->shared_info());
575 : } else {
576 : data->SetSharedFunctionInfo(Smi::kZero);
577 : }
578 :
579 : Handle<FixedArray> literals = isolate()->factory()->NewFixedArray(
580 783076 : static_cast<int>(deoptimization_literals_.size()), TENURED);
581 : {
582 : AllowDeferredHandleDereference copy_handles;
583 3935514 : for (unsigned i = 0; i < deoptimization_literals_.size(); i++) {
584 3152438 : literals->set(i, *deoptimization_literals_[i]);
585 : }
586 : data->SetLiteralArray(*literals);
587 : }
588 :
589 391538 : Handle<PodArray<InliningPosition>> inl_pos = CreateInliningPositions(info);
590 : data->SetInliningPositions(*inl_pos);
591 :
592 391538 : if (info->is_osr()) {
593 : DCHECK(osr_pc_offset_ >= 0);
594 5822 : data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id().ToInt()));
595 5822 : data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));
596 : } else {
597 : BailoutId osr_ast_id = BailoutId::None();
598 : data->SetOsrAstId(Smi::FromInt(osr_ast_id.ToInt()));
599 : data->SetOsrPcOffset(Smi::FromInt(-1));
600 : }
601 :
602 : // Populate deoptimization entries.
603 4408309 : for (int i = 0; i < deopt_count; i++) {
604 8816617 : DeoptimizationState* deoptimization_state = deoptimization_states_[i];
605 : data->SetAstId(i, deoptimization_state->bailout_id());
606 4408307 : CHECK(deoptimization_states_[i]);
607 : data->SetTranslationIndex(
608 8816616 : i, Smi::FromInt(deoptimization_states_[i]->translation_id()));
609 4408308 : data->SetArgumentsStackHeight(i, Smi::kZero);
610 4408309 : data->SetPc(i, Smi::FromInt(deoptimization_state->pc_offset()));
611 : }
612 :
613 391538 : code_object->set_deoptimization_data(*data);
614 : }
615 :
616 :
617 6148 : Label* CodeGenerator::AddJumpTable(Label** targets, size_t target_count) {
618 12296 : jump_tables_ = new (zone()) JumpTable(jump_tables_, targets, target_count);
619 6148 : return jump_tables_->label();
620 : }
621 :
622 :
623 7216582 : void CodeGenerator::RecordCallPosition(Instruction* instr) {
624 3378287 : CallDescriptor::Flags flags(MiscField::decode(instr->opcode()));
625 :
626 3378287 : bool needs_frame_state = (flags & CallDescriptor::kNeedsFrameState);
627 :
628 : RecordSafepoint(
629 : instr->reference_map(), Safepoint::kSimple, 0,
630 6756574 : needs_frame_state ? Safepoint::kLazyDeopt : Safepoint::kNoLazyDeopt);
631 :
632 3378293 : if (flags & CallDescriptor::kHasExceptionHandler) {
633 : InstructionOperandConverter i(this, instr);
634 230004 : RpoNumber handler_rpo = i.InputRpo(instr->InputCount() - 1);
635 690012 : handlers_.push_back({GetLabel(handler_rpo), masm()->pc_offset()});
636 : }
637 :
638 3378293 : if (needs_frame_state) {
639 : MarkLazyDeoptSite();
640 : // If the frame state is present, it starts at argument 1 (just after the
641 : // code address).
642 : size_t frame_state_offset = 1;
643 : FrameStateDescriptor* descriptor =
644 2396710 : GetDeoptimizationEntry(instr, frame_state_offset).descriptor();
645 2396710 : int pc_offset = masm()->pc_offset();
646 : int deopt_state_id = BuildTranslation(instr, pc_offset, frame_state_offset,
647 2396710 : descriptor->state_combine());
648 : // If the pre-call frame state differs from the post-call one, produce the
649 : // pre-call frame state, too.
650 : // TODO(jarin) We might want to avoid building the pre-call frame state
651 : // because it is only used to get locals and arguments (by the debugger and
652 : // f.arguments), and those are the same in the pre-call and post-call
653 : // states.
654 2396705 : if (!descriptor->state_combine().IsOutputIgnored()) {
655 : deopt_state_id = BuildTranslation(instr, -1, frame_state_offset,
656 1734629 : OutputFrameStateCombine::Ignore());
657 : }
658 2396711 : safepoints()->RecordLazyDeoptimizationIndex(deopt_state_id);
659 : }
660 3378291 : }
661 :
662 :
663 9539313 : int CodeGenerator::DefineDeoptimizationLiteral(Handle<Object> literal) {
664 9539313 : int result = static_cast<int>(deoptimization_literals_.size());
665 1133740016 : for (unsigned i = 0; i < deoptimization_literals_.size(); ++i) {
666 565294883 : if (deoptimization_literals_[i].is_identical_to(literal)) return i;
667 : }
668 1576210 : deoptimization_literals_.push_back(literal);
669 1576212 : return result;
670 : }
671 :
672 6805002 : DeoptimizationEntry const& CodeGenerator::GetDeoptimizationEntry(
673 6805007 : Instruction* instr, size_t frame_state_offset) {
674 : InstructionOperandConverter i(this, instr);
675 6805002 : int const state_id = i.InputInt32(frame_state_offset);
676 6805007 : return code()->GetDeoptimizationEntry(state_id);
677 : }
678 :
679 276969 : DeoptimizeKind CodeGenerator::GetDeoptimizationKind(
680 : int deoptimization_id) const {
681 : size_t const index = static_cast<size_t>(deoptimization_id);
682 : DCHECK_LT(index, deoptimization_states_.size());
683 276969 : return deoptimization_states_[index]->kind();
684 : }
685 :
686 276969 : DeoptimizeReason CodeGenerator::GetDeoptimizationReason(
687 : int deoptimization_id) const {
688 : size_t const index = static_cast<size_t>(deoptimization_id);
689 : DCHECK_LT(index, deoptimization_states_.size());
690 276969 : return deoptimization_states_[index]->reason();
691 : }
692 :
693 76451558 : void CodeGenerator::TranslateStateValueDescriptor(
694 76458588 : StateValueDescriptor* desc, StateValueList* nested,
695 : Translation* translation, InstructionOperandIterator* iter) {
696 : // Note:
697 : // If translation is null, we just skip the relevant instruction operands.
698 76451558 : if (desc->IsNested()) {
699 15036 : if (translation != nullptr) {
700 15018 : translation->BeginCapturedObject(static_cast<int>(nested->size()));
701 : }
702 101228 : for (auto field : *nested) {
703 : TranslateStateValueDescriptor(field.desc, field.nested, translation,
704 71156 : iter);
705 : }
706 76436522 : } else if (desc->IsArgumentsElements()) {
707 2723 : if (translation != nullptr) {
708 2721 : translation->ArgumentsElements(desc->is_rest());
709 : }
710 76433799 : } else if (desc->IsArgumentsLength()) {
711 2787 : if (translation != nullptr) {
712 2785 : translation->ArgumentsLength(desc->is_rest());
713 : }
714 76431012 : } else if (desc->IsDuplicate()) {
715 1716 : if (translation != nullptr) {
716 1524 : translation->DuplicateObject(static_cast<int>(desc->id()));
717 : }
718 76429296 : } else if (desc->IsPlain()) {
719 31683740 : InstructionOperand* op = iter->Advance();
720 31683740 : if (translation != nullptr) {
721 : AddTranslationForOperand(translation, iter->instruction(), op,
722 30213220 : desc->type());
723 : }
724 : } else {
725 : DCHECK(desc->IsOptimizedOut());
726 44745556 : if (translation != nullptr) {
727 44478572 : if (optimized_out_literal_id_ == -1) {
728 : optimized_out_literal_id_ =
729 386321 : DefineDeoptimizationLiteral(isolate()->factory()->optimized_out());
730 : }
731 44478573 : translation->StoreLiteral(optimized_out_literal_id_);
732 : }
733 : }
734 76451379 : }
735 :
736 :
// Walks every state value of |desc| and emits a Translation command for each,
// advancing |iter| over the instruction's frame-state inputs as it goes.
// When |combine| is kPokeAt, the call instruction's outputs are substituted
// for the state values at the poke position (the deoptimizer then sees the
// call result instead of the stale pre-call value).  |translation| may be
// null for the skip-only recursion into TranslateStateValueDescriptor.
void CodeGenerator::TranslateFrameStateDescriptorOperands(
    FrameStateDescriptor* desc, InstructionOperandIterator* iter,
    OutputFrameStateCombine combine, Translation* translation) {
  size_t index = 0;
  StateValueList* values = desc->GetStateValueDescriptors();
  for (StateValueList::iterator it = values->begin(); it != values->end();
       ++it, ++index) {
    StateValueDescriptor* value_desc = (*it).desc;
    if (combine.kind() == OutputFrameStateCombine::kPokeAt) {
      // The result of the call should be placed at position
      // [index_from_top] in the stack (overwriting whatever was
      // previously there).
      size_t index_from_top =
          desc->GetSize(combine) - 1 - combine.GetOffsetToPokeAt();
      if (index >= index_from_top &&
          index < index_from_top + iter->instruction()->OutputCount()) {
        DCHECK_NOT_NULL(translation);
        // Emit the call's output operand in place of this state value.
        AddTranslationForOperand(
            translation, iter->instruction(),
            iter->instruction()->OutputAt(index - index_from_top),
            MachineType::AnyTagged());
        // Skip the instruction operands.  Passing a null translation makes
        // the recursion consume |iter| without emitting anything, keeping
        // the cursor in sync with the state value list.
        TranslateStateValueDescriptor(value_desc, (*it).nested, nullptr, iter);
        continue;
      }
    }
    TranslateStateValueDescriptor(value_desc, (*it).nested, translation, iter);
  }
  DCHECK_EQ(desc->GetSize(OutputFrameStateCombine::Ignore()), index);

  if (combine.kind() == OutputFrameStateCombine::kPushOutput) {
    DCHECK(combine.GetPushCount() <= iter->instruction()->OutputCount());
    for (size_t output = 0; output < combine.GetPushCount(); output++) {
      // Materialize the result of the call instruction in this slot.
      AddTranslationForOperand(translation, iter->instruction(),
                               iter->instruction()->OutputAt(output),
                               MachineType::AnyTagged());
    }
  }
}
777 :
778 :
// Emits the Translation commands describing the frame |descriptor| and,
// recursively, all outer frames it was inlined into.  For each frame it
// opens a frame record of the appropriate kind and then translates the
// frame's operands into it.
void CodeGenerator::BuildTranslationForFrameStateDescriptor(
    FrameStateDescriptor* descriptor, InstructionOperandIterator* iter,
    Translation* translation, OutputFrameStateCombine state_combine) {
  // Outer-most state must be added to translation first.
  if (descriptor->outer_state() != nullptr) {
    // Only the innermost frame receives the caller's |state_combine|;
    // outer frames never absorb the call result.
    BuildTranslationForFrameStateDescriptor(descriptor->outer_state(), iter,
                                            translation,
                                            OutputFrameStateCombine::Ignore());
  }

  Handle<SharedFunctionInfo> shared_info;
  if (!descriptor->shared_info().ToHandle(&shared_info)) {
    if (!info()->has_shared_info()) {
      return;  // Stub with no SharedFunctionInfo.
    }
    // Fall back to the SharedFunctionInfo of the code being compiled.
    shared_info = info()->shared_info();
  }
  int shared_info_id = DefineDeoptimizationLiteral(shared_info);

  switch (descriptor->type()) {
    case FrameStateType::kJavaScriptFunction:
      // Frame height excludes the receiver and the parameters
      // (hence the "1 + parameters_count()" subtraction).
      translation->BeginJSFrame(
          descriptor->bailout_id(), shared_info_id,
          static_cast<unsigned int>(descriptor->GetSize(state_combine) -
                                    (1 + descriptor->parameters_count())));
      break;
    case FrameStateType::kInterpretedFunction:
      // locals_count() + 1 — the extra slot presumably holds the
      // interpreter accumulator; confirm against the deoptimizer.
      translation->BeginInterpretedFrame(
          descriptor->bailout_id(), shared_info_id,
          static_cast<unsigned int>(descriptor->locals_count() + 1));
      break;
    case FrameStateType::kArgumentsAdaptor:
      translation->BeginArgumentsAdaptorFrame(
          shared_info_id,
          static_cast<unsigned int>(descriptor->parameters_count()));
      break;
    case FrameStateType::kTailCallerFunction:
      translation->BeginTailCallerFrame(shared_info_id);
      break;
    case FrameStateType::kConstructStub:
      DCHECK(descriptor->bailout_id().IsValidForConstructStub());
      translation->BeginConstructStubFrame(
          descriptor->bailout_id(), shared_info_id,
          static_cast<unsigned int>(descriptor->parameters_count()));
      break;
    case FrameStateType::kGetterStub:
      translation->BeginGetterStubFrame(shared_info_id);
      break;
    case FrameStateType::kSetterStub:
      translation->BeginSetterStubFrame(shared_info_id);
      break;
  }

  // Fill the just-opened frame record with the frame's operand values.
  TranslateFrameStateDescriptorOperands(descriptor, iter, state_combine,
                                        translation);
}
835 :
836 :
837 4408302 : int CodeGenerator::BuildTranslation(Instruction* instr, int pc_offset,
838 : size_t frame_state_offset,
839 : OutputFrameStateCombine state_combine) {
840 8816626 : DeoptimizationEntry const& entry =
841 4408302 : GetDeoptimizationEntry(instr, frame_state_offset);
842 : FrameStateDescriptor* const descriptor = entry.descriptor();
843 4408310 : frame_state_offset++;
844 :
845 : Translation translation(
846 4408305 : &translations_, static_cast<int>(descriptor->GetFrameCount()),
847 8816613 : static_cast<int>(descriptor->GetJSFrameCount()), zone());
848 : InstructionOperandIterator iter(instr, frame_state_offset);
849 : BuildTranslationForFrameStateDescriptor(descriptor, &iter, &translation,
850 4408310 : state_combine);
851 :
852 4408316 : int deoptimization_id = static_cast<int>(deoptimization_states_.size());
853 :
854 : deoptimization_states_.push_back(new (zone()) DeoptimizationState(
855 4408316 : descriptor->bailout_id(), translation.index(), pc_offset, entry.kind(),
856 8816622 : entry.reason()));
857 :
858 4408307 : return deoptimization_id;
859 : }
860 :
861 :
862 31951336 : void CodeGenerator::AddTranslationForOperand(Translation* translation,
863 : Instruction* instr,
864 : InstructionOperand* op,
865 8357598 : MachineType type) {
866 31951336 : if (op->IsStackSlot()) {
867 21081123 : if (type.representation() == MachineRepresentation::kBit) {
868 70252 : translation->StoreBoolStackSlot(LocationOperand::cast(op)->index());
869 63030639 : } else if (type == MachineType::Int8() || type == MachineType::Int16() ||
870 : type == MachineType::Int32()) {
871 270881 : translation->StoreInt32StackSlot(LocationOperand::cast(op)->index());
872 62219837 : } else if (type == MachineType::Uint8() || type == MachineType::Uint16() ||
873 : type == MachineType::Uint32()) {
874 716 : translation->StoreUint32StackSlot(LocationOperand::cast(op)->index());
875 : } else {
876 20739274 : CHECK_EQ(MachineRepresentation::kTagged, type.representation());
877 20739274 : translation->StoreStackSlot(LocationOperand::cast(op)->index());
878 : }
879 10870213 : } else if (op->IsFPStackSlot()) {
880 330143 : if (type.representation() == MachineRepresentation::kFloat64) {
881 329555 : translation->StoreDoubleStackSlot(LocationOperand::cast(op)->index());
882 : } else {
883 588 : CHECK_EQ(MachineRepresentation::kFloat32, type.representation());
884 588 : translation->StoreFloatStackSlot(LocationOperand::cast(op)->index());
885 : }
886 10540070 : } else if (op->IsRegister()) {
887 : InstructionOperandConverter converter(this, instr);
888 2118377 : if (type.representation() == MachineRepresentation::kBit) {
889 1055 : translation->StoreBoolRegister(converter.ToRegister(op));
890 6351966 : } else if (type == MachineType::Int8() || type == MachineType::Int16() ||
891 : type == MachineType::Int32()) {
892 40494 : translation->StoreInt32Register(converter.ToRegister(op));
893 6230482 : } else if (type == MachineType::Uint8() || type == MachineType::Uint16() ||
894 : type == MachineType::Uint32()) {
895 56 : translation->StoreUint32Register(converter.ToRegister(op));
896 : } else {
897 2076772 : CHECK_EQ(MachineRepresentation::kTagged, type.representation());
898 2076772 : translation->StoreRegister(converter.ToRegister(op));
899 : }
900 8421693 : } else if (op->IsFPRegister()) {
901 : InstructionOperandConverter converter(this, instr);
902 64104 : if (type.representation() == MachineRepresentation::kFloat64) {
903 64013 : translation->StoreDoubleRegister(converter.ToDoubleRegister(op));
904 : } else {
905 91 : CHECK_EQ(MachineRepresentation::kFloat32, type.representation());
906 91 : translation->StoreFloatRegister(converter.ToFloatRegister(op));
907 : }
908 : } else {
909 8357589 : CHECK(op->IsImmediate());
910 : InstructionOperandConverter converter(this, instr);
911 8357589 : Constant constant = converter.ToConstant(op);
912 : Handle<Object> constant_object;
913 8357596 : switch (constant.type()) {
914 : case Constant::kInt32:
915 16851 : if (type.representation() == MachineRepresentation::kTagged) {
916 : // When pointers are 4 bytes, we can use int32 constants to represent
917 : // Smis.
918 : DCHECK_EQ(4, kPointerSize);
919 : constant_object =
920 0 : handle(reinterpret_cast<Smi*>(constant.ToInt32()), isolate());
921 : DCHECK(constant_object->IsSmi());
922 16851 : } else if (type.representation() == MachineRepresentation::kBit) {
923 4408 : if (constant.ToInt32() == 0) {
924 : constant_object = isolate()->factory()->false_value();
925 : } else {
926 : DCHECK_EQ(1, constant.ToInt32());
927 : constant_object = isolate()->factory()->true_value();
928 : }
929 : } else {
930 : // TODO(jarin,bmeurer): We currently pass in raw pointers to the
931 : // JSFunction::entry here. We should really consider fixing this.
932 : DCHECK(type == MachineType::Int32() ||
933 : type == MachineType::Uint32() ||
934 : type.representation() == MachineRepresentation::kWord32 ||
935 : type.representation() == MachineRepresentation::kNone);
936 : DCHECK(type.representation() != MachineRepresentation::kNone ||
937 : constant.ToInt32() == FrameStateDescriptor::kImpossibleValue);
938 14647 : if (type == MachineType::Uint32()) {
939 : constant_object =
940 2541 : isolate()->factory()->NewNumberFromUint(constant.ToInt32());
941 : } else {
942 : constant_object =
943 27600 : isolate()->factory()->NewNumberFromInt(constant.ToInt32());
944 : }
945 : }
946 : break;
947 : case Constant::kInt64:
948 : // When pointers are 8 bytes, we can use int64 constants to represent
949 : // Smis.
950 : // TODO(jarin,bmeurer): We currently pass in raw pointers to the
951 : // JSFunction::entry here. We should really consider fixing this.
952 : DCHECK(type.representation() == MachineRepresentation::kWord64 ||
953 : type.representation() == MachineRepresentation::kTagged);
954 : DCHECK_EQ(8, kPointerSize);
955 : constant_object =
956 0 : handle(reinterpret_cast<Smi*>(constant.ToInt64()), isolate());
957 : DCHECK(constant_object->IsSmi());
958 0 : break;
959 : case Constant::kFloat32:
960 : DCHECK(type.representation() == MachineRepresentation::kFloat32 ||
961 : type.representation() == MachineRepresentation::kTagged);
962 144 : constant_object = isolate()->factory()->NewNumber(constant.ToFloat32());
963 48 : break;
964 : case Constant::kFloat64:
965 : DCHECK(type.representation() == MachineRepresentation::kFloat64 ||
966 : type.representation() == MachineRepresentation::kTagged);
967 2657954 : constant_object = isolate()->factory()->NewNumber(constant.ToFloat64());
968 1328977 : break;
969 : case Constant::kHeapObject:
970 : DCHECK_EQ(MachineRepresentation::kTagged, type.representation());
971 7011720 : constant_object = constant.ToHeapObject();
972 7011720 : break;
973 : default:
974 0 : UNREACHABLE();
975 : }
976 8357598 : if (constant_object.is_identical_to(info()->closure())) {
977 4052762 : translation->StoreJSFrameFunction();
978 : } else {
979 4304836 : int literal_id = DefineDeoptimizationLiteral(constant_object);
980 4304840 : translation->StoreLiteral(literal_id);
981 : }
982 : }
983 31951290 : }
984 :
985 :
// Records the current assembler offset as the most recent lazy-deopt site;
// last_lazy_deopt_pc_ is consumed elsewhere (not visible in this chunk).
void CodeGenerator::MarkLazyDeoptSite() {
  last_lazy_deopt_pc_ = masm()->pc_offset();
}
989 :
990 261276 : DeoptimizationExit* CodeGenerator::AddDeoptimizationExit(
991 : Instruction* instr, size_t frame_state_offset) {
992 : int const deoptimization_id = BuildTranslation(
993 261276 : instr, -1, frame_state_offset, OutputFrameStateCombine::Ignore());
994 : DeoptimizationExit* const exit = new (zone())
995 522552 : DeoptimizationExit(deoptimization_id, current_source_position_);
996 261276 : deoptimization_exits_.push_back(exit);
997 261276 : return exit;
998 : }
999 :
// Registers this out-of-line code fragment with |gen| by pushing it onto the
// generator's singly linked list of fragments (ools_), so the list is ordered
// most-recently-created first.
OutOfLineCode::OutOfLineCode(CodeGenerator* gen)
    : frame_(gen->frame()), masm_(gen->masm()), next_(gen->ools_) {
  gen->ools_ = this;
}
1004 :
1005 :
1006 0 : OutOfLineCode::~OutOfLineCode() {}
1007 :
1008 : } // namespace compiler
1009 : } // namespace internal
1010 : } // namespace v8
|